From cdfc0a66aaea250bd817ec3ea66ee62e5708fadc Mon Sep 17 00:00:00 2001 From: wantsui Date: Wed, 15 Jan 2025 10:44:28 -0500 Subject: [PATCH 01/16] chore: clean up snowflake instructions (#11891) The current doc makes it seem like you need to call both patch and patch call at the same time: ``` from ddtrace import patch, patch_all patch(snowflake=True) patch_all(snowflake=True) ``` This PR splits it out. ## Checklist - [x] PR author has checked that all the criteria below are met - The PR description includes an overview of the change - The PR description articulates the motivation for the change - The change includes tests OR the PR description describes a testing strategy - The PR description notes risks associated with the change, if any - Newly-added code is easy to change - The change follows the [library release note guidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html) - The change includes or references documentation updates if necessary - Backport labels are set (if [applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)) ## Reviewer Checklist - [x] Reviewer has checked that all the criteria below are met - Title is accurate - All changes are related to the pull request's stated goal - Avoids breaking [API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces) changes - Testing strategy adequately addresses listed risks - Newly-added code is easy to change - Release note makes sense to a user of the library - If necessary, author has acknowledged and discussed the performance implications of this PR as reported in the benchmarks PR comment - Backport labels are set in a manner that is consistent with the [release branch maintenance policy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting) --- ddtrace/contrib/snowflake/__init__.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/ddtrace/contrib/snowflake/__init__.py 
b/ddtrace/contrib/snowflake/__init__.py index e675ff7a067..bbc892a5362 100644 --- a/ddtrace/contrib/snowflake/__init__.py +++ b/ddtrace/contrib/snowflake/__init__.py @@ -9,13 +9,19 @@ The integration is not enabled automatically when using :ref:`ddtrace-run` or :ref:`import ddtrace.auto`. -Use :func:`patch()` to manually enable the integration:: +Use ``DD_TRACE_SNOWFLAKE_ENABLED=true`` to enable it with ``ddtrace-run`` - from ddtrace import patch, patch_all +or :func:`patch()` to manually enable the integration:: + + from ddtrace import patch patch(snowflake=True) + +or use :func:`patch_all()` to manually enable the integration:: + + from ddtrace import patch_all patch_all(snowflake=True) -or the ``DD_TRACE_SNOWFLAKE_ENABLED=true`` to enable it with ``ddtrace-run``. + Global Configuration From 434d565fda10823c0404ed8153867515d808a2ec Mon Sep 17 00:00:00 2001 From: Munir Abdinur Date: Wed, 15 Jan 2025 11:19:43 -0500 Subject: [PATCH 02/16] chore(telemetry): improves typing for telemetry metric namespaces (#11564) Follow up to: #11565 Prevents telemetry metrics from being queued with an invalid namespace ## Checklist - [ ] PR author has checked that all the criteria below are met - The PR description includes an overview of the change - The PR description articulates the motivation for the change - The change includes tests OR the PR description describes a testing strategy - The PR description notes risks associated with the change, if any - Newly-added code is easy to change - The change follows the [library release note guidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html) - The change includes or references documentation updates if necessary - Backport labels are set (if [applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)) ## Reviewer Checklist - [ ] Reviewer has checked that all the criteria below are met - Title is accurate - All changes are related to the pull request's stated goal - Avoids breaking 
[API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces) changes - Testing strategy adequately addresses listed risks - Newly-added code is easy to change - Release note makes sense to a user of the library - If necessary, author has acknowledged and discussed the performance implications of this PR as reported in the benchmarks PR comment - Backport labels are set in a manner that is consistent with the [release branch maintenance policy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting) --- ddtrace/_monkey.py | 6 +- ddtrace/_trace/processor/__init__.py | 4 +- ddtrace/_trace/telemetry.py | 5 +- ddtrace/appsec/_iast/_metrics.py | 16 ++--- ddtrace/appsec/_metrics.py | 10 +-- .../ci_visibility/telemetry/api_request.py | 12 ++-- .../ci_visibility/telemetry/constants.py | 3 - .../ci_visibility/telemetry/coverage.py | 12 ++-- .../telemetry/early_flake_detection.py | 6 +- .../ci_visibility/telemetry/events.py | 20 ++++-- .../internal/ci_visibility/telemetry/git.py | 38 ++++++---- .../internal/ci_visibility/telemetry/itr.py | 16 +++-- .../ci_visibility/telemetry/payload.py | 22 ++++-- ddtrace/internal/telemetry/constants.py | 10 ++- .../internal/telemetry/metrics_namespaces.py | 21 ++++-- ddtrace/internal/telemetry/writer.py | 19 ++--- ddtrace/llmobs/_evaluators/ragas/base.py | 6 +- ddtrace/llmobs/_evaluators/runner.py | 4 +- ddtrace/llmobs/_evaluators/sampler.py | 6 +- ddtrace/settings/_otel_remapper.py | 8 +-- tests/appsec/appsec/test_telemetry.py | 12 ++-- tests/appsec/contrib_appsec/utils.py | 2 +- tests/appsec/iast/test_telemetry.py | 10 +-- tests/telemetry/app.py | 4 +- tests/telemetry/test_telemetry_metrics.py | 69 +++++++++---------- tests/tracer/test_processors.py | 31 ++++++--- 26 files changed, 216 insertions(+), 156 deletions(-) diff --git a/ddtrace/_monkey.py b/ddtrace/_monkey.py index ff7747ce395..d2306ace9ce 100644 --- a/ddtrace/_monkey.py +++ b/ddtrace/_monkey.py @@ -6,6 +6,7 @@ from wrapt.importer import 
when_imported from ddtrace.appsec import load_common_appsec_modules +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE from .appsec._iast._utils import _is_iast_enabled from .internal import telemetry @@ -186,7 +187,10 @@ def on_import(hook): ) telemetry.telemetry_writer.add_integration(module, False, PATCH_MODULES.get(module) is True, str(e)) telemetry.telemetry_writer.add_count_metric( - "tracers", "integration_errors", 1, (("integration_name", module), ("error_type", type(e).__name__)) + TELEMETRY_NAMESPACE.TRACERS, + "integration_errors", + 1, + (("integration_name", module), ("error_type", type(e).__name__)), ) else: if hasattr(imported_module, "get_versions"): diff --git a/ddtrace/_trace/processor/__init__.py b/ddtrace/_trace/processor/__init__.py index 03d815b86d2..fc59a64828b 100644 --- a/ddtrace/_trace/processor/__init__.py +++ b/ddtrace/_trace/processor/__init__.py @@ -26,7 +26,7 @@ from ddtrace.internal.sampling import is_single_span_sampled from ddtrace.internal.service import ServiceStatusError from ddtrace.internal.telemetry.constants import TELEMETRY_LOG_LEVEL -from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE_TAG_TRACER +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE from ddtrace.internal.writer import TraceWriter @@ -392,6 +392,6 @@ def _queue_span_count_metrics(self, metric_name: str, tag_name: str, min_count: if config._telemetry_enabled and sum(self._span_metrics[metric_name].values()) >= min_count: for tag_value, count in self._span_metrics[metric_name].items(): telemetry.telemetry_writer.add_count_metric( - TELEMETRY_NAMESPACE_TAG_TRACER, metric_name, count, tags=((tag_name, tag_value),) + TELEMETRY_NAMESPACE.TRACERS, metric_name, count, tags=((tag_name, tag_value),) ) self._span_metrics[metric_name] = defaultdict(int) diff --git a/ddtrace/_trace/telemetry.py b/ddtrace/_trace/telemetry.py index f9cd9ef79b9..929acd101ec 100644 --- a/ddtrace/_trace/telemetry.py +++ 
b/ddtrace/_trace/telemetry.py @@ -2,11 +2,12 @@ from typing import Tuple from ddtrace.internal.telemetry import telemetry_writer +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE def record_span_pointer_calculation(context: str, span_pointer_count: int) -> None: telemetry_writer.add_count_metric( - namespace="tracers", + namespace=TELEMETRY_NAMESPACE.TRACERS, name="span_pointer_calculation", value=1, tags=(("context", context), ("count", _span_pointer_count_to_tag(span_pointer_count))), @@ -45,7 +46,7 @@ def record_span_pointer_calculation_issue( tags += additional_tags telemetry_writer.add_count_metric( - namespace="tracers", + namespace=TELEMETRY_NAMESPACE.TRACERS, name="span_pointer_calculation.issue", value=1, tags=tags, diff --git a/ddtrace/appsec/_iast/_metrics.py b/ddtrace/appsec/_iast/_metrics.py index e9e0f604e69..35e2729565e 100644 --- a/ddtrace/appsec/_iast/_metrics.py +++ b/ddtrace/appsec/_iast/_metrics.py @@ -12,7 +12,7 @@ from ddtrace.internal import telemetry from ddtrace.internal.logger import get_logger from ddtrace.internal.telemetry.constants import TELEMETRY_LOG_LEVEL -from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE_TAG_IAST +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE from ddtrace.settings.asm import config as asm_config @@ -73,19 +73,19 @@ def _set_metric_iast_instrumented_source(source_type): from ._taint_tracking import origin_to_str telemetry.telemetry_writer.add_count_metric( - TELEMETRY_NAMESPACE_TAG_IAST, "instrumented.source", 1, (("source_type", origin_to_str(source_type)),) + TELEMETRY_NAMESPACE.IAST, "instrumented.source", 1, (("source_type", origin_to_str(source_type)),) ) @metric_verbosity(TELEMETRY_MANDATORY_VERBOSITY) def _set_metric_iast_instrumented_propagation(): - telemetry.telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE_TAG_IAST, "instrumented.propagation", 1) + telemetry.telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.IAST, 
"instrumented.propagation", 1) @metric_verbosity(TELEMETRY_MANDATORY_VERBOSITY) def _set_metric_iast_instrumented_sink(vulnerability_type, counter=1): telemetry.telemetry_writer.add_count_metric( - TELEMETRY_NAMESPACE_TAG_IAST, "instrumented.sink", counter, (("vulnerability_type", vulnerability_type),) + TELEMETRY_NAMESPACE.IAST, "instrumented.sink", counter, (("vulnerability_type", vulnerability_type),) ) @@ -94,14 +94,14 @@ def _set_metric_iast_executed_source(source_type): from ._taint_tracking import origin_to_str telemetry.telemetry_writer.add_count_metric( - TELEMETRY_NAMESPACE_TAG_IAST, "executed.source", 1, (("source_type", origin_to_str(source_type)),) + TELEMETRY_NAMESPACE.IAST, "executed.source", 1, (("source_type", origin_to_str(source_type)),) ) @metric_verbosity(TELEMETRY_INFORMATION_VERBOSITY) def _set_metric_iast_executed_sink(vulnerability_type): telemetry.telemetry_writer.add_count_metric( - TELEMETRY_NAMESPACE_TAG_IAST, "executed.sink", 1, (("vulnerability_type", vulnerability_type),) + TELEMETRY_NAMESPACE.IAST, "executed.sink", 1, (("vulnerability_type", vulnerability_type),) ) @@ -115,9 +115,7 @@ def _request_tainted(): def _set_metric_iast_request_tainted(): total_objects_tainted = _request_tainted() if total_objects_tainted > 0: - telemetry.telemetry_writer.add_count_metric( - TELEMETRY_NAMESPACE_TAG_IAST, "request.tainted", total_objects_tainted - ) + telemetry.telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.IAST, "request.tainted", total_objects_tainted) def _set_span_tag_iast_request_tainted(span): diff --git a/ddtrace/appsec/_metrics.py b/ddtrace/appsec/_metrics.py index cbe8490d717..3d5c7e3e59f 100644 --- a/ddtrace/appsec/_metrics.py +++ b/ddtrace/appsec/_metrics.py @@ -5,7 +5,7 @@ from ddtrace.internal import telemetry from ddtrace.internal.logger import get_logger from ddtrace.internal.telemetry.constants import TELEMETRY_LOG_LEVEL -from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE_TAG_APPSEC +from 
ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE log = get_logger(__name__) @@ -36,7 +36,7 @@ def _set_waf_updates_metric(info): tags = (("waf_version", DDWAF_VERSION),) telemetry.telemetry_writer.add_count_metric( - TELEMETRY_NAMESPACE_TAG_APPSEC, + TELEMETRY_NAMESPACE.APPSEC, "waf.updates", 1.0, tags=tags, @@ -56,7 +56,7 @@ def _set_waf_init_metric(info): tags = (("waf_version", DDWAF_VERSION),) telemetry.telemetry_writer.add_count_metric( - TELEMETRY_NAMESPACE_TAG_APPSEC, + TELEMETRY_NAMESPACE.APPSEC, "waf.init", 1.0, tags=tags, @@ -90,7 +90,7 @@ def _set_waf_request_metrics(*args): ) telemetry.telemetry_writer.add_count_metric( - TELEMETRY_NAMESPACE_TAG_APPSEC, + TELEMETRY_NAMESPACE.APPSEC, "waf.requests", 1.0, tags=tags_request, @@ -101,7 +101,7 @@ def _set_waf_request_metrics(*args): for rule_type, value in rasp[t].items(): if value: telemetry.telemetry_writer.add_count_metric( - TELEMETRY_NAMESPACE_TAG_APPSEC, + TELEMETRY_NAMESPACE.APPSEC, n, float(value), tags=_TYPES_AND_TAGS.get(rule_type, ()) + (("waf_version", DDWAF_VERSION),), diff --git a/ddtrace/internal/ci_visibility/telemetry/api_request.py b/ddtrace/internal/ci_visibility/telemetry/api_request.py index 076cc0cca77..77f3ea5f626 100644 --- a/ddtrace/internal/ci_visibility/telemetry/api_request.py +++ b/ddtrace/internal/ci_visibility/telemetry/api_request.py @@ -1,10 +1,10 @@ import dataclasses from typing import Optional -from ddtrace.internal.ci_visibility.telemetry.constants import CIVISIBILITY_TELEMETRY_NAMESPACE as _NAMESPACE from ddtrace.internal.ci_visibility.telemetry.constants import ERROR_TYPES from ddtrace.internal.logger import get_logger from ddtrace.internal.telemetry import telemetry_writer +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE log = get_logger(__name__) @@ -32,13 +32,15 @@ def record_api_request( error, ) - telemetry_writer.add_count_metric(_NAMESPACE, f"{metric_names.count}", 1) - telemetry_writer.add_distribution_metric(_NAMESPACE, 
f"{metric_names.duration}", duration) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.CIVISIBILITY, f"{metric_names.count}", 1) + telemetry_writer.add_distribution_metric(TELEMETRY_NAMESPACE.CIVISIBILITY, f"{metric_names.duration}", duration) if response_bytes is not None: if metric_names.response_bytes is not None: # We don't always want to record response bytes (for settings requests), so assume that no metric name # means we don't want to record it. - telemetry_writer.add_distribution_metric(_NAMESPACE, f"{metric_names.response_bytes}", response_bytes) + telemetry_writer.add_distribution_metric( + TELEMETRY_NAMESPACE.CIVISIBILITY, f"{metric_names.response_bytes}", response_bytes + ) if error is not None: record_api_request_error(metric_names.error, error) @@ -46,4 +48,4 @@ def record_api_request( def record_api_request_error(error_metric_name: str, error: ERROR_TYPES): log.debug("Recording early flake detection request error telemetry: %s", error) - telemetry_writer.add_count_metric(_NAMESPACE, error_metric_name, 1, (("error_type", error),)) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.CIVISIBILITY, error_metric_name, 1, (("error_type", error),)) diff --git a/ddtrace/internal/ci_visibility/telemetry/constants.py b/ddtrace/internal/ci_visibility/telemetry/constants.py index dad54511c04..191338e86e9 100644 --- a/ddtrace/internal/ci_visibility/telemetry/constants.py +++ b/ddtrace/internal/ci_visibility/telemetry/constants.py @@ -1,9 +1,6 @@ from enum import Enum -CIVISIBILITY_TELEMETRY_NAMESPACE = "civisibility" - - class ERROR_TYPES(str, Enum): TIMEOUT = "timeout" NETWORK = "network" diff --git a/ddtrace/internal/ci_visibility/telemetry/coverage.py b/ddtrace/internal/ci_visibility/telemetry/coverage.py index e3370fbee6e..392196f7236 100644 --- a/ddtrace/internal/ci_visibility/telemetry/coverage.py +++ b/ddtrace/internal/ci_visibility/telemetry/coverage.py @@ -3,10 +3,10 @@ from typing import Optional from typing import Tuple -from 
ddtrace.internal.ci_visibility.telemetry.constants import CIVISIBILITY_TELEMETRY_NAMESPACE as _NAMESPACE from ddtrace.internal.ci_visibility.telemetry.constants import TEST_FRAMEWORKS from ddtrace.internal.logger import get_logger from ddtrace.internal.telemetry import telemetry_writer +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE log = get_logger(__name__) @@ -30,7 +30,7 @@ def record_code_coverage_started(coverage_library: COVERAGE_LIBRARY, test_framew _tags: List[Tuple[str, str]] = [("library", coverage_library)] if test_framework is not None: _tags.append(("test_framework", test_framework)) - telemetry_writer.add_count_metric(_NAMESPACE, COVERAGE_TELEMETRY.STARTED, 1, tuple(_tags)) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.CIVISIBILITY, COVERAGE_TELEMETRY.STARTED, 1, tuple(_tags)) def record_code_coverage_finished(coverage_library: COVERAGE_LIBRARY, test_framework: Optional[TEST_FRAMEWORKS] = None): @@ -38,19 +38,19 @@ def record_code_coverage_finished(coverage_library: COVERAGE_LIBRARY, test_frame _tags: List[Tuple[str, str]] = [("library", coverage_library)] if test_framework is not None: _tags.append(("test_framework", test_framework)) - telemetry_writer.add_count_metric(_NAMESPACE, COVERAGE_TELEMETRY.FINISHED, 1, tuple(_tags)) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.CIVISIBILITY, COVERAGE_TELEMETRY.FINISHED, 1, tuple(_tags)) def record_code_coverage_empty(): log.debug("Recording code coverage empty telemetry") - telemetry_writer.add_count_metric(_NAMESPACE, COVERAGE_TELEMETRY.IS_EMPTY, 1) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.CIVISIBILITY, COVERAGE_TELEMETRY.IS_EMPTY, 1) def record_code_coverage_files(count_files: int): log.debug("Recording code coverage files telemetry: %s", count_files) - telemetry_writer.add_distribution_metric(_NAMESPACE, COVERAGE_TELEMETRY.FILES, count_files) + telemetry_writer.add_distribution_metric(TELEMETRY_NAMESPACE.CIVISIBILITY, COVERAGE_TELEMETRY.FILES, 
count_files) def record_code_coverage_error(): log.debug("Recording code coverage error telemetry") - telemetry_writer.add_count_metric(_NAMESPACE, COVERAGE_TELEMETRY.ERRORS, 1) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.CIVISIBILITY, COVERAGE_TELEMETRY.ERRORS, 1) diff --git a/ddtrace/internal/ci_visibility/telemetry/early_flake_detection.py b/ddtrace/internal/ci_visibility/telemetry/early_flake_detection.py index f8a512e7048..b9e9e48d021 100644 --- a/ddtrace/internal/ci_visibility/telemetry/early_flake_detection.py +++ b/ddtrace/internal/ci_visibility/telemetry/early_flake_detection.py @@ -1,8 +1,8 @@ from enum import Enum -from ddtrace.internal.ci_visibility.telemetry.constants import CIVISIBILITY_TELEMETRY_NAMESPACE as _NAMESPACE from ddtrace.internal.logger import get_logger from ddtrace.internal.telemetry import telemetry_writer +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE log = get_logger(__name__) @@ -19,5 +19,7 @@ class EARLY_FLAKE_DETECTION_TELEMETRY(str, Enum): def record_early_flake_detection_tests_count(early_flake_detection_count: int): log.debug("Recording early flake detection tests count telemetry: %s", early_flake_detection_count) telemetry_writer.add_distribution_metric( - _NAMESPACE, EARLY_FLAKE_DETECTION_TELEMETRY.RESPONSE_TESTS.value, early_flake_detection_count + TELEMETRY_NAMESPACE.CIVISIBILITY, + EARLY_FLAKE_DETECTION_TELEMETRY.RESPONSE_TESTS.value, + early_flake_detection_count, ) diff --git a/ddtrace/internal/ci_visibility/telemetry/events.py b/ddtrace/internal/ci_visibility/telemetry/events.py index 34c603c3b03..b630ee96413 100644 --- a/ddtrace/internal/ci_visibility/telemetry/events.py +++ b/ddtrace/internal/ci_visibility/telemetry/events.py @@ -3,11 +3,11 @@ from typing import Optional from typing import Tuple -from ddtrace.internal.ci_visibility.telemetry.constants import CIVISIBILITY_TELEMETRY_NAMESPACE as _NAMESPACE from ddtrace.internal.ci_visibility.telemetry.constants import EVENT_TYPES from 
ddtrace.internal.ci_visibility.telemetry.constants import TEST_FRAMEWORKS from ddtrace.internal.logger import get_logger from ddtrace.internal.telemetry import telemetry_writer +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE log = get_logger(__name__) @@ -67,7 +67,7 @@ def _record_event( if early_flake_detection_abort_reason and event == EVENTS_TELEMETRY.FINISHED and event_type == EVENT_TYPES.SESSION: _tags.append(("early_flake_detection_abort_reason", early_flake_detection_abort_reason)) - telemetry_writer.add_count_metric(_NAMESPACE, event.value, 1, tuple(_tags)) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.CIVISIBILITY, event.value, 1, tuple(_tags)) def record_event_created( @@ -117,11 +117,19 @@ def record_event_finished( def record_manual_api_event_created(event_type: EVENT_TYPES): # Note: _created suffix is added in cases we were to change the metric name in the future. # The current metric applies to event creation even though it does not specify it - telemetry_writer.add_count_metric(_NAMESPACE, EVENTS_TELEMETRY.MANUAL_API_EVENT, 1, (("event_type", event_type),)) + telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE.CIVISIBILITY, + EVENTS_TELEMETRY.MANUAL_API_EVENT, + 1, + (("event_type", event_type),) + ) def record_events_enqueued_for_serialization(events_count: int): - telemetry_writer.add_count_metric(_NAMESPACE, EVENTS_TELEMETRY.ENQUEUED_FOR_SERIALIZATION, events_count) + telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE.CIVISIBILITY, + EVENTS_TELEMETRY.ENQUEUED_FOR_SERIALIZATION, + events_count) def record_event_created_test( @@ -139,7 +147,7 @@ def record_event_created_test( if is_benchmark: tags.append(("is_benchmark", "true")) - telemetry_writer.add_count_metric(_NAMESPACE, EVENTS_TELEMETRY.FINISHED, 1, tuple(tags)) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.CIVISIBILITY, EVENTS_TELEMETRY.FINISHED, 1, tuple(tags)) def record_event_finished_test( @@ -190,4 +198,4 @@ def 
record_event_finished_test( if is_quarantined: tags.append(("is_quarantined", "true")) - telemetry_writer.add_count_metric(_NAMESPACE, EVENTS_TELEMETRY.FINISHED, 1, tuple(tags)) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.CIVISIBILITY, EVENTS_TELEMETRY.FINISHED, 1, tuple(tags)) diff --git a/ddtrace/internal/ci_visibility/telemetry/git.py b/ddtrace/internal/ci_visibility/telemetry/git.py index faf01621cde..41bca64a8fd 100644 --- a/ddtrace/internal/ci_visibility/telemetry/git.py +++ b/ddtrace/internal/ci_visibility/telemetry/git.py @@ -1,11 +1,11 @@ from typing import Optional -from ddtrace.internal.ci_visibility.telemetry.constants import CIVISIBILITY_TELEMETRY_NAMESPACE as _NAMESPACE from ddtrace.internal.ci_visibility.telemetry.constants import ERROR_TYPES from ddtrace.internal.ci_visibility.telemetry.constants import GIT_TELEMETRY from ddtrace.internal.ci_visibility.telemetry.constants import GIT_TELEMETRY_COMMANDS from ddtrace.internal.logger import get_logger from ddtrace.internal.telemetry import telemetry_writer +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE log = get_logger(__name__) @@ -14,35 +14,45 @@ def record_git_command(command: GIT_TELEMETRY_COMMANDS, duration: float, exit_code: Optional[int]) -> None: log.debug("Recording git command telemetry: %s, %s, %s", command, duration, exit_code) tags = (("command", command),) - telemetry_writer.add_count_metric(_NAMESPACE, GIT_TELEMETRY.COMMAND_COUNT, 1, tags) - telemetry_writer.add_distribution_metric(_NAMESPACE, GIT_TELEMETRY.COMMAND_MS, duration, tags) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.CIVISIBILITY, GIT_TELEMETRY.COMMAND_COUNT, 1, tags) + telemetry_writer.add_distribution_metric(TELEMETRY_NAMESPACE.CIVISIBILITY, GIT_TELEMETRY.COMMAND_MS, duration, tags) if exit_code is not None and exit_code != 0: error_tags = (("command", command), ("exit_code", str(exit_code))) - telemetry_writer.add_count_metric(_NAMESPACE, GIT_TELEMETRY.COMMAND_ERRORS, 1, 
error_tags) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.CIVISIBILITY, GIT_TELEMETRY.COMMAND_ERRORS, 1, error_tags) def record_search_commits(duration: float, error: Optional[ERROR_TYPES] = None) -> None: log.debug("Recording search commits telemetry: %s, %s", duration, error) - telemetry_writer.add_count_metric(_NAMESPACE, GIT_TELEMETRY.SEARCH_COMMITS_COUNT, 1) - telemetry_writer.add_distribution_metric(_NAMESPACE, GIT_TELEMETRY.SEARCH_COMMITS_MS, duration) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.CIVISIBILITY, GIT_TELEMETRY.SEARCH_COMMITS_COUNT, 1) + telemetry_writer.add_distribution_metric( + TELEMETRY_NAMESPACE.CIVISIBILITY, GIT_TELEMETRY.SEARCH_COMMITS_MS, duration + ) if error is not None: error_tags = (("error_type", str(error)),) - telemetry_writer.add_count_metric(_NAMESPACE, GIT_TELEMETRY.SEARCH_COMMITS_ERRORS, 1, error_tags) + telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE.CIVISIBILITY, GIT_TELEMETRY.SEARCH_COMMITS_ERRORS, 1, error_tags + ) def record_objects_pack_request(duration: float, error: Optional[ERROR_TYPES] = None) -> None: log.debug("Recording objects pack request telmetry: %s, %s", duration, error) - telemetry_writer.add_count_metric(_NAMESPACE, GIT_TELEMETRY.OBJECTS_PACK_COUNT, 1) - telemetry_writer.add_distribution_metric(_NAMESPACE, GIT_TELEMETRY.OBJECTS_PACK_MS, duration) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.CIVISIBILITY, GIT_TELEMETRY.OBJECTS_PACK_COUNT, 1) + telemetry_writer.add_distribution_metric(TELEMETRY_NAMESPACE.CIVISIBILITY, GIT_TELEMETRY.OBJECTS_PACK_MS, duration) if error is not None: error_tags = (("error", error),) - telemetry_writer.add_count_metric(_NAMESPACE, GIT_TELEMETRY.OBJECTS_PACK_ERRORS, 1, error_tags) + telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE.CIVISIBILITY, GIT_TELEMETRY.OBJECTS_PACK_ERRORS, 1, error_tags + ) def record_objects_pack_data(num_files: int, num_bytes: int) -> None: log.debug("Recording objects pack data telemetry: %s, %s", num_files, 
num_bytes) - telemetry_writer.add_distribution_metric(_NAMESPACE, GIT_TELEMETRY.OBJECTS_PACK_BYTES, num_bytes) - telemetry_writer.add_distribution_metric(_NAMESPACE, GIT_TELEMETRY.OBJECTS_PACK_FILES, num_files) + telemetry_writer.add_distribution_metric( + TELEMETRY_NAMESPACE.CIVISIBILITY, GIT_TELEMETRY.OBJECTS_PACK_BYTES, num_bytes + ) + telemetry_writer.add_distribution_metric( + TELEMETRY_NAMESPACE.CIVISIBILITY, GIT_TELEMETRY.OBJECTS_PACK_FILES, num_files + ) def record_settings_response( @@ -87,4 +97,6 @@ def record_settings_response( response_tags.append(("quarantine_enabled", "true")) if response_tags: - telemetry_writer.add_count_metric(_NAMESPACE, GIT_TELEMETRY.SETTINGS_RESPONSE, 1, tuple(response_tags)) + telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE.CIVISIBILITY, GIT_TELEMETRY.SETTINGS_RESPONSE, 1, tuple(response_tags) + ) diff --git a/ddtrace/internal/ci_visibility/telemetry/itr.py b/ddtrace/internal/ci_visibility/telemetry/itr.py index 210a4103734..b8bf6889471 100644 --- a/ddtrace/internal/ci_visibility/telemetry/itr.py +++ b/ddtrace/internal/ci_visibility/telemetry/itr.py @@ -2,10 +2,10 @@ import functools from ddtrace.internal.ci_visibility.constants import SUITE -from ddtrace.internal.ci_visibility.telemetry.constants import CIVISIBILITY_TELEMETRY_NAMESPACE as _NAMESPACE from ddtrace.internal.ci_visibility.telemetry.constants import EVENT_TYPES from ddtrace.internal.logger import get_logger from ddtrace.internal.telemetry import telemetry_writer +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE log = get_logger(__name__) @@ -40,18 +40,24 @@ def wrapper(event_type: str): @_enforce_event_is_test_or_suite def record_itr_skipped(event_type: EVENT_TYPES): log.debug("Recording itr skipped telemetry for %s", event_type) - telemetry_writer.add_count_metric(_NAMESPACE, ITR_TELEMETRY.SKIPPED, 1, (("event_type", event_type.value),)) + telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE.CIVISIBILITY, ITR_TELEMETRY.SKIPPED, 1, 
(("event_type", event_type.value),) + ) @_enforce_event_is_test_or_suite def record_itr_unskippable(event_type: EVENT_TYPES): log.debug("Recording itr unskippable telemetry for %s", event_type) - telemetry_writer.add_count_metric(_NAMESPACE, ITR_TELEMETRY.UNSKIPPABLE, 1, (("event_type", event_type.value),)) + telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE.CIVISIBILITY, ITR_TELEMETRY.UNSKIPPABLE, 1, (("event_type", event_type.value),) + ) def record_itr_forced_run(event_type: EVENT_TYPES): log.debug("Recording itr forced run telemetry for %s", event_type) - telemetry_writer.add_count_metric(_NAMESPACE, ITR_TELEMETRY.FORCED_RUN, 1, (("event_type", event_type.value),)) + telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE.CIVISIBILITY, ITR_TELEMETRY.FORCED_RUN, 1, (("event_type", event_type.value),) + ) def record_skippable_count(skippable_count: int, skipping_level: str): @@ -60,4 +66,4 @@ def record_skippable_count(skippable_count: int, skipping_level: str): if skipping_level == SUITE else SKIPPABLE_TESTS_TELEMETRY.RESPONSE_TESTS ) - telemetry_writer.add_count_metric(_NAMESPACE, skippable_count_metric, skippable_count) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.CIVISIBILITY, skippable_count_metric, skippable_count) diff --git a/ddtrace/internal/ci_visibility/telemetry/payload.py b/ddtrace/internal/ci_visibility/telemetry/payload.py index 1cf41d306ff..f5dd7a9ca00 100644 --- a/ddtrace/internal/ci_visibility/telemetry/payload.py +++ b/ddtrace/internal/ci_visibility/telemetry/payload.py @@ -1,8 +1,8 @@ from enum import Enum -from ddtrace.internal.ci_visibility.telemetry.constants import CIVISIBILITY_TELEMETRY_NAMESPACE as _NAMESPACE from ddtrace.internal.logger import get_logger from ddtrace.internal.telemetry import telemetry_writer +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE log = get_logger(__name__) @@ -31,38 +31,46 @@ class REQUEST_ERROR_TYPE(str, Enum): def record_endpoint_payload_bytes(endpoint: ENDPOINT, 
nbytes: int) -> None: log.debug("Recording endpoint payload bytes: %s, %s", endpoint, nbytes) tags = (("endpoint", endpoint.value),) - telemetry_writer.add_distribution_metric(_NAMESPACE, ENDPOINT_PAYLOAD_TELEMETRY.BYTES.value, nbytes, tags) + telemetry_writer.add_distribution_metric( + TELEMETRY_NAMESPACE.CIVISIBILITY, ENDPOINT_PAYLOAD_TELEMETRY.BYTES.value, nbytes, tags + ) def record_endpoint_payload_request(endpoint: ENDPOINT) -> None: log.debug("Recording endpoint payload request: %s", endpoint) tags = (("endpoint", endpoint.value),) - telemetry_writer.add_count_metric(_NAMESPACE, ENDPOINT_PAYLOAD_TELEMETRY.REQUESTS_COUNT.value, 1, tags) + telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE.CIVISIBILITY, ENDPOINT_PAYLOAD_TELEMETRY.REQUESTS_COUNT.value, 1, tags + ) def record_endpoint_payload_request_time(endpoint: ENDPOINT, seconds: float) -> None: log.debug("Recording endpoint payload request time: %s, %s seconds", endpoint, seconds) tags = (("endpoint", endpoint.value),) telemetry_writer.add_distribution_metric( - _NAMESPACE, ENDPOINT_PAYLOAD_TELEMETRY.REQUESTS_MS.value, seconds * 1000, tags + TELEMETRY_NAMESPACE.CIVISIBILITY, ENDPOINT_PAYLOAD_TELEMETRY.REQUESTS_MS.value, seconds * 1000, tags ) def record_endpoint_payload_request_error(endpoint: ENDPOINT, error_type: REQUEST_ERROR_TYPE) -> None: log.debug("Recording endpoint payload request error: %s, %s", endpoint, error_type) tags = (("endpoint", endpoint.value), ("error_type", error_type)) - telemetry_writer.add_count_metric(_NAMESPACE, ENDPOINT_PAYLOAD_TELEMETRY.REQUESTS_ERRORS.value, 1, tags) + telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE.CIVISIBILITY, ENDPOINT_PAYLOAD_TELEMETRY.REQUESTS_ERRORS.value, 1, tags + ) def record_endpoint_payload_events_count(endpoint: ENDPOINT, count: int) -> None: log.debug("Recording endpoint payload events count: %s, %s", endpoint, count) tags = (("endpoint", endpoint.value),) - telemetry_writer.add_distribution_metric(_NAMESPACE, 
ENDPOINT_PAYLOAD_TELEMETRY.EVENTS_COUNT.value, count, tags) + telemetry_writer.add_distribution_metric( + TELEMETRY_NAMESPACE.CIVISIBILITY, ENDPOINT_PAYLOAD_TELEMETRY.EVENTS_COUNT.value, count, tags + ) def record_endpoint_payload_events_serialization_time(endpoint: ENDPOINT, seconds: float) -> None: log.debug("Recording endpoint payload serialization time: %s, %s seconds", endpoint, seconds) tags = (("endpoint", endpoint.value),) telemetry_writer.add_distribution_metric( - _NAMESPACE, ENDPOINT_PAYLOAD_TELEMETRY.EVENTS_SERIALIZATION_MS.value, seconds * 1000, tags + TELEMETRY_NAMESPACE.CIVISIBILITY, ENDPOINT_PAYLOAD_TELEMETRY.EVENTS_SERIALIZATION_MS.value, seconds * 1000, tags ) diff --git a/ddtrace/internal/telemetry/constants.py b/ddtrace/internal/telemetry/constants.py index 3298fdd7616..a809b5f2f4f 100644 --- a/ddtrace/internal/telemetry/constants.py +++ b/ddtrace/internal/telemetry/constants.py @@ -1,9 +1,13 @@ from enum import Enum -TELEMETRY_NAMESPACE_TAG_TRACER = "tracers" -TELEMETRY_NAMESPACE_TAG_APPSEC = "appsec" -TELEMETRY_NAMESPACE_TAG_IAST = "iast" +class TELEMETRY_NAMESPACE(Enum): + TRACERS = "tracers" + APPSEC = "appsec" + IAST = "iast" + CIVISIBILITY = "civisibility" + MLOBS = "mlobs" + TELEMETRY_TYPE_GENERATE_METRICS = "generate-metrics" TELEMETRY_TYPE_DISTRIBUTION = "distributions" diff --git a/ddtrace/internal/telemetry/metrics_namespaces.py b/ddtrace/internal/telemetry/metrics_namespaces.py index 927f6de775d..4b432ba330c 100644 --- a/ddtrace/internal/telemetry/metrics_namespaces.py +++ b/ddtrace/internal/telemetry/metrics_namespaces.py @@ -5,6 +5,7 @@ from typing import Type # noqa:F401 from ddtrace.internal import forksafe +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE from ddtrace.internal.telemetry.constants import TELEMETRY_TYPE_DISTRIBUTION from ddtrace.internal.telemetry.constants import TELEMETRY_TYPE_GENERATE_METRICS from ddtrace.internal.telemetry.metrics import DistributionMetric @@ -34,23 +35,31 @@ def 
flush(self): } return namespace_metrics - def add_metric(self, metric_class, namespace, name, value=1.0, tags=None, interval=None): - # type: (Type[Metric], str, str, float, MetricTagType, Optional[float]) -> None + def add_metric( + self, + metric_class: Type[Metric], + namespace: TELEMETRY_NAMESPACE, + name: str, + value: float = 1.0, + tags: MetricTagType = None, + interval: Optional[float] = None, + ) -> None: """ Telemetry Metrics are stored in DD dashboards, check the metrics in datadoghq.com/metric/explorer. The metric will store in dashboard as "dd.instrumentation_telemetry_data." + namespace + "." + name """ - metric_id = Metric.get_id(name, namespace, tags, metric_class.metric_type) + namespace_str = namespace.value + metric_id = Metric.get_id(name, namespace_str, tags, metric_class.metric_type) if metric_class is DistributionMetric: metrics_type_payload = TELEMETRY_TYPE_DISTRIBUTION else: metrics_type_payload = TELEMETRY_TYPE_GENERATE_METRICS with self._lock: - existing_metric = self._metrics_data[metrics_type_payload][namespace].get(metric_id) + existing_metric = self._metrics_data[metrics_type_payload][namespace_str].get(metric_id) if existing_metric: existing_metric.add_point(value) else: - new_metric = metric_class(namespace, name, tags=tags, common=True, interval=interval) + new_metric = metric_class(namespace_str, name, tags=tags, common=True, interval=interval) new_metric.add_point(value) - self._metrics_data[metrics_type_payload][namespace][metric_id] = new_metric + self._metrics_data[metrics_type_payload][namespace_str][metric_id] = new_metric diff --git a/ddtrace/internal/telemetry/writer.py b/ddtrace/internal/telemetry/writer.py index 2be240c06fd..35a73d5e235 100644 --- a/ddtrace/internal/telemetry/writer.py +++ b/ddtrace/internal/telemetry/writer.py @@ -31,6 +31,7 @@ from . 
import modules from .constants import TELEMETRY_APM_PRODUCT from .constants import TELEMETRY_LOG_LEVEL # noqa:F401 +from .constants import TELEMETRY_NAMESPACE from .constants import TELEMETRY_TYPE_DISTRIBUTION from .constants import TELEMETRY_TYPE_GENERATE_METRICS from .constants import TELEMETRY_TYPE_LOGS @@ -337,7 +338,7 @@ def _app_started(self, register_app_shutdown=True): } # SOABI should help us identify which wheels people are getting from PyPI - self.add_configurations(get_python_config_vars()) # type: ignore + self.add_configurations(get_python_config_vars()) payload = { "configuration": self._flush_configuration_queue(), @@ -474,7 +475,6 @@ def add_configuration(self, configuration_name, configuration_value, origin="unk } def add_configurations(self, configuration_list): - # type: (List[Tuple[str, Union[bool, float, str], str]]) -> None """Creates and queues a list of configurations""" with self._service_lock: for name, value, _origin in configuration_list: @@ -485,7 +485,6 @@ def add_configurations(self, configuration_list): } def add_log(self, level, message, stack_trace="", tags=None): - # type: (TELEMETRY_LOG_LEVEL, str, str, Optional[Dict]) -> None """ Queues log. This event is meant to send library logs to Datadog’s backend through the Telemetry intake. This will make support cycles easier and ensure we know about potentially silent issues in libraries. 
@@ -507,8 +506,7 @@ def add_log(self, level, message, stack_trace="", tags=None): data["stack_trace"] = stack_trace self._logs.add(data) - def add_gauge_metric(self, namespace, name, value, tags=None): - # type: (str,str, float, MetricTagType) -> None + def add_gauge_metric(self, namespace: TELEMETRY_NAMESPACE, name: str, value: float, tags: MetricTagType = None): """ Queues gauge metric """ @@ -522,8 +520,7 @@ def add_gauge_metric(self, namespace, name, value, tags=None): self.interval, ) - def add_rate_metric(self, namespace, name, value=1.0, tags=None): - # type: (str,str, float, MetricTagType) -> None + def add_rate_metric(self, namespace: TELEMETRY_NAMESPACE, name: str, value: float, tags: MetricTagType = None): """ Queues rate metric """ @@ -537,8 +534,7 @@ def add_rate_metric(self, namespace, name, value=1.0, tags=None): self.interval, ) - def add_count_metric(self, namespace, name, value=1.0, tags=None): - # type: (str,str, float, MetricTagType) -> None + def add_count_metric(self, namespace: TELEMETRY_NAMESPACE, name: str, value: int = 1, tags: MetricTagType = None): """ Queues count metric """ @@ -551,8 +547,7 @@ def add_count_metric(self, namespace, name, value=1.0, tags=None): tags, ) - def add_distribution_metric(self, namespace, name, value=1.0, tags=None): - # type: (str,str, float, MetricTagType) -> None + def add_distribution_metric(self, namespace: TELEMETRY_NAMESPACE, name: str, value, tags: MetricTagType = None): """ Queues distributions metric """ @@ -708,7 +703,7 @@ def _telemetry_excepthook(self, tp, value, root_traceback): internal_index = dir_parts.index("internal") integration_name = dir_parts[internal_index + 1] self.add_count_metric( - "tracers", + TELEMETRY_NAMESPACE.TRACERS, "integration_errors", 1, (("integration_name", integration_name), ("error_type", tp.__name__)), diff --git a/ddtrace/llmobs/_evaluators/ragas/base.py b/ddtrace/llmobs/_evaluators/ragas/base.py index 23aa4cd3caa..10e89165a01 100644 --- 
a/ddtrace/llmobs/_evaluators/ragas/base.py +++ b/ddtrace/llmobs/_evaluators/ragas/base.py @@ -6,8 +6,8 @@ from ddtrace.internal.logger import get_logger from ddtrace.internal.telemetry import telemetry_writer -from ddtrace.internal.telemetry.constants import TELEMETRY_APM_PRODUCT from ddtrace.internal.telemetry.constants import TELEMETRY_LOG_LEVEL +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE from ddtrace.internal.utils.version import parse_version from ddtrace.llmobs._constants import INTERNAL_CONTEXT_VARIABLE_KEYS from ddtrace.llmobs._constants import INTERNAL_QUERY_VARIABLE_KEYS @@ -121,7 +121,7 @@ def __init__(self, llmobs_service): raise NotImplementedError("Failed to load dependencies for `{}` evaluator".format(self.LABEL)) from e finally: telemetry_writer.add_count_metric( - namespace=TELEMETRY_APM_PRODUCT.LLMOBS, + namespace=TELEMETRY_NAMESPACE.MLOBS, name="evaluators.init", value=1, tags=( @@ -143,7 +143,7 @@ def run_and_submit_evaluation(self, span_event: dict): return score_result_or_failure, metric_metadata = self.evaluate(span_event) telemetry_writer.add_count_metric( - TELEMETRY_APM_PRODUCT.LLMOBS, + TELEMETRY_NAMESPACE.MLOBS, "evaluators.run", 1, tags=( diff --git a/ddtrace/llmobs/_evaluators/runner.py b/ddtrace/llmobs/_evaluators/runner.py index 3d26998f1b4..ffbe4a58d64 100644 --- a/ddtrace/llmobs/_evaluators/runner.py +++ b/ddtrace/llmobs/_evaluators/runner.py @@ -7,7 +7,7 @@ from ddtrace.internal.logger import get_logger from ddtrace.internal.periodic import PeriodicService from ddtrace.internal.telemetry import telemetry_writer -from ddtrace.internal.telemetry.constants import TELEMETRY_APM_PRODUCT +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE from ddtrace.llmobs._evaluators.ragas.faithfulness import RagasFaithfulnessEvaluator from ddtrace.llmobs._evaluators.sampler import EvaluatorRunnerSampler @@ -56,7 +56,7 @@ def __init__(self, interval: float, llmobs_service=None, evaluators=None): raise e 
finally: telemetry_writer.add_count_metric( - namespace=TELEMETRY_APM_PRODUCT.LLMOBS, + namespace=TELEMETRY_NAMESPACE.MLOBS, name="evaluators.init", value=1, tags=( diff --git a/ddtrace/llmobs/_evaluators/sampler.py b/ddtrace/llmobs/_evaluators/sampler.py index 9dcb0759724..3598e90f7f3 100644 --- a/ddtrace/llmobs/_evaluators/sampler.py +++ b/ddtrace/llmobs/_evaluators/sampler.py @@ -9,8 +9,8 @@ from ddtrace._trace.sampling_rule import SamplingRule from ddtrace.internal.logger import get_logger from ddtrace.internal.telemetry import telemetry_writer -from ddtrace.internal.telemetry.constants import TELEMETRY_APM_PRODUCT from ddtrace.internal.telemetry.constants import TELEMETRY_LOG_LEVEL +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE logger = get_logger(__name__) @@ -67,7 +67,7 @@ def parsing_failed_because(msg, maybe_throw_this): TELEMETRY_LOG_LEVEL.ERROR, message="Evaluator sampling parsing failure because: {}".format(msg) ) telemetry_writer.add_count_metric( - namespace=TELEMETRY_APM_PRODUCT.LLMOBS, + namespace=TELEMETRY_NAMESPACE.MLOBS, name="evaluators.error", value=1, tags=(("reason", "sampling_rule_parsing_failure"),), @@ -104,7 +104,7 @@ def parsing_failed_because(msg, maybe_throw_this): span_name = rule.get(EvaluatorRunnerSamplingRule.SPAN_NAME_KEY, SamplingRule.NO_RULE) evaluator_label = rule.get(EvaluatorRunnerSamplingRule.EVALUATOR_LABEL_KEY, SamplingRule.NO_RULE) telemetry_writer.add_distribution_metric( - TELEMETRY_APM_PRODUCT.LLMOBS, + TELEMETRY_NAMESPACE.MLOBS, "evaluators.rule_sample_rate", sample_rate, tags=(("evaluator_label", evaluator_label), ("span_name", span_name)), diff --git a/ddtrace/settings/_otel_remapper.py b/ddtrace/settings/_otel_remapper.py index 8bdb313fdef..d3501c2e3fa 100644 --- a/ddtrace/settings/_otel_remapper.py +++ b/ddtrace/settings/_otel_remapper.py @@ -28,7 +28,7 @@ def __class_getitem__(self, item): from ..constants import VERSION_KEY from ..internal.logger import get_logger from ..internal.telemetry 
import telemetry_writer -from ..internal.telemetry.constants import TELEMETRY_NAMESPACE_TAG_TRACER +from ..internal.telemetry.constants import TELEMETRY_NAMESPACE log = get_logger(__name__) @@ -169,7 +169,7 @@ def otel_remapping(): if otel_env.startswith("OTEL_") and otel_env != "OTEL_PYTHON_CONTEXT": log.warning("OpenTelemetry configuration %s is not supported by Datadog.", otel_env) telemetry_writer.add_count_metric( - TELEMETRY_NAMESPACE_TAG_TRACER, + TELEMETRY_NAMESPACE.TRACERS, "otel.env.unsupported", 1, (("config_opentelemetry", otel_env.lower()),), @@ -185,7 +185,7 @@ def otel_remapping(): otel_value, ) telemetry_writer.add_count_metric( - TELEMETRY_NAMESPACE_TAG_TRACER, + TELEMETRY_NAMESPACE.TRACERS, "otel.env.hiding", 1, (("config_opentelemetry", otel_env.lower()), ("config_datadog", dd_env.lower())), @@ -205,7 +205,7 @@ def otel_remapping(): otel_value, ) telemetry_writer.add_count_metric( - TELEMETRY_NAMESPACE_TAG_TRACER, + TELEMETRY_NAMESPACE.TRACERS, "otel.env.invalid", 1, (("config_opentelemetry", otel_env.lower()), ("config_datadog", dd_env.lower())), diff --git a/tests/appsec/appsec/test_telemetry.py b/tests/appsec/appsec/test_telemetry.py index 8678820e8d6..6932a7bdba0 100644 --- a/tests/appsec/appsec/test_telemetry.py +++ b/tests/appsec/appsec/test_telemetry.py @@ -12,7 +12,7 @@ from ddtrace.appsec._processor import AppSecSpanProcessor from ddtrace.contrib.trace_utils import set_http_meta from ddtrace.ext import SpanTypes -from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE_TAG_APPSEC +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE from ddtrace.internal.telemetry.constants import TELEMETRY_TYPE_DISTRIBUTION from ddtrace.internal.telemetry.constants import TELEMETRY_TYPE_GENERATE_METRICS import tests.appsec.rules as rules @@ -27,7 +27,7 @@ def _assert_generate_metrics(metrics_result, is_rule_triggered=False, is_blocked_request=False): - generate_metrics = 
metrics_result[TELEMETRY_TYPE_GENERATE_METRICS][TELEMETRY_NAMESPACE_TAG_APPSEC] + generate_metrics = metrics_result[TELEMETRY_TYPE_GENERATE_METRICS][TELEMETRY_NAMESPACE.APPSEC.value] assert len(generate_metrics) == 2, "Expected 2 generate_metrics" for _metric_id, metric in generate_metrics.items(): if metric.name == "waf.requests": @@ -44,7 +44,7 @@ def _assert_generate_metrics(metrics_result, is_rule_triggered=False, is_blocked def _assert_distributions_metrics(metrics_result, is_rule_triggered=False, is_blocked_request=False): - distributions_metrics = metrics_result[TELEMETRY_TYPE_DISTRIBUTION][TELEMETRY_NAMESPACE_TAG_APPSEC] + distributions_metrics = metrics_result[TELEMETRY_TYPE_DISTRIBUTION][TELEMETRY_NAMESPACE.APPSEC.value] assert len(distributions_metrics) == 2, "Expected 2 distributions_metrics" for _metric_id, metric in distributions_metrics.items(): @@ -69,8 +69,8 @@ def test_metrics_when_appsec_doesnt_runs(telemetry_writer, tracer): rules.Config(), ) metrics_data = telemetry_writer._namespace._metrics_data - assert len(metrics_data[TELEMETRY_TYPE_GENERATE_METRICS][TELEMETRY_NAMESPACE_TAG_APPSEC]) == 0 - assert len(metrics_data[TELEMETRY_TYPE_DISTRIBUTION][TELEMETRY_NAMESPACE_TAG_APPSEC]) == 0 + assert len(metrics_data[TELEMETRY_TYPE_GENERATE_METRICS][TELEMETRY_NAMESPACE.APPSEC.value]) == 0 + assert len(metrics_data[TELEMETRY_TYPE_DISTRIBUTION][TELEMETRY_NAMESPACE.APPSEC.value]) == 0 def test_metrics_when_appsec_runs(telemetry_writer, tracer): @@ -136,7 +136,7 @@ def test_log_metric_error_ddwaf_timeout(telemetry_writer, tracer): assert len(list_metrics_logs) == 0 generate_metrics = telemetry_writer._namespace._metrics_data[TELEMETRY_TYPE_GENERATE_METRICS][ - TELEMETRY_NAMESPACE_TAG_APPSEC + TELEMETRY_NAMESPACE.APPSEC.value ] timeout_found = False diff --git a/tests/appsec/contrib_appsec/utils.py b/tests/appsec/contrib_appsec/utils.py index d3691e2bea3..d7aa077052f 100644 --- a/tests/appsec/contrib_appsec/utils.py +++ 
b/tests/appsec/contrib_appsec/utils.py @@ -1379,7 +1379,7 @@ def validate_top_function(trace): assert get_tag(http.STATUS_CODE) == str(code), (get_tag(http.STATUS_CODE), code) if code == 200: assert self.body(response).startswith(f"{endpoint} endpoint") - telemetry_calls = {(c.__name__, f"{ns}.{nm}", t): v for (c, ns, nm, v, t), _ in mocked.call_args_list} + telemetry_calls = {(c.__name__, f"{ns.value}.{nm}", t): v for (c, ns, nm, v, t), _ in mocked.call_args_list} if asm_enabled and ep_enabled and action_level > 0: self.check_rules_triggered([rule] * (1 if action_level == 2 else 2), root_span) assert self.check_for_stack_trace(root_span) diff --git a/tests/appsec/iast/test_telemetry.py b/tests/appsec/iast/test_telemetry.py index 106d9408815..139dab79918 100644 --- a/tests/appsec/iast/test_telemetry.py +++ b/tests/appsec/iast/test_telemetry.py @@ -28,7 +28,7 @@ from ddtrace.contrib.internal.sqlalchemy.patch import patch as sqli_sqlalchemy_patch from ddtrace.contrib.internal.sqlite3.patch import patch as sqli_sqlite3_patch from ddtrace.ext import SpanTypes -from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE_TAG_IAST +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE from ddtrace.internal.telemetry.constants import TELEMETRY_TYPE_GENERATE_METRICS from tests.appsec.iast.aspects.conftest import _iast_patched_module from tests.appsec.utils import asm_context @@ -38,7 +38,7 @@ def _assert_instrumented_sink(telemetry_writer, vuln_type): metrics_result = telemetry_writer._namespace._metrics_data - generate_metrics = metrics_result[TELEMETRY_TYPE_GENERATE_METRICS][TELEMETRY_NAMESPACE_TAG_IAST] + generate_metrics = metrics_result[TELEMETRY_TYPE_GENERATE_METRICS][TELEMETRY_NAMESPACE.IAST.value] assert len(generate_metrics) == 1, "Expected 1 generate_metrics" assert [metric.name for metric in generate_metrics.values()] == ["instrumented.sink"] assert [metric._tags for metric in generate_metrics.values()] == [(("vulnerability_type", 
vuln_type),)] @@ -87,7 +87,7 @@ def test_metric_executed_sink(no_request_sampling, telemetry_writer, caplog): metrics_result = telemetry_writer._namespace._metrics_data - generate_metrics = metrics_result[TELEMETRY_TYPE_GENERATE_METRICS][TELEMETRY_NAMESPACE_TAG_IAST].values() + generate_metrics = metrics_result[TELEMETRY_TYPE_GENERATE_METRICS][TELEMETRY_NAMESPACE.IAST.value].values() assert len(generate_metrics) == 1 # Remove potential sinks from internal usage of the lib (like http.client, used to communicate with # the agent) @@ -151,7 +151,7 @@ def test_metric_instrumented_propagation(no_request_sampling, telemetry_writer): _iast_patched_module("benchmarks.bm.iast_fixtures.str_methods") metrics_result = telemetry_writer._namespace._metrics_data - generate_metrics = metrics_result[TELEMETRY_TYPE_GENERATE_METRICS][TELEMETRY_NAMESPACE_TAG_IAST] + generate_metrics = metrics_result[TELEMETRY_TYPE_GENERATE_METRICS][TELEMETRY_NAMESPACE.IAST.value] # Remove potential sinks from internal usage of the lib (like http.client, used to communicate with # the agent) filtered_metrics = [metric.name for metric in generate_metrics.values() if metric.name != "executed.sink"] @@ -175,7 +175,7 @@ def test_metric_request_tainted(no_request_sampling, telemetry_writer): metrics_result = telemetry_writer._namespace._metrics_data - generate_metrics = metrics_result[TELEMETRY_TYPE_GENERATE_METRICS][TELEMETRY_NAMESPACE_TAG_IAST] + generate_metrics = metrics_result[TELEMETRY_TYPE_GENERATE_METRICS][TELEMETRY_NAMESPACE.IAST.value] # Remove potential sinks from internal usage of the lib (like http.client, used to communicate with # the agent) filtered_metrics = [metric.name for metric in generate_metrics.values() if metric.name != "executed.sink"] diff --git a/tests/telemetry/app.py b/tests/telemetry/app.py index 7390d9b5da6..ae2b9932c9f 100644 --- a/tests/telemetry/app.py +++ b/tests/telemetry/app.py @@ -1,7 +1,7 @@ from flask import Flask from ddtrace.internal.telemetry import 
telemetry_writer -from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE_TAG_TRACER +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE app = Flask(__name__) @@ -23,7 +23,7 @@ def starting_app_view(): @app.route("/count_metric") def metrics_view(): telemetry_writer.add_count_metric( - TELEMETRY_NAMESPACE_TAG_TRACER, + TELEMETRY_NAMESPACE.TRACERS, "test_metric", 1.0, ) diff --git a/tests/telemetry/test_telemetry_metrics.py b/tests/telemetry/test_telemetry_metrics.py index a3ea6051b8b..d1061d57770 100644 --- a/tests/telemetry/test_telemetry_metrics.py +++ b/tests/telemetry/test_telemetry_metrics.py @@ -3,8 +3,7 @@ from mock.mock import ANY from ddtrace.internal.telemetry.constants import TELEMETRY_LOG_LEVEL -from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE_TAG_APPSEC -from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE_TAG_TRACER +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE from ddtrace.internal.telemetry.constants import TELEMETRY_TYPE_DISTRIBUTION from ddtrace.internal.telemetry.constants import TELEMETRY_TYPE_GENERATE_METRICS from tests.utils import override_global_config @@ -13,7 +12,7 @@ def _assert_metric( test_agent, expected_metrics, - namespace=TELEMETRY_NAMESPACE_TAG_TRACER, + namespace=TELEMETRY_NAMESPACE.TRACERS, type_paypload=TELEMETRY_TYPE_GENERATE_METRICS, ): assert len(expected_metrics) > 0, "expected_metrics should not be empty" @@ -23,7 +22,7 @@ def _assert_metric( metrics = [] for event in metrics_events: - if event["payload"]["namespace"] == namespace: + if event["payload"]["namespace"] == namespace.value: for metric in event["payload"]["series"]: metric["tags"].sort() metrics.append(metric) @@ -49,7 +48,7 @@ def _assert_logs(test_agent, expected_logs): def test_send_metric_flush_and_generate_metrics_series_is_restarted(telemetry_writer, test_agent_session, mock_time): """Check the queue of metrics is empty after run periodic method of PeriodicService""" 
- telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE_TAG_TRACER, "test-metric2", 1, (("a", "b"),)) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.TRACERS, "test-metric2", 1, (("a", "b"),)) expected_series = [ { "common": True, @@ -62,7 +61,7 @@ def test_send_metric_flush_and_generate_metrics_series_is_restarted(telemetry_wr _assert_metric(test_agent_session, expected_series) - telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE_TAG_TRACER, "test-metric2", 1, (("a", "b"),)) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.TRACERS, "test-metric2", 1, (("a", "b"),)) _assert_metric(test_agent_session, expected_series) @@ -75,8 +74,8 @@ def test_send_metric_datapoint_equal_type_and_tags_yields_single_series( But in Datadog, a datapoint also includes tags, which declare all the various scopes the datapoint belongs to https://www.datadoghq.com/blog/the-power-of-tagged-metrics/#whats-a-metric-tag """ - telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE_TAG_TRACER, "test-metric", 2, (("a", "b"),)) - telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE_TAG_TRACER, "test-metric", 3, (("a", "b"),)) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.TRACERS, "test-metric", 2, (("a", "b"),)) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.TRACERS, "test-metric", 3, (("a", "b"),)) expected_series = [ { @@ -99,9 +98,9 @@ def test_send_metric_datapoint_equal_type_different_tags_yields_multiple_series( But in Datadog, a datapoint also includes tags, which declare all the various scopes the datapoint belongs to https://www.datadoghq.com/blog/the-power-of-tagged-metrics/#whats-a-metric-tag """ - telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE_TAG_TRACER, "test-metric", 4, (("a", "b"),)) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.TRACERS, "test-metric", 4, (("a", "b"),)) telemetry_writer.add_count_metric( - TELEMETRY_NAMESPACE_TAG_TRACER, + TELEMETRY_NAMESPACE.TRACERS, "test-metric", 5, ( @@ -109,7 +108,7 @@ def 
test_send_metric_datapoint_equal_type_different_tags_yields_multiple_series( ("c", "True"), ), ) - telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE_TAG_TRACER, "test-metric", 6, tuple()) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.TRACERS, "test-metric", 6, tuple()) expected_series = [ { @@ -144,8 +143,8 @@ def test_send_metric_datapoint_with_different_types(telemetry_writer, test_agent But in Datadog, a datapoint also includes tags, which declare all the various scopes the datapoint belongs to https://www.datadoghq.com/blog/the-power-of-tagged-metrics/#whats-a-metric-tag """ - telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE_TAG_TRACER, "test-metric", 1, (("a", "b"),)) - telemetry_writer.add_gauge_metric(TELEMETRY_NAMESPACE_TAG_TRACER, "test-metric", 1, (("a", "b"),)) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.TRACERS, "test-metric", 1, (("a", "b"),)) + telemetry_writer.add_gauge_metric(TELEMETRY_NAMESPACE.TRACERS, "test-metric", 1, (("a", "b"),)) expected_series = [ {"common": True, "metric": "test-metric", "points": [[1642544540, 1.0]], "tags": ["a:b"], "type": "count"}, @@ -162,11 +161,11 @@ def test_send_metric_datapoint_with_different_types(telemetry_writer, test_agent def test_send_tracers_count_metric(telemetry_writer, test_agent_session, mock_time): - telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE_TAG_TRACER, "test-metric", 1, (("a", "b"),)) - telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE_TAG_TRACER, "test-metric", 1, (("a", "b"),)) - telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE_TAG_TRACER, "test-metric", 1, tuple()) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.TRACERS, "test-metric", 1, (("a", "b"),)) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.TRACERS, "test-metric", 1, (("a", "b"),)) + telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE.TRACERS, "test-metric", 1, tuple()) telemetry_writer.add_count_metric( - TELEMETRY_NAMESPACE_TAG_TRACER, + TELEMETRY_NAMESPACE.TRACERS, 
"test-metric", 1, ( @@ -203,13 +202,13 @@ def test_send_tracers_count_metric(telemetry_writer, test_agent_session, mock_ti def test_send_appsec_rate_metric(telemetry_writer, test_agent_session, mock_time): telemetry_writer.add_rate_metric( - TELEMETRY_NAMESPACE_TAG_APPSEC, + TELEMETRY_NAMESPACE.APPSEC, "test-metric", 6, (("hi", "HELLO"), ("NAME", "CANDY")), ) - telemetry_writer.add_rate_metric(TELEMETRY_NAMESPACE_TAG_APPSEC, "test-metric", 6, tuple()) - telemetry_writer.add_rate_metric(TELEMETRY_NAMESPACE_TAG_APPSEC, "test-metric", 6, tuple()) + telemetry_writer.add_rate_metric(TELEMETRY_NAMESPACE.APPSEC, "test-metric", 6, tuple()) + telemetry_writer.add_rate_metric(TELEMETRY_NAMESPACE.APPSEC, "test-metric", 6, tuple()) expected_series = [ { @@ -230,12 +229,12 @@ def test_send_appsec_rate_metric(telemetry_writer, test_agent_session, mock_time }, ] - _assert_metric(test_agent_session, expected_series, namespace=TELEMETRY_NAMESPACE_TAG_APPSEC) + _assert_metric(test_agent_session, expected_series, namespace=TELEMETRY_NAMESPACE.APPSEC) def test_send_appsec_gauge_metric(telemetry_writer, test_agent_session, mock_time): telemetry_writer.add_gauge_metric( - TELEMETRY_NAMESPACE_TAG_APPSEC, + TELEMETRY_NAMESPACE.APPSEC, "test-metric", 5, ( @@ -243,8 +242,8 @@ def test_send_appsec_gauge_metric(telemetry_writer, test_agent_session, mock_tim ("NAME", "CANDY"), ), ) - telemetry_writer.add_gauge_metric(TELEMETRY_NAMESPACE_TAG_APPSEC, "test-metric", 5, (("a", "b"),)) - telemetry_writer.add_gauge_metric(TELEMETRY_NAMESPACE_TAG_APPSEC, "test-metric", 6, tuple()) + telemetry_writer.add_gauge_metric(TELEMETRY_NAMESPACE.APPSEC, "test-metric", 5, (("a", "b"),)) + telemetry_writer.add_gauge_metric(TELEMETRY_NAMESPACE.APPSEC, "test-metric", 6, tuple()) expected_series = [ { @@ -272,13 +271,13 @@ def test_send_appsec_gauge_metric(telemetry_writer, test_agent_session, mock_tim "type": "gauge", }, ] - _assert_metric(test_agent_session, expected_series, 
namespace=TELEMETRY_NAMESPACE_TAG_APPSEC) + _assert_metric(test_agent_session, expected_series, namespace=TELEMETRY_NAMESPACE.APPSEC) def test_send_appsec_distributions_metric(telemetry_writer, test_agent_session, mock_time): - telemetry_writer.add_distribution_metric(TELEMETRY_NAMESPACE_TAG_APPSEC, "test-metric", 4, tuple()) - telemetry_writer.add_distribution_metric(TELEMETRY_NAMESPACE_TAG_APPSEC, "test-metric", 5, tuple()) - telemetry_writer.add_distribution_metric(TELEMETRY_NAMESPACE_TAG_APPSEC, "test-metric", 6, tuple()) + telemetry_writer.add_distribution_metric(TELEMETRY_NAMESPACE.APPSEC, "test-metric", 4, tuple()) + telemetry_writer.add_distribution_metric(TELEMETRY_NAMESPACE.APPSEC, "test-metric", 5, tuple()) + telemetry_writer.add_distribution_metric(TELEMETRY_NAMESPACE.APPSEC, "test-metric", 6, tuple()) expected_series = [ { @@ -290,16 +289,16 @@ def test_send_appsec_distributions_metric(telemetry_writer, test_agent_session, _assert_metric( test_agent_session, expected_series, - namespace=TELEMETRY_NAMESPACE_TAG_APPSEC, + namespace=TELEMETRY_NAMESPACE.APPSEC, type_paypload=TELEMETRY_TYPE_DISTRIBUTION, ) def test_send_metric_flush_and_distributions_series_is_restarted(telemetry_writer, test_agent_session, mock_time): """Check the queue of metrics is empty after run periodic method of PeriodicService""" - telemetry_writer.add_distribution_metric(TELEMETRY_NAMESPACE_TAG_APPSEC, "test-metric", 4, tuple()) - telemetry_writer.add_distribution_metric(TELEMETRY_NAMESPACE_TAG_APPSEC, "test-metric", 5, tuple()) - telemetry_writer.add_distribution_metric(TELEMETRY_NAMESPACE_TAG_APPSEC, "test-metric", 6, tuple()) + telemetry_writer.add_distribution_metric(TELEMETRY_NAMESPACE.APPSEC, "test-metric", 4, tuple()) + telemetry_writer.add_distribution_metric(TELEMETRY_NAMESPACE.APPSEC, "test-metric", 5, tuple()) + telemetry_writer.add_distribution_metric(TELEMETRY_NAMESPACE.APPSEC, "test-metric", 6, tuple()) expected_series = [ { "metric": "test-metric", @@ -311,7 +310,7 
@@ def test_send_metric_flush_and_distributions_series_is_restarted(telemetry_write _assert_metric( test_agent_session, expected_series, - namespace=TELEMETRY_NAMESPACE_TAG_APPSEC, + namespace=TELEMETRY_NAMESPACE.APPSEC, type_paypload=TELEMETRY_TYPE_DISTRIBUTION, ) @@ -323,12 +322,12 @@ def test_send_metric_flush_and_distributions_series_is_restarted(telemetry_write } ] - telemetry_writer.add_distribution_metric(TELEMETRY_NAMESPACE_TAG_APPSEC, "test-metric", 1, tuple()) + telemetry_writer.add_distribution_metric(TELEMETRY_NAMESPACE.APPSEC, "test-metric", 1, tuple()) _assert_metric( test_agent_session, expected_series, - namespace=TELEMETRY_NAMESPACE_TAG_APPSEC, + namespace=TELEMETRY_NAMESPACE.APPSEC, type_paypload=TELEMETRY_TYPE_DISTRIBUTION, ) diff --git a/tests/tracer/test_processors.py b/tests/tracer/test_processors.py index d06716e1825..a752275f3ab 100644 --- a/tests/tracer/test_processors.py +++ b/tests/tracer/test_processors.py @@ -26,6 +26,7 @@ from ddtrace.internal.processor.endpoint_call_counter import EndpointCallCounterProcessor from ddtrace.internal.sampling import SamplingMechanism from ddtrace.internal.sampling import SpanSamplingRule +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE from tests.utils import DummyTracer from tests.utils import DummyWriter from tests.utils import override_global_config @@ -353,20 +354,34 @@ def test_span_creation_metrics(): mock_tm.assert_has_calls( [ - mock.call("tracers", "spans_created", 100, tags=(("integration_name", "datadog"),)), - mock.call("tracers", "spans_finished", 100, tags=(("integration_name", "datadog"),)), - mock.call("tracers", "spans_created", 100, tags=(("integration_name", "datadog"),)), - mock.call("tracers", "spans_finished", 100, tags=(("integration_name", "datadog"),)), - mock.call("tracers", "spans_created", 100, tags=(("integration_name", "datadog"),)), - mock.call("tracers", "spans_finished", 100, tags=(("integration_name", "datadog"),)), + mock.call( + 
TELEMETRY_NAMESPACE.TRACERS, "spans_created", 100, tags=(("integration_name", "datadog"),) + ), + mock.call( + TELEMETRY_NAMESPACE.TRACERS, "spans_finished", 100, tags=(("integration_name", "datadog"),) + ), + mock.call( + TELEMETRY_NAMESPACE.TRACERS, "spans_created", 100, tags=(("integration_name", "datadog"),) + ), + mock.call( + TELEMETRY_NAMESPACE.TRACERS, "spans_finished", 100, tags=(("integration_name", "datadog"),) + ), + mock.call( + TELEMETRY_NAMESPACE.TRACERS, "spans_created", 100, tags=(("integration_name", "datadog"),) + ), + mock.call( + TELEMETRY_NAMESPACE.TRACERS, "spans_finished", 100, tags=(("integration_name", "datadog"),) + ), ] ) mock_tm.reset_mock() aggr.shutdown(None) mock_tm.assert_has_calls( [ - mock.call("tracers", "spans_created", 1, tags=(("integration_name", "datadog"),)), - mock.call("tracers", "spans_finished", 1, tags=(("integration_name", "datadog"),)), + mock.call(TELEMETRY_NAMESPACE.TRACERS, "spans_created", 1, tags=(("integration_name", "datadog"),)), + mock.call( + TELEMETRY_NAMESPACE.TRACERS, "spans_finished", 1, tags=(("integration_name", "datadog"),) + ), ] ) From c734bdbe9107fe2ac6d8e8b57dfd1cec985d415b Mon Sep 17 00:00:00 2001 From: Zachary Groves <32471391+ZStriker19@users.noreply.github.com> Date: Wed, 15 Jan 2025 11:25:08 -0500 Subject: [PATCH 03/16] feat(propagation): add DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT to handle x-org propagation (#11631) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Configuration: DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT Values: - continue (default): The tracing library continues the trace from the incoming headers, if present. Also, incoming baggage is propagated. - restart: The tracing library always starts a new trace with a new trace-id. If there are distributed tracing headers, a span link will be added to reference the trace context. Also, incoming baggage is propagated. 
- ignore: The tracing library always starts a new trace with a new trace-id without creating any span links. Also, incoming baggage is dropped. Note: We do not need to implement this at the moment because the workaround we have in place DD_TRACE_PROPAGATION_STYLE_EXTRACT=none performs this exact function of completely ignoring incoming trace context headers and baggage headers. If we receive feedback, we can add this new option that we maintain. Result ✅ Head services: Customers can configure these services with the restart configuration so that they are always the root span, regardless of incoming distributed tracing headers. ✅ Internal services: By default, internal services will remain unchanged and continue to report complete traces to Datadog. If a user has configured an internal service with the restart configuration, this will lead to partial traces. Thankfully with Span Links we can easily direct the customer to the upstream trace (if it was sampled). ❌ Sometimes-head services: This solution does not solve for services that are sometimes head services and sometimes not. The best solution for these services is to set the restart configuration so that a new trace can begin and sampling priority can be recalculated, and in the case that this was not a head service then the upstream service can be found with a span link. ❌ Reentrant systems: This solution does not improve this scenario no matter how the reentrant service is configured. Under the continue configuration, the reentrant service is part of the same trace as the original root span but it is orphaned. Under the restart configuration, a new trace will be started and the span will be decorated with a span link containing the original trace-id but the contained span-id would not point to a span tracked in their Datadog organization. Note: This contains a fix, allowing only baggage to be propagated. 
## Checklist - [x] PR author has checked that all the criteria below are met - The PR description includes an overview of the change - The PR description articulates the motivation for the change - The change includes tests OR the PR description describes a testing strategy - The PR description notes risks associated with the change, if any - Newly-added code is easy to change - The change follows the [library release note guidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html) - The change includes or references documentation updates if necessary - Backport labels are set (if [applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)) ## Reviewer Checklist - [x] Reviewer has checked that all the criteria below are met - Title is accurate - All changes are related to the pull request's stated goal - Avoids breaking [API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces) changes - Testing strategy adequately addresses listed risks - Newly-added code is easy to change - Release note makes sense to a user of the library - If necessary, author has acknowledged and discussed the performance implications of this PR as reported in the benchmarks PR comment - Backport labels are set in a manner that is consistent with the [release branch maintenance policy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting) --------- Co-authored-by: erikayasuda <153395705+erikayasuda@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Alberto Vara Co-authored-by: Christophe Papazian <114495376+christophe-papazian@users.noreply.github.com> Co-authored-by: Munir Abdinur --- ddtrace/_trace/tracer.py | 3 +- ddtrace/contrib/trace_utils.py | 13 +- ddtrace/internal/constants.py | 4 + ddtrace/propagation/http.py | 60 +++-- ddtrace/settings/config.py | 26 +- docs/configuration.rst | 20 ++ ...ion_behavior_extract-3d16765cfd07485b.yaml | 
13 + tests/telemetry/test_writer.py | 3 + tests/tracer/test_propagation.py | 227 +++++++++++++++++- tests/utils.py | 1 + 10 files changed, 336 insertions(+), 34 deletions(-) create mode 100644 releasenotes/notes/propagation_behavior_extract-3d16765cfd07485b.yaml diff --git a/ddtrace/_trace/tracer.py b/ddtrace/_trace/tracer.py index 502e12ece07..9752d82ff83 100644 --- a/ddtrace/_trace/tracer.py +++ b/ddtrace/_trace/tracer.py @@ -775,8 +775,7 @@ def _start_span( service = config.service_mapping.get(service, service) links = context._span_links if not parent else [] - - if trace_id: + if trace_id or links or context._baggage: # child_of a non-empty context, so either a local child span or from a remote context span = Span( name=name, diff --git a/ddtrace/contrib/trace_utils.py b/ddtrace/contrib/trace_utils.py index db8509d8c35..644475b02ab 100644 --- a/ddtrace/contrib/trace_utils.py +++ b/ddtrace/contrib/trace_utils.py @@ -565,9 +565,9 @@ def activate_distributed_headers(tracer, int_config=None, request_headers=None, context = HTTPPropagator.extract(request_headers) # Only need to activate the new context if something was propagated - if not context.trace_id: + # The new context must have one of these values in order for it to be activated + if not context.trace_id and not context._baggage and not context._span_links: return None - # Do not reactivate a context with the same trace id # DEV: An example could be nested web frameworks, when one layer already # parsed request headers and activated them. 
@@ -577,7 +577,14 @@ def activate_distributed_headers(tracer, int_config=None, request_headers=None, # app = Flask(__name__) # Traced via Flask instrumentation # app = DDWSGIMiddleware(app) # Extra layer on top for WSGI current_context = tracer.current_trace_context() - if current_context and current_context.trace_id == context.trace_id: + + # We accept incoming contexts with only baggage or only span_links, however if we + # already have a current_context then an incoming context not + # containing a trace_id or containing the same trace_id + # should not be activated. + if current_context and ( + not context.trace_id or (context.trace_id and context.trace_id == current_context.trace_id) + ): log.debug( "will not activate extracted Context(trace_id=%r, span_id=%r), a context with that trace id is already active", # noqa: E501 context.trace_id, diff --git a/ddtrace/internal/constants.py b/ddtrace/internal/constants.py index 4efdc754ef3..c4255035c41 100644 --- a/ddtrace/internal/constants.py +++ b/ddtrace/internal/constants.py @@ -19,6 +19,10 @@ _PROPAGATION_STYLE_NONE, _PROPAGATION_STYLE_BAGGAGE, ) +_PROPAGATION_BEHAVIOR_CONTINUE = "continue" +_PROPAGATION_BEHAVIOR_IGNORE = "ignore" +_PROPAGATION_BEHAVIOR_RESTART = "restart" +_PROPAGATION_BEHAVIOR_DEFAULT = _PROPAGATION_BEHAVIOR_CONTINUE W3C_TRACESTATE_KEY = "tracestate" W3C_TRACEPARENT_KEY = "traceparent" W3C_TRACESTATE_PARENT_ID_KEY = "p" diff --git a/ddtrace/propagation/http.py b/ddtrace/propagation/http.py index 563ee838d84..0cd5b69db46 100644 --- a/ddtrace/propagation/http.py +++ b/ddtrace/propagation/http.py @@ -40,6 +40,7 @@ from ..internal._tagset import decode_tagset_string from ..internal._tagset import encode_tagset_values from ..internal.compat import ensure_text +from ..internal.constants import _PROPAGATION_BEHAVIOR_RESTART from ..internal.constants import _PROPAGATION_STYLE_BAGGAGE from ..internal.constants import _PROPAGATION_STYLE_NONE from ..internal.constants import 
_PROPAGATION_STYLE_W3C_TRACECONTEXT @@ -974,12 +975,12 @@ class HTTPPropagator(object): """ @staticmethod - def _extract_configured_contexts_avail(normalized_headers): + def _extract_configured_contexts_avail(normalized_headers: Dict[str, str]) -> Tuple[List[Context], List[str]]: contexts = [] styles_w_ctx = [] for prop_style in config._propagation_style_extract: propagator = _PROP_STYLES[prop_style] - context = propagator._extract(normalized_headers) + context = propagator._extract(normalized_headers) # type: ignore # baggage is handled separately if prop_style == _PROPAGATION_STYLE_BAGGAGE: continue @@ -988,6 +989,24 @@ def _extract_configured_contexts_avail(normalized_headers): styles_w_ctx.append(prop_style) return contexts, styles_w_ctx + @staticmethod + def _context_to_span_link(context: Context, style: str, reason: str) -> Optional[SpanLink]: + # encoding expects at least trace_id and span_id + if context.span_id and context.trace_id: + return SpanLink( + context.trace_id, + context.span_id, + flags=1 if context.sampling_priority and context.sampling_priority > 0 else 0, + tracestate=( + context._meta.get(W3C_TRACESTATE_KEY, "") if style == _PROPAGATION_STYLE_W3C_TRACECONTEXT else None + ), + attributes={ + "reason": reason, + "context_headers": style, + }, + ) + return None + @staticmethod def _resolve_contexts(contexts, styles_w_ctx, normalized_headers): primary_context = contexts[0] @@ -996,23 +1015,14 @@ def _resolve_contexts(contexts, styles_w_ctx, normalized_headers): for context in contexts[1:]: style_w_ctx = styles_w_ctx[contexts.index(context)] # encoding expects at least trace_id and span_id - if context.span_id and context.trace_id and context.trace_id != primary_context.trace_id: - links.append( - SpanLink( - context.trace_id, - context.span_id, - flags=1 if context.sampling_priority and context.sampling_priority > 0 else 0, - tracestate=( - context._meta.get(W3C_TRACESTATE_KEY, "") - if style_w_ctx == _PROPAGATION_STYLE_W3C_TRACECONTEXT - else 
None - ), - attributes={ - "reason": "terminated_context", - "context_headers": style_w_ctx, - }, - ) + if context.trace_id and context.trace_id != primary_context.trace_id: + link = HTTPPropagator._context_to_span_link( + context, + style_w_ctx, + "terminated_context", ) + if link: + links.append(link) # if trace_id matches and the propagation style is tracecontext # add the tracestate to the primary context elif style_w_ctx == _PROPAGATION_STYLE_W3C_TRACECONTEXT: @@ -1130,17 +1140,19 @@ def my_controller(url, headers): :param dict headers: HTTP headers to extract tracing attributes. :return: New `Context` with propagated attributes. """ + context = Context() if not headers: - return Context() + return context try: + style = "" normalized_headers = {name.lower(): v for name, v in headers.items()} - context = Context() # tracer configured to extract first only if config._propagation_extract_first: # loop through the extract propagation styles specified in order, return whatever context we get first for prop_style in config._propagation_style_extract: propagator = _PROP_STYLES[prop_style] context = propagator._extract(normalized_headers) + style = prop_style if config.propagation_http_baggage_enabled is True: _attach_baggage_to_context(normalized_headers, context) break @@ -1148,6 +1160,9 @@ def my_controller(url, headers): # loop through all extract propagation styles else: contexts, styles_w_ctx = HTTPPropagator._extract_configured_contexts_avail(normalized_headers) + # check that styles_w_ctx is not empty + if styles_w_ctx: + style = styles_w_ctx[0] if contexts: context = HTTPPropagator._resolve_contexts(contexts, styles_w_ctx, normalized_headers) @@ -1159,9 +1174,12 @@ def my_controller(url, headers): baggage_context = _BaggageHeader._extract(normalized_headers) if baggage_context._baggage != {}: if context: - context._baggage = baggage_context._baggage + context._baggage = baggage_context.get_all_baggage_items() else: context = baggage_context + if 
config._propagation_behavior_extract == _PROPAGATION_BEHAVIOR_RESTART: + link = HTTPPropagator._context_to_span_link(context, style, "propagation_behavior_extract") + context = Context(baggage=context.get_all_baggage_items(), span_links=[link] if link else []) return context diff --git a/ddtrace/settings/config.py b/ddtrace/settings/config.py index 65baf99ccb3..6ee75fbe6d8 100644 --- a/ddtrace/settings/config.py +++ b/ddtrace/settings/config.py @@ -20,7 +20,10 @@ from ddtrace.vendor.debtcollector import deprecate from ..internal import gitmetadata +from ..internal.constants import _PROPAGATION_BEHAVIOR_DEFAULT +from ..internal.constants import _PROPAGATION_BEHAVIOR_IGNORE from ..internal.constants import _PROPAGATION_STYLE_DEFAULT +from ..internal.constants import _PROPAGATION_STYLE_NONE from ..internal.constants import DEFAULT_BUFFER_SIZE from ..internal.constants import DEFAULT_MAX_PAYLOAD_SIZE from ..internal.constants import DEFAULT_PROCESSING_INTERVAL @@ -529,11 +532,23 @@ def __init__(self): # Propagation styles # DD_TRACE_PROPAGATION_STYLE_EXTRACT and DD_TRACE_PROPAGATION_STYLE_INJECT # take precedence over DD_TRACE_PROPAGATION_STYLE - self._propagation_style_extract = _parse_propagation_styles( - _get_config( - ["DD_TRACE_PROPAGATION_STYLE_EXTRACT", "DD_TRACE_PROPAGATION_STYLE"], _PROPAGATION_STYLE_DEFAULT - ) + # if DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT is set to ignore + # we set DD_TRACE_PROPAGATION_STYLE_EXTRACT to [_PROPAGATION_STYLE_NONE] since no extraction is needed + self._propagation_behavior_extract = _get_config( + ["DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT"], _PROPAGATION_BEHAVIOR_DEFAULT, self._lower ) + if self._propagation_behavior_extract != _PROPAGATION_BEHAVIOR_IGNORE: + self._propagation_style_extract = _parse_propagation_styles( + _get_config( + ["DD_TRACE_PROPAGATION_STYLE_EXTRACT", "DD_TRACE_PROPAGATION_STYLE"], _PROPAGATION_STYLE_DEFAULT + ) + ) + else: + log.debug( + """DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT is set to ignore, + 
setting DD_TRACE_PROPAGATION_STYLE_EXTRACT to empty list""" ) + self._propagation_style_extract = [_PROPAGATION_STYLE_NONE] self._propagation_style_inject = _parse_propagation_styles( _get_config(["DD_TRACE_PROPAGATION_STYLE_INJECT", "DD_TRACE_PROPAGATION_STYLE"], _PROPAGATION_STYLE_DEFAULT) ) @@ -978,3 +993,6 @@ def convert_rc_trace_sampling_rules(self, rc_rules: List[Dict[str, Any]]) -> Opt return json.dumps(rc_rules) else: return None + + def _lower(self, value): return value.lower() diff --git a/docs/configuration.rst b/docs/configuration.rst index 35bb63fac20..455272f318d 100644 --- a/docs/configuration.rst +++ b/docs/configuration.rst @@ -368,6 +368,26 @@ Trace Context propagation version_added: v1.7.0: The ``b3multi`` propagation style was added and ``b3`` was deprecated in favor it. + DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT: + default: | + ``continue`` + + description: | + String for how to handle incoming request headers that are extracted for propagation of trace info. + + The supported values are ``continue``, ``restart``, and ``ignore``. + + After extracting the headers for propagation, this configuration determines what is done with them. + + The default value is ``continue`` which always propagates valid headers. + ``ignore`` ignores all incoming headers and ``restart`` turns the first extracted valid propagation header + into a span link and propagates baggage if present. + + Example: ``DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT="ignore"`` to ignore all incoming headers and to start a root span without a parent. 
+ + version_added: + v2.20.0: + DD_TRACE_PROPAGATION_STYLE_INJECT: default: | ``tracecontext,datadog`` diff --git a/releasenotes/notes/propagation_behavior_extract-3d16765cfd07485b.yaml b/releasenotes/notes/propagation_behavior_extract-3d16765cfd07485b.yaml new file mode 100644 index 00000000000..6e1def89993 --- /dev/null +++ b/releasenotes/notes/propagation_behavior_extract-3d16765cfd07485b.yaml @@ -0,0 +1,13 @@ +--- +features: + - | + propagation: Introduces the environment variable ``DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT`` + to control the behavior of the extraction of distributed tracing headers. The values, ``continue`` (default), + ``ignore``, and ``restart``, are supported. The default value is ``continue`` which has no change from the current behavior of always propagating valid headers. + ``ignore`` ignores all incoming headers, never propagating the incoming trace information + and ``restart`` turns the first extracted propagation style into a span link and propagates baggage if extracted. + +fixes: + - | + propagation: Fixes an issue where the baggage header was not being propagated when the baggage header was the only header extracted. + With this fix, the baggage header is now propagated when it is the only header extracted. diff --git a/tests/telemetry/test_writer.py b/tests/telemetry/test_writer.py index 69fe7969fc7..8d4030c84a9 100644 --- a/tests/telemetry/test_writer.py +++ b/tests/telemetry/test_writer.py @@ -268,7 +268,9 @@ def test_app_started_event_configuration_override(test_agent_session, run_python env["DD_SPAN_SAMPLING_RULES_FILE"] = str(file) env["DD_TRACE_PARTIAL_FLUSH_ENABLED"] = "false" env["DD_TRACE_PARTIAL_FLUSH_MIN_SPANS"] = "3" + env["DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT"] = "restart" env["DD_SITE"] = "datadoghq.com" + # By default telemetry collection is enabled after 10 seconds, so we either need to # to sleep for 10 seconds or manually call _app_started() to generate the app started event. 
# This delay allows us to collect start up errors and dynamic configurations @@ -446,6 +448,7 @@ def test_app_started_event_configuration_override(test_agent_session, run_python {"name": "DD_TRACE_OTEL_ENABLED", "origin": "env_var", "value": True}, {"name": "DD_TRACE_PARTIAL_FLUSH_ENABLED", "origin": "env_var", "value": False}, {"name": "DD_TRACE_PARTIAL_FLUSH_MIN_SPANS", "origin": "env_var", "value": 3}, + {"name": "DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT", "origin": "env_var", "value": "restart"}, {"name": "DD_TRACE_PROPAGATION_EXTRACT_FIRST", "origin": "default", "value": False}, {"name": "DD_TRACE_PROPAGATION_HTTP_BAGGAGE_ENABLED", "origin": "default", "value": False}, {"name": "DD_TRACE_PROPAGATION_STYLE_EXTRACT", "origin": "env_var", "value": "tracecontext"}, diff --git a/tests/tracer/test_propagation.py b/tests/tracer/test_propagation.py index 0d4c5d7c01d..c15439ae825 100644 --- a/tests/tracer/test_propagation.py +++ b/tests/tracer/test_propagation.py @@ -16,6 +16,8 @@ from ddtrace.constants import AUTO_REJECT from ddtrace.constants import USER_KEEP from ddtrace.constants import USER_REJECT +from ddtrace.internal.constants import _PROPAGATION_BEHAVIOR_IGNORE +from ddtrace.internal.constants import _PROPAGATION_BEHAVIOR_RESTART from ddtrace.internal.constants import _PROPAGATION_STYLE_BAGGAGE from ddtrace.internal.constants import _PROPAGATION_STYLE_NONE from ddtrace.internal.constants import _PROPAGATION_STYLE_W3C_TRACECONTEXT @@ -1529,6 +1531,9 @@ def test_extract_tracecontext(headers, expected_context): HTTP_HEADER_PARENT_ID: "parent_id", HTTP_HEADER_SAMPLING_PRIORITY: "sample", } + +DATADOG_BAGGAGE_HEADERS_VALID = {**DATADOG_HEADERS_VALID, "baggage": "key1=val1,key2=val2"} + B3_HEADERS_VALID = { _HTTP_HEADER_B3_TRACE_ID: "80f198ee56343ba864fe8b2a57d3eff7", _HTTP_HEADER_B3_SPAN_ID: "a2fb4a1d1a96d312", @@ -1582,6 +1587,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_datadog_default", None, + None, DATADOG_HEADERS_VALID, { "trace_id": 
13088165645273925489, @@ -1594,6 +1600,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_datadog_default_wsgi", None, + None, {get_wsgi_header(name): value for name, value in DATADOG_HEADERS_VALID.items()}, { "trace_id": 13088165645273925489, @@ -1606,6 +1613,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_datadog_no_priority", None, + None, DATADOG_HEADERS_VALID_NO_PRIORITY, { "trace_id": 13088165645273925489, @@ -1618,12 +1626,14 @@ def test_extract_tracecontext(headers, expected_context): ( "invalid_datadog", [PROPAGATION_STYLE_DATADOG], + None, DATADOG_HEADERS_INVALID, CONTEXT_EMPTY, ), ( "valid_datadog_explicit_style", [PROPAGATION_STYLE_DATADOG], + None, DATADOG_HEADERS_VALID, { "trace_id": 13088165645273925489, @@ -1636,6 +1646,7 @@ def test_extract_tracecontext(headers, expected_context): ( "invalid_datadog_negative_trace_id", [PROPAGATION_STYLE_DATADOG], + None, { HTTP_HEADER_TRACE_ID: "-1", HTTP_HEADER_PARENT_ID: "5678", @@ -1647,6 +1658,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_datadog_explicit_style_wsgi", [PROPAGATION_STYLE_DATADOG], + None, {get_wsgi_header(name): value for name, value in DATADOG_HEADERS_VALID.items()}, { "trace_id": 13088165645273925489, @@ -1659,6 +1671,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_datadog_all_styles", [PROPAGATION_STYLE_DATADOG, PROPAGATION_STYLE_B3_MULTI, PROPAGATION_STYLE_B3_SINGLE], + None, DATADOG_HEADERS_VALID, { "trace_id": 13088165645273925489, @@ -1671,13 +1684,29 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_datadog_no_datadog_style", [PROPAGATION_STYLE_B3_MULTI], + None, DATADOG_HEADERS_VALID, CONTEXT_EMPTY, ), + ( + "valid_datadog_and_baggage_default", + None, + None, + DATADOG_BAGGAGE_HEADERS_VALID, + { + "trace_id": 13088165645273925489, + "span_id": 5678, + "sampling_priority": 1, + "dd_origin": "synthetics", + "meta": {"_dd.p.dm": "-3"}, + "baggage": {"key1": "val1", "key2": 
"val2"}, + }, + ), # B3 headers ( "valid_b3_simple", [PROPAGATION_STYLE_B3_MULTI], + None, B3_HEADERS_VALID, { "trace_id": TRACE_ID, @@ -1689,6 +1718,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_b3_wsgi", [PROPAGATION_STYLE_B3_MULTI], + None, {get_wsgi_header(name): value for name, value in B3_HEADERS_VALID.items()}, { "trace_id": TRACE_ID, @@ -1700,6 +1730,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_b3_flags", [PROPAGATION_STYLE_B3_MULTI], + None, { _HTTP_HEADER_B3_TRACE_ID: B3_HEADERS_VALID[_HTTP_HEADER_B3_TRACE_ID], _HTTP_HEADER_B3_SPAN_ID: B3_HEADERS_VALID[_HTTP_HEADER_B3_SPAN_ID], @@ -1715,6 +1746,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_b3_with_parent_id", [PROPAGATION_STYLE_B3_MULTI], + None, { _HTTP_HEADER_B3_TRACE_ID: B3_HEADERS_VALID[_HTTP_HEADER_B3_TRACE_ID], _HTTP_HEADER_B3_SPAN_ID: B3_HEADERS_VALID[_HTTP_HEADER_B3_SPAN_ID], @@ -1731,6 +1763,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_b3_only_trace_and_span_id", [PROPAGATION_STYLE_B3_MULTI], + None, { _HTTP_HEADER_B3_TRACE_ID: B3_HEADERS_VALID[_HTTP_HEADER_B3_TRACE_ID], _HTTP_HEADER_B3_SPAN_ID: B3_HEADERS_VALID[_HTTP_HEADER_B3_SPAN_ID], @@ -1745,6 +1778,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_b3_only_trace_id", [PROPAGATION_STYLE_B3_MULTI], + None, { _HTTP_HEADER_B3_TRACE_ID: B3_HEADERS_VALID[_HTTP_HEADER_B3_TRACE_ID], }, @@ -1758,24 +1792,28 @@ def test_extract_tracecontext(headers, expected_context): ( "invalid_b3", [PROPAGATION_STYLE_B3_MULTI], + None, B3_HEADERS_INVALID, CONTEXT_EMPTY, ), ( "valid_b3_default_style", None, + None, B3_HEADERS_VALID, CONTEXT_EMPTY, ), ( "valid_b3_no_b3_style", [PROPAGATION_STYLE_B3_SINGLE], + None, B3_HEADERS_VALID, CONTEXT_EMPTY, ), ( "valid_b3_all_styles", [PROPAGATION_STYLE_DATADOG, PROPAGATION_STYLE_B3_MULTI, PROPAGATION_STYLE_B3_SINGLE], + None, B3_HEADERS_VALID, { "trace_id": TRACE_ID, @@ -1788,6 +1826,7 @@ def 
test_extract_tracecontext(headers, expected_context): ( "valid_b3_single_header_simple", [PROPAGATION_STYLE_B3_SINGLE], + None, B3_SINGLE_HEADERS_VALID, { "trace_id": TRACE_ID, @@ -1799,6 +1838,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_b3_single_header_simple", [PROPAGATION_STYLE_B3_SINGLE], + None, { get_wsgi_header(_HTTP_HEADER_B3_SINGLE): B3_SINGLE_HEADERS_VALID[_HTTP_HEADER_B3_SINGLE], }, @@ -1812,6 +1852,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_b3_single_header_simple", [PROPAGATION_STYLE_B3_SINGLE], + None, { get_wsgi_header(_HTTP_HEADER_B3_SINGLE): B3_SINGLE_HEADERS_VALID[_HTTP_HEADER_B3_SINGLE], }, @@ -1825,6 +1866,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_b3_single_header_only_sampled", [PROPAGATION_STYLE_B3_SINGLE], + None, { _HTTP_HEADER_B3_SINGLE: "1", }, @@ -1838,6 +1880,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_b3_single_header_only_trace_and_span_id", [PROPAGATION_STYLE_B3_SINGLE], + None, { _HTTP_HEADER_B3_SINGLE: "80f198ee56343ba864fe8b2a57d3eff7-e457b5a2e4d86bd1", }, @@ -1851,12 +1894,14 @@ def test_extract_tracecontext(headers, expected_context): ( "invalid_b3_single_header", [PROPAGATION_STYLE_B3_SINGLE], + None, B3_SINGLE_HEADERS_INVALID, CONTEXT_EMPTY, ), ( "valid_b3_single_header_all_styles", [PROPAGATION_STYLE_DATADOG, PROPAGATION_STYLE_B3_MULTI, PROPAGATION_STYLE_B3_SINGLE], + None, B3_SINGLE_HEADERS_VALID, { "trace_id": TRACE_ID, @@ -1868,6 +1913,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_b3_single_header_extra_data", [PROPAGATION_STYLE_B3_SINGLE], + None, {_HTTP_HEADER_B3_SINGLE: B3_SINGLE_HEADERS_VALID[_HTTP_HEADER_B3_SINGLE] + "-05e3ac9a4f6e3b90-extra-data-here"}, { "trace_id": TRACE_ID, @@ -1879,18 +1925,21 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_b3_single_header_default_style", None, + None, B3_SINGLE_HEADERS_VALID, CONTEXT_EMPTY, ), ( 
"valid_b3_single_header_no_b3_single_header_style", [PROPAGATION_STYLE_B3_MULTI], + None, B3_SINGLE_HEADERS_VALID, CONTEXT_EMPTY, ), ( "baggage_case_insensitive", None, + None, {"BAgGage": "key1=val1,key2=val2"}, { "baggage": {"key1": "val1", "key2": "val2"}, @@ -1900,6 +1949,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_all_headers_default_style", None, + None, ALL_HEADERS, { "trace_id": 13088165645273925489, @@ -1928,6 +1978,7 @@ def test_extract_tracecontext(headers, expected_context): PROPAGATION_STYLE_B3_SINGLE, _PROPAGATION_STYLE_W3C_TRACECONTEXT, ], + None, ALL_HEADERS, { "trace_id": 13088165645273925489, @@ -1968,6 +2019,7 @@ def test_extract_tracecontext(headers, expected_context): PROPAGATION_STYLE_B3_SINGLE, _PROPAGATION_STYLE_W3C_TRACECONTEXT, ], + None, {get_wsgi_header(name): value for name, value in ALL_HEADERS.items()}, { "trace_id": 13088165645273925489, @@ -2003,6 +2055,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_all_headers_datadog_style", [PROPAGATION_STYLE_DATADOG], + None, ALL_HEADERS, { "trace_id": 13088165645273925489, @@ -2015,6 +2068,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_all_headers_datadog_style_wsgi", [PROPAGATION_STYLE_DATADOG], + None, {get_wsgi_header(name): value for name, value in ALL_HEADERS.items()}, { "trace_id": 13088165645273925489, @@ -2027,6 +2081,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_all_headers_b3_style", [PROPAGATION_STYLE_B3_MULTI], + None, ALL_HEADERS, { "trace_id": TRACE_ID, @@ -2038,6 +2093,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_all_headers_b3_style_wsgi", [PROPAGATION_STYLE_B3_MULTI], + None, {get_wsgi_header(name): value for name, value in ALL_HEADERS.items()}, { "trace_id": TRACE_ID, @@ -2049,6 +2105,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_all_headers_both_b3_styles", [PROPAGATION_STYLE_B3_MULTI, PROPAGATION_STYLE_B3_SINGLE], + 
None, ALL_HEADERS, { "trace_id": TRACE_ID, @@ -2060,6 +2117,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_all_headers_b3_single_style", [PROPAGATION_STYLE_B3_SINGLE], + None, ALL_HEADERS, { "trace_id": TRACE_ID, @@ -2072,6 +2130,7 @@ def test_extract_tracecontext(headers, expected_context): # name, styles, headers, expected_context, "none_style", [_PROPAGATION_STYLE_NONE], + None, ALL_HEADERS, { "trace_id": None, @@ -2084,6 +2143,7 @@ def test_extract_tracecontext(headers, expected_context): # name, styles, headers, expected_context, "none_and_other_prop_style_still_extracts", [PROPAGATION_STYLE_DATADOG, _PROPAGATION_STYLE_NONE], + None, ALL_HEADERS, { "trace_id": 13088165645273925489, @@ -2097,6 +2157,7 @@ def test_extract_tracecontext(headers, expected_context): ( "order_matters_B3_SINGLE_HEADER_first", [PROPAGATION_STYLE_B3_SINGLE, PROPAGATION_STYLE_B3_MULTI, PROPAGATION_STYLE_DATADOG], + None, B3_SINGLE_HEADERS_VALID, { "trace_id": TRACE_ID, @@ -2113,6 +2174,7 @@ def test_extract_tracecontext(headers, expected_context): PROPAGATION_STYLE_DATADOG, _PROPAGATION_STYLE_W3C_TRACECONTEXT, ], + None, B3_HEADERS_VALID, { "trace_id": TRACE_ID, @@ -2124,6 +2186,7 @@ def test_extract_tracecontext(headers, expected_context): ( "order_matters_B3_second_no_Datadog_headers", [PROPAGATION_STYLE_DATADOG, PROPAGATION_STYLE_B3_MULTI], + None, B3_HEADERS_VALID, { "trace_id": TRACE_ID, @@ -2135,6 +2198,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_all_headers_b3_single_style_wsgi", [PROPAGATION_STYLE_B3_SINGLE], + None, {get_wsgi_header(name): value for name, value in ALL_HEADERS.items()}, { "trace_id": TRACE_ID, @@ -2153,6 +2217,7 @@ def test_extract_tracecontext(headers, expected_context): _PROPAGATION_STYLE_W3C_TRACECONTEXT, PROPAGATION_STYLE_B3_SINGLE, ], + None, DATADOG_TRACECONTEXT_MATCHING_TRACE_ID_HEADERS, { "trace_id": _get_64_lowest_order_bits_as_int(TRACE_ID), @@ -2170,6 +2235,7 @@ def test_extract_tracecontext(headers, 
expected_context): ( "no_additional_tracestate_support_when_present_but_trace_ids_do_not_match", [PROPAGATION_STYLE_DATADOG, _PROPAGATION_STYLE_W3C_TRACECONTEXT], + None, {**DATADOG_HEADERS_VALID, **TRACECONTEXT_HEADERS_VALID_RUM_NO_SAMPLING_DECISION}, { "trace_id": 13088165645273925489, @@ -2191,18 +2257,21 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_all_headers_no_style", [], + None, ALL_HEADERS, CONTEXT_EMPTY, ), ( "valid_all_headers_no_style_wsgi", [], + None, {get_wsgi_header(name): value for name, value in ALL_HEADERS.items()}, CONTEXT_EMPTY, ), ( "datadog_tracecontext_conflicting_span_ids", [PROPAGATION_STYLE_DATADOG, _PROPAGATION_STYLE_W3C_TRACECONTEXT], + None, { HTTP_HEADER_TRACE_ID: "9291375655657946024", HTTP_HEADER_PARENT_ID: "15", @@ -2215,6 +2284,135 @@ def test_extract_tracecontext(headers, expected_context): "meta": {"_dd.p.dm": "-3", LAST_DD_PARENT_ID_KEY: "000000000000000f"}, }, ), + ( + "valid_datadog_default_w_restart_behavior", + None, + _PROPAGATION_BEHAVIOR_RESTART, + DATADOG_HEADERS_VALID, + { + "trace_id": None, + "span_id": None, + "sampling_priority": None, + "dd_origin": None, + "span_links": [ + SpanLink( + trace_id=13088165645273925489, + span_id=5678, + tracestate=None, + flags=1, + attributes={"reason": "propagation_behavior_extract", "context_headers": "datadog"}, + ) + ], + }, + ), + ( + "valid_datadog_tracecontext_and_baggage_default_w_restart_behavior", + None, + _PROPAGATION_BEHAVIOR_RESTART, + {**DATADOG_BAGGAGE_HEADERS_VALID, **TRACECONTEXT_HEADERS_VALID}, + { + "trace_id": None, + "span_id": None, + "sampling_priority": None, + "dd_origin": None, + "baggage": {"key1": "val1", "key2": "val2"}, + "span_links": [ + SpanLink( + trace_id=13088165645273925489, + span_id=5678, + tracestate=None, + flags=1, + attributes={"reason": "propagation_behavior_extract", "context_headers": "datadog"}, + ) + ], + }, + ), + # All valid headers + ( + "valid_all_headers_default_style_w_restart_behavior", + None, + 
_PROPAGATION_BEHAVIOR_RESTART, + ALL_HEADERS, + { + "trace_id": None, + "span_id": None, + "sampling_priority": None, + "dd_origin": None, + "span_links": [ + SpanLink( + trace_id=13088165645273925489, + span_id=5678, + tracestate=None, + flags=1, + attributes={"reason": "propagation_behavior_extract", "context_headers": "datadog"}, + ) + ], + }, + ), + ( + "valid_all_headers_trace_context_datadog_style_w_restart_behavior", + [_PROPAGATION_STYLE_W3C_TRACECONTEXT, PROPAGATION_STYLE_DATADOG], + _PROPAGATION_BEHAVIOR_RESTART, + ALL_HEADERS, + { + "trace_id": None, + "span_id": None, + "sampling_priority": None, + "dd_origin": None, + "span_links": [ + SpanLink( + trace_id=171395628812617415352188477958425669623, + span_id=67667974448284343, + tracestate="dd=s:2;o:rum;t.dm:-4;t.usr.id:baz64,congo=t61rcWkgMzE", + flags=1, + attributes={"reason": "propagation_behavior_extract", "context_headers": "tracecontext"}, + ) + ], + }, + ), + ( + "valid_all_headers_all_styles_w_restart_behavior", + [PROPAGATION_STYLE_B3_MULTI, PROPAGATION_STYLE_B3_SINGLE, _PROPAGATION_STYLE_W3C_TRACECONTEXT], + _PROPAGATION_BEHAVIOR_RESTART, + ALL_HEADERS, + { + "trace_id": None, + "span_id": None, + "sampling_priority": None, + "dd_origin": None, + "span_links": [ + SpanLink( + trace_id=171395628812617415352188477958425669623, + span_id=67667974448284343, + tracestate=None, + flags=1, + attributes={"reason": "propagation_behavior_extract", "context_headers": "b3multi"}, + ) + ], + }, + ), + ( + "valid_all_headers_and_baggage_trace_context_datadog_style_w_restart_behavior", + None, + _PROPAGATION_BEHAVIOR_RESTART, + {**ALL_HEADERS, **DATADOG_BAGGAGE_HEADERS_VALID}, + { + "trace_id": None, + "span_id": None, + "sampling_priority": None, + "dd_origin": None, + "baggage": {"key1": "val1", "key2": "val2"}, + "span_links": [ + SpanLink( + trace_id=13088165645273925489, + span_id=5678, + tracestate=None, + flags=1, + attributes={"reason": "propagation_behavior_extract", "context_headers": "datadog"}, + 
) + ], + }, + ), ] # Only add fixtures here if they can't pass both test_propagation_extract_env @@ -2225,6 +2423,7 @@ def test_extract_tracecontext(headers, expected_context): # can't be tested correctly via test_propagation_extract_w_config. It is tested separately "valid_tracecontext_simple", [_PROPAGATION_STYLE_W3C_TRACECONTEXT], + None, TRACECONTEXT_HEADERS_VALID_BASIC, { "trace_id": TRACE_ID, @@ -2241,6 +2440,7 @@ def test_extract_tracecontext(headers, expected_context): ( "valid_tracecontext_rum_no_sampling_decision", [_PROPAGATION_STYLE_W3C_TRACECONTEXT], + None, TRACECONTEXT_HEADERS_VALID_RUM_NO_SAMPLING_DECISION, { "trace_id": TRACE_ID, @@ -2252,11 +2452,24 @@ def test_extract_tracecontext(headers, expected_context): }, }, ), + # Only works for env since config is modified at startup to set + # propagation_style_extract to [None] if DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT is set to ignore + ( + "valid_datadog_default_w_ignore_behavior", + None, + _PROPAGATION_BEHAVIOR_IGNORE, + DATADOG_HEADERS_VALID, + CONTEXT_EMPTY, + ), ] -@pytest.mark.parametrize("name,styles,headers,expected_context", EXTRACT_FIXTURES + EXTRACT_FIXTURES_ENV_ONLY) -def test_propagation_extract_env(name, styles, headers, expected_context, run_python_code_in_subprocess): +@pytest.mark.parametrize( + "name,styles,extract_behavior,headers,expected_context", EXTRACT_FIXTURES + EXTRACT_FIXTURES_ENV_ONLY +) +def test_propagation_extract_env( + name, styles, extract_behavior, headers, expected_context, run_python_code_in_subprocess +): # Execute the test code in isolation to ensure env variables work as expected code = """ import json @@ -2274,18 +2487,24 @@ def test_propagation_extract_env(name, styles, headers, expected_context, run_py env = os.environ.copy() if styles is not None: env["DD_TRACE_PROPAGATION_STYLE"] = ",".join(styles) + if extract_behavior is not None: + env["DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT"] = extract_behavior stdout, stderr, status, _ = 
run_python_code_in_subprocess(code=code, env=env) print(stderr, stdout) assert status == 0, (stdout, stderr) -@pytest.mark.parametrize("name,styles,headers,expected_context", EXTRACT_FIXTURES) -def test_propagation_extract_w_config(name, styles, headers, expected_context, run_python_code_in_subprocess): +@pytest.mark.parametrize("name,styles,extract_behavior,headers,expected_context", EXTRACT_FIXTURES) +def test_propagation_extract_w_config( + name, styles, extract_behavior, headers, expected_context, run_python_code_in_subprocess +): # Setting via ddtrace.config works as expected too # DEV: This also helps us get code coverage reporting overrides = {} if styles is not None: overrides["_propagation_style_extract"] = styles + if extract_behavior is not None: + overrides["_propagation_behavior_extract"] = extract_behavior with override_global_config(overrides): context = HTTPPropagator.extract(headers) if not expected_context.get("tracestate"): diff --git a/tests/utils.py b/tests/utils.py index de0129f75a3..5b98fe42d1b 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -123,6 +123,7 @@ def override_global_config(values): "_health_metrics_enabled", "_propagation_style_extract", "_propagation_style_inject", + "_propagation_behavior_extract", "_x_datadog_tags_max_length", "_128_bit_trace_id_enabled", "_x_datadog_tags_enabled", From c1fedaa67af7ecb516d972a9f551a0f433e1b57f Mon Sep 17 00:00:00 2001 From: Federico Mon Date: Wed, 15 Jan 2025 17:28:22 +0100 Subject: [PATCH 04/16] ci: add healthcheck while setting up test http server process (#11929) --- tests/appsec/iast/conftest.py | 31 +++++++++++++++++++++++++------ 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/tests/appsec/iast/conftest.py b/tests/appsec/iast/conftest.py index 5718fed357a..85d516dd154 100644 --- a/tests/appsec/iast/conftest.py +++ b/tests/appsec/iast/conftest.py @@ -2,6 +2,7 @@ import os import re import subprocess +import time import pytest @@ -26,6 +27,8 @@ from 
ddtrace.appsec._iast.taint_sinks.weak_hash import unpatch_iast as weak_hash_unpatch from ddtrace.contrib.internal.sqlite3.patch import patch as sqli_sqlite_patch from ddtrace.contrib.internal.sqlite3.patch import unpatch as sqli_sqlite_unpatch +from ddtrace.internal.utils.http import Response +from ddtrace.internal.utils.http import get_connection from tests.utils import override_env from tests.utils import override_global_config @@ -162,11 +165,27 @@ def check_native_code_exception_in_each_python_aspect_test(request, caplog): @pytest.fixture(scope="session") def configuration_endpoint(): current_dir = os.path.dirname(__file__) - cmd = [ - "python", - os.path.join(current_dir, "fixtures", "integration", "http_config_server.py"), - CONFIG_SERVER_PORT, - ] - process = subprocess.Popen(cmd, cwd=current_dir) + status = None + retries = 0 + while status != 200 and retries < 5: + cmd = [ + "python", + os.path.join(current_dir, "fixtures", "integration", "http_config_server.py"), + CONFIG_SERVER_PORT, + ] + process = subprocess.Popen(cmd, cwd=current_dir) + time.sleep(0.2) + + url = f"http://localhost:{CONFIG_SERVER_PORT}/" + conn = get_connection(url) + conn.request("GET", "/") + response = conn.getresponse() + result = Response.from_http_response(response) + status = result.status + retries += 1 + + if retries == 5: + pytest.skip("Failed to start the configuration server") + yield process.kill() From 560aa89e29a5c9184d2bde44f8fcd382bee60ea0 Mon Sep 17 00:00:00 2001 From: erikayasuda <153395705+erikayasuda@users.noreply.github.com> Date: Wed, 15 Jan 2025 11:42:12 -0500 Subject: [PATCH 05/16] chore(ci): pin `greenlet` version to 3.1.0 for 3.8 (#11953) ## Context Failing tests slipped into the 2.19 branch, one of them being [this gevent test failure](https://gitlab.ddbuild.io/DataDog/apm-reliability/dd-trace-py/-/jobs/765798135). This was addressed in a fix PR to the main branch, and was [backported to 2.19](https://github.com/DataDog/dd-trace-py/pull/11920). 
However, once backporting this fix, [ci_visibility tests on python 3.8 started to fail consistently](https://gitlab.ddbuild.io/DataDog/apm-reliability/dd-trace-py/-/jobs/765777070). After some digging, it turns out that in the backported fix for gevent, the [greenlet dependency was bumped from 3.0.3 to 3.1.1](https://github.com/DataDog/dd-trace-py/pull/11920/files#diff-939f705fbaed2d1386345b41d7c68406808ecbc5609022a59505453df9ba4b63), which according to greenlet documentation, [3.1.0 was the last release to support 3.7 and 3.8](https://greenlet.readthedocs.io/en/latest/changes.html#id2). Looks like pip didn't catch this during the upgrade because the [minimum runtime version is still pinned to 3.7 in greenlet](https://github.com/python-greenlet/greenlet/blob/3.1.1/setup.py#L263). ## Description This PR pins greenlet to 3.1.0 for python<=3.8, and will use latest for 3.9+. The failing ci_visibility tests will pass on 3.8 now. ## Checklist - [x] PR author has checked that all the criteria below are met - The PR description includes an overview of the change - The PR description articulates the motivation for the change - The change includes tests OR the PR description describes a testing strategy - The PR description notes risks associated with the change, if any - Newly-added code is easy to change - The change follows the [library release note guidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html) - The change includes or references documentation updates if necessary - Backport labels are set (if [applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)) ## Reviewer Checklist - [ ] Reviewer has checked that all the criteria below are met - Title is accurate - All changes are related to the pull request's stated goal - Avoids breaking [API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces) changes - Testing strategy adequately addresses listed risks - Newly-added code is easy to change - Release note 
makes sense to a user of the library - If necessary, author has acknowledged and discussed the performance implications of this PR as reported in the benchmarks PR comment - Backport labels are set in a manner that is consistent with the [release branch maintenance policy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting) --- .riot/requirements/1d4e95e.txt | 27 ++++++++++++++++ .riot/requirements/2d19e52.txt | 32 ------------------- .../requirements/{27d0ff3.txt => 6bec1ec.txt} | 4 +-- riotfile.py | 11 +++++++ 4 files changed, 40 insertions(+), 34 deletions(-) create mode 100644 .riot/requirements/1d4e95e.txt delete mode 100644 .riot/requirements/2d19e52.txt rename .riot/requirements/{27d0ff3.txt => 6bec1ec.txt} (95%) diff --git a/.riot/requirements/1d4e95e.txt b/.riot/requirements/1d4e95e.txt new file mode 100644 index 00000000000..9d2871696ae --- /dev/null +++ b/.riot/requirements/1d4e95e.txt @@ -0,0 +1,27 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/1d4e95e.in +# +attrs==24.3.0 +coverage[toml]==7.6.10 +gevent==24.11.1 +greenlet==3.1.1 +hypothesis==6.45.0 +iniconfig==2.0.0 +mock==5.1.0 +msgpack==1.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +sortedcontainers==2.4.0 +zope-event==5.0 +zope-interface==7.2 + +# The following packages are considered to be unsafe in a requirements file: +setuptools==75.8.0 diff --git a/.riot/requirements/2d19e52.txt b/.riot/requirements/2d19e52.txt deleted file mode 100644 index 8de360e7316..00000000000 --- a/.riot/requirements/2d19e52.txt +++ /dev/null @@ -1,32 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.7 -# by the following command: -# -# pip-compile --allow-unsafe --config=pyproject.toml --no-annotate --resolver=backtracking .riot/requirements/2d19e52.in -# -attrs==24.2.0 
-coverage[toml]==7.2.7 -exceptiongroup==1.2.2 -gevent==22.10.2 -greenlet==3.1.1 -hypothesis==6.45.0 -importlib-metadata==6.7.0 -iniconfig==2.0.0 -mock==5.1.0 -msgpack==1.0.5 -opentracing==2.4.0 -packaging==24.0 -pluggy==1.2.0 -pytest==7.4.4 -pytest-cov==4.1.0 -pytest-mock==3.11.1 -pytest-randomly==3.12.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -typing-extensions==4.7.1 -zipp==3.15.0 -zope-event==5.0 -zope-interface==6.4.post2 - -# The following packages are considered to be unsafe in a requirements file: -setuptools==68.0.0 diff --git a/.riot/requirements/27d0ff3.txt b/.riot/requirements/6bec1ec.txt similarity index 95% rename from .riot/requirements/27d0ff3.txt rename to .riot/requirements/6bec1ec.txt index c03419edbdb..3e128a77c79 100644 --- a/.riot/requirements/27d0ff3.txt +++ b/.riot/requirements/6bec1ec.txt @@ -2,13 +2,13 @@ # This file is autogenerated by pip-compile with Python 3.8 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/27d0ff3.in +# pip-compile --allow-unsafe --no-annotate .riot/requirements/6bec1ec.in # attrs==24.3.0 coverage[toml]==7.6.1 exceptiongroup==1.2.2 gevent==24.2.1 -greenlet==3.1.1 +greenlet==3.1.0 hypothesis==6.45.0 importlib-metadata==8.5.0 iniconfig==2.0.0 diff --git a/riotfile.py b/riotfile.py index f5d2263bbee..42deb36b813 100644 --- a/riotfile.py +++ b/riotfile.py @@ -2943,6 +2943,17 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT env={ "DD_AGENT_PORT": "9126", }, + venvs=[ + # Python 3.8 + Venv( + pys=["3.8"], + pkgs={"greenlet": "==3.1.0"}, + ), + # Python 3.9+ + Venv( + pys=select_pys(min_version="3.9"), + ), + ], ), Venv( name="subprocess", From 21d50d49b90a089e75070edba56c2880343a31c7 Mon Sep 17 00:00:00 2001 From: Munir Abdinur Date: Wed, 15 Jan 2025 12:19:43 -0500 Subject: [PATCH 06/16] chore(tracing): move Pin and TraceFilter from ddtrace to ddtrace.trace [3.0] (#11899) ## Motivation In 3.0 all tracing specific objects should be defined in 
either `ddtrace.trace` or `ddtrace._trace` packages. This will allow us to better decouple tracing from other Datadog products. ## Changes - Exposes `Pin` and `TraceFilter` classes in `ddtrace.trace.__init__.py`. This keeps these objects in the public API. - Internalize the implementation details of the `Pin` and `TraceFilter` classes by moving `pin.py` and `filters.py` to the `ddtrace._trace` package. - Maintains backwards compatibility by continuing to export all objects in `ddtrace._trace.[pin/filters].py` in `ddtrace.[pin/filters].py`. - Logs a warning if the deprecated filters and pin modules are used. ## Checklist - [ ] PR author has checked that all the criteria below are met - The PR description includes an overview of the change - The PR description articulates the motivation for the change - The change includes tests OR the PR description describes a testing strategy - The PR description notes risks associated with the change, if any - Newly-added code is easy to change - The change follows the [library release note guidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html) - The change includes or references documentation updates if necessary - Backport labels are set (if [applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)) ## Reviewer Checklist - [x] Reviewer has checked that all the criteria below are met - Title is accurate - All changes are related to the pull request's stated goal - Avoids breaking [API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces) changes - Testing strategy adequately addresses listed risks - Newly-added code is easy to change - Release note makes sense to a user of the library - If necessary, author has acknowledged and discussed the performance implications of this PR as reported in the benchmarks PR comment - Backport labels are set in a manner that is consistent with the [release branch maintenance 
policy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting) --- benchmarks/bm/utils.py | 2 +- ddtrace/__init__.py | 3 +- ddtrace/_trace/filters.py | 72 ++++++ ddtrace/_trace/pin.py | 209 +++++++++++++++++ ddtrace/_trace/tracer.py | 2 +- ddtrace/contrib/aiomysql/__init__.py | 2 +- ddtrace/contrib/aiopg/__init__.py | 3 +- ddtrace/contrib/aioredis/__init__.py | 2 +- ddtrace/contrib/anthropic/__init__.py | 3 +- ddtrace/contrib/aredis/__init__.py | 2 +- ddtrace/contrib/asyncpg/__init__.py | 2 +- ddtrace/contrib/cassandra/__init__.py | 3 +- ddtrace/contrib/consul/__init__.py | 3 +- ddtrace/contrib/dbapi/__init__.py | 2 +- ddtrace/contrib/dbapi_async/__init__.py | 2 +- .../contrib/google_generativeai/__init__.py | 3 +- ddtrace/contrib/graphql/__init__.py | 2 +- ddtrace/contrib/grpc/__init__.py | 6 +- ddtrace/contrib/httpx/__init__.py | 2 +- ddtrace/contrib/internal/aiobotocore/patch.py | 2 +- ddtrace/contrib/internal/aiohttp/patch.py | 2 +- .../contrib/internal/aiohttp_jinja2/patch.py | 2 +- ddtrace/contrib/internal/aiomysql/patch.py | 2 +- ddtrace/contrib/internal/aiopg/connection.py | 2 +- ddtrace/contrib/internal/aioredis/patch.py | 2 +- .../contrib/internal/algoliasearch/patch.py | 2 +- ddtrace/contrib/internal/anthropic/patch.py | 2 +- ddtrace/contrib/internal/aredis/patch.py | 2 +- ddtrace/contrib/internal/asgi/middleware.py | 8 +- ddtrace/contrib/internal/asyncio/patch.py | 2 +- ddtrace/contrib/internal/asyncpg/patch.py | 2 +- ddtrace/contrib/internal/avro/patch.py | 2 +- .../contrib/internal/azure_functions/patch.py | 2 +- ddtrace/contrib/internal/boto/patch.py | 2 +- ddtrace/contrib/internal/botocore/patch.py | 2 +- ddtrace/contrib/internal/cassandra/session.py | 2 +- ddtrace/contrib/internal/celery/app.py | 4 +- ddtrace/contrib/internal/celery/signals.py | 2 +- ddtrace/contrib/internal/consul/patch.py | 2 +- ddtrace/contrib/internal/django/patch.py | 2 +- .../contrib/internal/dogpile_cache/lock.py | 2 +- 
.../contrib/internal/dogpile_cache/patch.py | 6 +- .../contrib/internal/dogpile_cache/region.py | 2 +- .../contrib/internal/elasticsearch/patch.py | 2 +- ddtrace/contrib/internal/fastapi/patch.py | 2 +- ddtrace/contrib/internal/flask/patch.py | 2 +- ddtrace/contrib/internal/flask/wrappers.py | 2 +- .../internal/google_generativeai/patch.py | 2 +- ddtrace/contrib/internal/graphql/patch.py | 2 +- .../internal/grpc/aio_client_interceptor.py | 2 +- .../internal/grpc/aio_server_interceptor.py | 2 +- ddtrace/contrib/internal/grpc/patch.py | 2 +- ddtrace/contrib/internal/httplib/patch.py | 2 +- ddtrace/contrib/internal/httpx/patch.py | 2 +- ddtrace/contrib/internal/jinja2/patch.py | 2 +- ddtrace/contrib/internal/kafka/patch.py | 2 +- ddtrace/contrib/internal/kombu/patch.py | 2 +- ddtrace/contrib/internal/langchain/patch.py | 2 +- ddtrace/contrib/internal/mako/patch.py | 2 +- ddtrace/contrib/internal/mariadb/patch.py | 2 +- ddtrace/contrib/internal/molten/patch.py | 2 +- ddtrace/contrib/internal/molten/wrappers.py | 2 +- ddtrace/contrib/internal/mysql/patch.py | 2 +- ddtrace/contrib/internal/mysqldb/patch.py | 2 +- ddtrace/contrib/internal/openai/patch.py | 2 +- ddtrace/contrib/internal/protobuf/patch.py | 2 +- .../internal/psycopg/async_connection.py | 2 +- .../contrib/internal/psycopg/connection.py | 2 +- ddtrace/contrib/internal/psycopg/patch.py | 2 +- ddtrace/contrib/internal/pymemcache/client.py | 2 +- ddtrace/contrib/internal/pymemcache/patch.py | 6 +- ddtrace/contrib/internal/pymongo/client.py | 2 +- ddtrace/contrib/internal/pymongo/patch.py | 2 +- ddtrace/contrib/internal/pymysql/patch.py | 2 +- ddtrace/contrib/internal/pynamodb/patch.py | 2 +- ddtrace/contrib/internal/pyodbc/patch.py | 2 +- .../contrib/internal/redis/asyncio_patch.py | 2 +- ddtrace/contrib/internal/redis/patch.py | 2 +- .../contrib/internal/rediscluster/patch.py | 2 +- ddtrace/contrib/internal/requests/patch.py | 2 +- ddtrace/contrib/internal/requests/session.py | 2 +- 
ddtrace/contrib/internal/rq/patch.py | 2 +- ddtrace/contrib/internal/sanic/patch.py | 2 +- ddtrace/contrib/internal/snowflake/patch.py | 2 +- ddtrace/contrib/internal/sqlalchemy/engine.py | 2 +- ddtrace/contrib/internal/sqlite3/patch.py | 2 +- ddtrace/contrib/internal/starlette/patch.py | 2 +- ddtrace/contrib/internal/subprocess/patch.py | 2 +- ddtrace/contrib/internal/tornado/template.py | 2 +- ddtrace/contrib/internal/urllib3/patch.py | 2 +- ddtrace/contrib/internal/vertexai/patch.py | 2 +- ddtrace/contrib/internal/vertica/patch.py | 2 +- ddtrace/contrib/internal/wsgi/wsgi.py | 2 +- ddtrace/contrib/internal/yaaredis/patch.py | 2 +- ddtrace/contrib/kafka/__init__.py | 2 +- ddtrace/contrib/kombu/__init__.py | 3 +- ddtrace/contrib/mariadb/__init__.py | 2 +- ddtrace/contrib/mongoengine/__init__.py | 3 +- ddtrace/contrib/mysql/__init__.py | 2 +- ddtrace/contrib/mysqldb/__init__.py | 2 +- ddtrace/contrib/openai/__init__.py | 3 +- ddtrace/contrib/psycopg/__init__.py | 2 +- ddtrace/contrib/pylibmc/__init__.py | 3 +- ddtrace/contrib/pymemcache/__init__.py | 3 +- ddtrace/contrib/pymongo/__init__.py | 3 +- ddtrace/contrib/pymysql/__init__.py | 2 +- ddtrace/contrib/pyodbc/__init__.py | 2 +- ddtrace/contrib/redis/__init__.py | 2 +- ddtrace/contrib/rediscluster/__init__.py | 3 +- ddtrace/contrib/rq/__init__.py | 2 +- ddtrace/contrib/snowflake/__init__.py | 2 +- ddtrace/contrib/sqlalchemy/__init__.py | 3 +- ddtrace/contrib/sqlite3/__init__.py | 2 +- ddtrace/contrib/trace_utils.py | 2 +- ddtrace/contrib/trace_utils_async.py | 2 +- ddtrace/contrib/vertexai/__init__.py | 3 +- ddtrace/contrib/vertica/__init__.py | 3 +- ddtrace/contrib/yaaredis/__init__.py | 2 +- ddtrace/filters.py | 78 +------ ddtrace/internal/ci_visibility/filters.py | 2 +- ddtrace/llmobs/_integrations/base.py | 2 +- ddtrace/llmobs/_integrations/openai.py | 2 +- ddtrace/pin.py | 215 +----------------- ddtrace/settings/config.py | 2 +- ddtrace/trace/__init__.py | 9 +- docker-compose.yml | 1 + 
docs/advanced_usage.rst | 4 +- docs/contributing-integrations.rst | 2 +- docs/troubleshooting.rst | 2 +- ...ers-to-trace-package-2f47fa2d2592b413.yaml | 6 + .../appsec/contrib_appsec/django_app/urls.py | 2 +- .../appsec/contrib_appsec/fastapi_app/app.py | 2 +- tests/appsec/contrib_appsec/flask_app/app.py | 2 +- tests/appsec/contrib_appsec/test_flask.py | 2 +- tests/appsec/contrib_appsec/utils.py | 6 +- tests/commands/ddtrace_run_integration.py | 2 +- tests/contrib/aiobotocore/utils.py | 2 +- tests/contrib/aiohttp/test_aiohttp_client.py | 6 +- tests/contrib/aiohttp_jinja2/conftest.py | 2 +- .../aiohttp_jinja2/test_aiohttp_jinja2.py | 2 +- tests/contrib/aiomysql/test_aiomysql.py | 2 +- tests/contrib/aiopg/test.py | 5 +- tests/contrib/algoliasearch/test.py | 2 +- tests/contrib/anthropic/conftest.py | 2 +- tests/contrib/aredis/test_aredis.py | 4 +- tests/contrib/asyncpg/test_asyncpg.py | 2 +- tests/contrib/avro/test_avro.py | 2 +- tests/contrib/boto/test.py | 5 +- tests/contrib/botocore/test.py | 2 +- tests/contrib/botocore/test_bedrock.py | 2 +- tests/contrib/botocore/test_bedrock_llmobs.py | 2 +- tests/contrib/botocore/test_stepfunctions.py | 2 +- tests/contrib/cassandra/test.py | 2 +- tests/contrib/celery/autopatch.py | 2 +- tests/contrib/celery/base.py | 2 +- tests/contrib/celery/test_app.py | 2 +- tests/contrib/celery/test_integration.py | 2 +- tests/contrib/celery/test_patch.py | 2 +- tests/contrib/celery/test_tagging.py | 2 +- tests/contrib/consul/test.py | 2 +- tests/contrib/dbapi/test_dbapi.py | 2 +- tests/contrib/dbapi/test_dbapi_appsec.py | 2 +- tests/contrib/dbapi_async/test_dbapi_async.py | 2 +- tests/contrib/django/conftest.py | 2 +- tests/contrib/django/test_django_dbm.py | 2 +- tests/contrib/dogpile_cache/test_tracing.py | 2 +- tests/contrib/dramatiq/test_integration.py | 2 +- .../elasticsearch/test_elasticsearch.py | 2 +- tests/contrib/flask/__init__.py | 2 +- tests/contrib/flask/test_blueprint.py | 2 +- tests/contrib/flask/test_flask_helpers.py | 2 +- 
tests/contrib/flask/test_signals.py | 2 +- tests/contrib/flask/test_template.py | 2 +- .../flask_autopatch/test_flask_autopatch.py | 2 +- tests/contrib/google_generativeai/conftest.py | 2 +- tests/contrib/grpc/common.py | 2 +- tests/contrib/grpc/test_grpc.py | 4 +- tests/contrib/grpc_aio/test_grpc_aio.py | 2 +- tests/contrib/httplib/test_httplib.py | 2 +- .../httplib/test_httplib_distributed.py | 2 +- tests/contrib/httpx/test_httpx.py | 2 +- tests/contrib/httpx/test_httpx_pre_0_11.py | 2 +- tests/contrib/jinja2/test_jinja2.py | 2 +- tests/contrib/kafka/test_kafka.py | 12 +- tests/contrib/kombu/test.py | 2 +- tests/contrib/langchain/conftest.py | 2 +- tests/contrib/mako/test_mako.py | 2 +- tests/contrib/mariadb/test_mariadb.py | 2 +- tests/contrib/molten/test_molten.py | 2 +- tests/contrib/molten/test_molten_di.py | 2 +- tests/contrib/mongoengine/test.py | 2 +- tests/contrib/mysql/test_mysql.py | 2 +- tests/contrib/mysqldb/test_mysqldb.py | 2 +- tests/contrib/openai/conftest.py | 4 +- tests/contrib/openai/test_openai_v0.py | 6 +- tests/contrib/openai/test_openai_v1.py | 6 +- tests/contrib/psycopg/test_psycopg.py | 2 +- tests/contrib/psycopg/test_psycopg_async.py | 2 +- tests/contrib/psycopg2/test_psycopg.py | 2 +- tests/contrib/pylibmc/test.py | 5 +- tests/contrib/pymemcache/test_client.py | 5 +- .../pymemcache/test_client_defaults.py | 5 +- tests/contrib/pymemcache/test_client_mixin.py | 5 +- tests/contrib/pymongo/test.py | 5 +- tests/contrib/pymysql/test_pymysql.py | 2 +- tests/contrib/pynamodb/test_pynamodb.py | 2 +- tests/contrib/pyodbc/test_pyodbc.py | 2 +- .../pyramid/pserve_app/app/__init__.py | 2 +- tests/contrib/pytest/test_pytest.py | 2 +- tests/contrib/redis/test_redis.py | 2 +- tests/contrib/redis/test_redis_asyncio.py | 2 +- tests/contrib/redis/test_redis_cluster.py | 2 +- .../redis/test_redis_cluster_asyncio.py | 16 +- tests/contrib/rediscluster/test.py | 2 +- tests/contrib/rq/test_rq.py | 2 +- tests/contrib/shared_tests.py | 2 +- 
tests/contrib/shared_tests_async.py | 2 +- tests/contrib/snowflake/test_snowflake.py | 2 +- tests/contrib/sqlalchemy/test_patch.py | 2 +- tests/contrib/sqlite3/test_sqlite3.py | 2 +- tests/contrib/starlette/test_starlette.py | 2 +- tests/contrib/subprocess/test_subprocess.py | 2 +- tests/contrib/tornado/test_config.py | 2 +- tests/contrib/urllib3/test_urllib3.py | 2 +- tests/contrib/vertexai/conftest.py | 2 +- tests/contrib/vertica/test_vertica.py | 2 +- tests/contrib/yaaredis/test_yaaredis.py | 2 +- tests/telemetry/test_telemetry.py | 2 +- tests/tracer/runtime/test_tag_collectors.py | 2 +- tests/tracer/test_filters.py | 4 +- tests/tracer/test_instance_config.py | 2 +- tests/tracer/test_pin.py | 2 +- tests/tracer/test_trace_utils.py | 2 +- tests/webclient.py | 2 +- 234 files changed, 605 insertions(+), 546 deletions(-) create mode 100644 ddtrace/_trace/filters.py create mode 100644 ddtrace/_trace/pin.py create mode 100644 releasenotes/notes/move-pin-and-filters-to-trace-package-2f47fa2d2592b413.yaml diff --git a/benchmarks/bm/utils.py b/benchmarks/bm/utils.py index ba6461336b5..dd7b4991c57 100644 --- a/benchmarks/bm/utils.py +++ b/benchmarks/bm/utils.py @@ -8,8 +8,8 @@ from ddtrace import __version__ as ddtrace_version from ddtrace._trace.span import Span -from ddtrace.filters import TraceFilter from ddtrace.internal import telemetry +from ddtrace.trace import TraceFilter _Span = Span diff --git a/ddtrace/__init__.py b/ddtrace/__init__.py index 1f2049cd0a5..835291fadb7 100644 --- a/ddtrace/__init__.py +++ b/ddtrace/__init__.py @@ -26,7 +26,7 @@ from ._monkey import patch_all # noqa: E402 from .internal.compat import PYTHON_VERSION_INFO # noqa: E402 from .internal.utils.deprecations import DDTraceDeprecationWarning # noqa: E402 -from .pin import Pin # noqa: E402 +from ddtrace._trace.pin import Pin # noqa: E402 from ddtrace._trace.span import Span # noqa: E402 from ddtrace._trace.tracer import Tracer # noqa: E402 from ddtrace.vendor import debtcollector @@ -67,6 
+67,7 @@ _DEPRECATED_MODULE_ATTRIBUTES = [ "Span", "Tracer", + "Pin", ] diff --git a/ddtrace/_trace/filters.py b/ddtrace/_trace/filters.py new file mode 100644 index 00000000000..a2e6884f05c --- /dev/null +++ b/ddtrace/_trace/filters.py @@ -0,0 +1,72 @@ +import abc +import re +from typing import TYPE_CHECKING # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Union # noqa:F401 + +from ddtrace._trace.processor import TraceProcessor +from ddtrace.ext import http + + +if TYPE_CHECKING: # pragma: no cover + from ddtrace._trace.span import Span # noqa:F401 + + +class TraceFilter(TraceProcessor): + @abc.abstractmethod + def process_trace(self, trace): + # type: (List[Span]) -> Optional[List[Span]] + """Processes a trace. + + None can be returned to prevent the trace from being exported. + """ + pass + + +class FilterRequestsOnUrl(TraceFilter): + r"""Filter out traces from incoming http requests based on the request's url. + + This class takes as argument a list of regular expression patterns + representing the urls to be excluded from tracing. A trace will be excluded + if its root span contains a ``http.url`` tag and if this tag matches any of + the provided regular expression using the standard python regexp match + semantic (https://docs.python.org/3/library/re.html#re.match). + + :param list regexps: a list of regular expressions (or a single string) defining + the urls that should be filtered out. 
+ + Examples: + To filter out http calls to domain api.example.com:: + + FilterRequestsOnUrl(r'http://api\\.example\\.com') + + To filter out http calls to all first level subdomains from example.com:: + + FilterRequestsOnUrl(r'http://.*\\.example\\.com') + + To filter out calls to both http://test.example.com and http://example.com/healthcheck:: + + FilterRequestsOnUrl([r'http://test\\.example\\.com', r'http://example\\.com/healthcheck']) + """ + + def __init__(self, regexps: Union[str, List[str]]): + if isinstance(regexps, str): + regexps = [regexps] + self._regexps = [re.compile(regexp) for regexp in regexps] + + def process_trace(self, trace): + # type: (List[Span]) -> Optional[List[Span]] + """ + When the filter is registered in the tracer, process_trace is called + on each trace before it is sent to the agent, the returned value will + be fed to the next filter in the list. If process_trace returns None, + the whole trace is discarded. + """ + for span in trace: + url = span.get_tag(http.URL) + if span.parent_id is None and url is not None: + for regexp in self._regexps: + if regexp.match(url): + return None + return trace diff --git a/ddtrace/_trace/pin.py b/ddtrace/_trace/pin.py new file mode 100644 index 00000000000..d12303a57ea --- /dev/null +++ b/ddtrace/_trace/pin.py @@ -0,0 +1,209 @@ +from typing import TYPE_CHECKING # noqa:F401 +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import Optional # noqa:F401 + +import wrapt + +import ddtrace + +from ..internal.logger import get_logger + + +log = get_logger(__name__) + + +# To set attributes on wrapt proxy objects use this prefix: +# http://wrapt.readthedocs.io/en/latest/wrappers.html +_DD_PIN_NAME = "_datadog_pin" +_DD_PIN_PROXY_NAME = "_self_" + _DD_PIN_NAME + + +class Pin(object): + """Pin (a.k.a Patch INfo) is a small class which is used to + set tracing metadata on a particular traced connection.
+ This is useful if you wanted to, say, trace two different + database clusters. + + >>> conn = sqlite.connect('/tmp/user.db') + >>> # Override a pin for a specific connection + >>> pin = Pin.override(conn, service='user-db') + >>> conn = sqlite.connect('/tmp/image.db') + """ + + __slots__ = ["tags", "tracer", "_target", "_config", "_initialized"] + + def __init__( + self, + service=None, # type: Optional[str] + tags=None, # type: Optional[Dict[str, str]] + tracer=None, + _config=None, # type: Optional[Dict[str, Any]] + ): + # type: (...) -> None + tracer = tracer or ddtrace.tracer + self.tags = tags + self.tracer = tracer + self._target = None # type: Optional[int] + # keep the configuration attribute internal because the + # public API to access it is not the Pin class + self._config = _config or {} # type: Dict[str, Any] + # [Backward compatibility]: service argument updates the `Pin` config + self._config["service_name"] = service + self._initialized = True + + @property + def service(self): + # type: () -> str + """Backward compatibility: accessing to `pin.service` returns the underlying + configuration value. 
+ """ + return self._config["service_name"] + + def __setattr__(self, name, value): + if getattr(self, "_initialized", False) and name != "_target": + raise AttributeError("can't mutate a pin, use override() or clone() instead") + super(Pin, self).__setattr__(name, value) + + def __repr__(self): + return "Pin(service=%s, tags=%s, tracer=%s)" % (self.service, self.tags, self.tracer) + + @staticmethod + def _find(*objs): + # type: (Any) -> Optional[Pin] + """ + Return the first :class:`ddtrace.trace.Pin` found on any of the provided objects or `None` if none were found + + + >>> pin = Pin._find(wrapper, instance, conn) + + :param objs: The objects to search for a :class:`ddtrace.trace.Pin` on + :type objs: List of objects + :rtype: :class:`ddtrace.trace.Pin`, None + :returns: The first found :class:`ddtrace.trace.Pin` or `None` is none was found + """ + for obj in objs: + pin = Pin.get_from(obj) + if pin: + return pin + return None + + @staticmethod + def get_from(obj): + # type: (Any) -> Optional[Pin] + """Return the pin associated with the given object. If a pin is attached to + `obj` but the instance is not the owner of the pin, a new pin is cloned and + attached. This ensures that a pin inherited from a class is a copy for the new + instance, avoiding that a specific instance overrides other pins values. 
+ + >>> pin = Pin.get_from(conn) + + :param obj: The object to look for a :class:`ddtrace.trace.Pin` on + :type obj: object + :rtype: :class:`ddtrace.trace.Pin`, None + :returns: :class:`ddtrace.trace.Pin` associated with the object or None + """ + if hasattr(obj, "__getddpin__"): + return obj.__getddpin__() + + pin_name = _DD_PIN_PROXY_NAME if isinstance(obj, wrapt.ObjectProxy) else _DD_PIN_NAME + pin = getattr(obj, pin_name, None) + # detect if the PIN has been inherited from a class + if pin is not None and pin._target != id(obj): + pin = pin.clone() + pin.onto(obj) + return pin + + @classmethod + def override( + cls, + obj, # type: Any + service=None, # type: Optional[str] + tags=None, # type: Optional[Dict[str, str]] + tracer=None, + ): + # type: (...) -> None + """Override an object with the given attributes. + + That's the recommended way to customize an already instrumented client, without + losing existing attributes. + + >>> conn = sqlite.connect('/tmp/user.db') + >>> # Override a pin for a specific connection + >>> Pin.override(conn, service='user-db') + """ + if not obj: + return + + pin = cls.get_from(obj) + if pin is None: + Pin(service=service, tags=tags, tracer=tracer).onto(obj) + else: + pin.clone(service=service, tags=tags, tracer=tracer).onto(obj) + + def enabled(self): + # type: () -> bool + """Return true if this pin's tracer is enabled.""" + # inline to avoid circular imports + from ddtrace.settings.asm import config as asm_config + + return bool(self.tracer) and (self.tracer.enabled or asm_config._apm_opt_out) + + def onto(self, obj, send=True): + # type: (Any, bool) -> None + """Patch this pin onto the given object. If send is true, it will also + queue the metadata to be sent to the server. + """ + # Actually patch it on the object. 
+ try: + if hasattr(obj, "__setddpin__"): + return obj.__setddpin__(self) + + pin_name = _DD_PIN_PROXY_NAME if isinstance(obj, wrapt.ObjectProxy) else _DD_PIN_NAME + + # set the target reference; any get_from, clones and retarget the new PIN + self._target = id(obj) + if self.service: + ddtrace.config._add_extra_service(self.service) + return setattr(obj, pin_name, self) + except AttributeError: + log.debug("can't pin onto object. skipping", exc_info=True) + + def remove_from(self, obj): + # type: (Any) -> None + # Remove pin from the object. + try: + pin_name = _DD_PIN_PROXY_NAME if isinstance(obj, wrapt.ObjectProxy) else _DD_PIN_NAME + + pin = Pin.get_from(obj) + if pin is not None: + delattr(obj, pin_name) + except AttributeError: + log.debug("can't remove pin from object. skipping", exc_info=True) + + def clone( + self, + service=None, # type: Optional[str] + tags=None, # type: Optional[Dict[str, str]] + tracer=None, + ): + # type: (...) -> Pin + """Return a clone of the pin with the given attributes replaced.""" + # do a shallow copy of Pin dicts + if not tags and self.tags: + tags = self.tags.copy() + + # we use a copy instead of a deepcopy because we expect configurations + # to have only a root level dictionary without nested objects. 
Using + # deepcopy introduces a big overhead: + # + # copy: 0.00654911994934082 + # deepcopy: 0.2787208557128906 + config = self._config.copy() + + return Pin( + service=service or self.service, + tags=tags, + tracer=tracer or self.tracer, # do not clone the Tracer + _config=config, + ) diff --git a/ddtrace/_trace/tracer.py b/ddtrace/_trace/tracer.py index 9752d82ff83..46c00b0c515 100644 --- a/ddtrace/_trace/tracer.py +++ b/ddtrace/_trace/tracer.py @@ -34,7 +34,6 @@ from ddtrace.constants import HOSTNAME_KEY from ddtrace.constants import PID from ddtrace.constants import VERSION_KEY -from ddtrace.filters import TraceFilter from ddtrace.internal import agent from ddtrace.internal import atexit from ddtrace.internal import compat @@ -69,6 +68,7 @@ from ddtrace.settings import Config from ddtrace.settings.asm import config as asm_config from ddtrace.settings.peer_service import _ps_config +from ddtrace.trace import TraceFilter from ddtrace.vendor.debtcollector import deprecate diff --git a/ddtrace/contrib/aiomysql/__init__.py b/ddtrace/contrib/aiomysql/__init__.py index 98f78d3f3ab..06cd9987d81 100644 --- a/ddtrace/contrib/aiomysql/__init__.py +++ b/ddtrace/contrib/aiomysql/__init__.py @@ -19,7 +19,7 @@ To configure the integration on an per-connection basis use the ``Pin`` API:: - from ddtrace import Pin + from ddtrace.trace import Pin import asyncio import aiomysql diff --git a/ddtrace/contrib/aiopg/__init__.py b/ddtrace/contrib/aiopg/__init__.py index 11a572d12ed..c4cd51fdaa2 100644 --- a/ddtrace/contrib/aiopg/__init__.py +++ b/ddtrace/contrib/aiopg/__init__.py @@ -1,7 +1,8 @@ """ Instrument aiopg to report a span for each executed Postgres queries:: - from ddtrace import Pin, patch + from ddtrace import patch + from ddtrace.trace import Pin import aiopg # If not patched yet, you can patch aiopg specifically diff --git a/ddtrace/contrib/aioredis/__init__.py b/ddtrace/contrib/aioredis/__init__.py index 2bc3669a340..b390185d48d 100644 --- 
a/ddtrace/contrib/aioredis/__init__.py +++ b/ddtrace/contrib/aioredis/__init__.py @@ -64,7 +64,7 @@ ``Pin`` API:: import aioredis - from ddtrace import Pin + from ddtrace.trace import Pin myaioredis = aioredis.Aioredis() Pin.override(myaioredis, service="myaioredis") diff --git a/ddtrace/contrib/anthropic/__init__.py b/ddtrace/contrib/anthropic/__init__.py index c43ee4bb43c..f2d8ea8f353 100644 --- a/ddtrace/contrib/anthropic/__init__.py +++ b/ddtrace/contrib/anthropic/__init__.py @@ -76,7 +76,8 @@ ``Pin`` API:: import anthropic - from ddtrace import Pin, config + from ddtrace import config + from ddtrace.trace import Pin Pin.override(anthropic, service="my-anthropic-service") """ # noqa: E501 diff --git a/ddtrace/contrib/aredis/__init__.py b/ddtrace/contrib/aredis/__init__.py index 1af4c5db664..8448740104f 100644 --- a/ddtrace/contrib/aredis/__init__.py +++ b/ddtrace/contrib/aredis/__init__.py @@ -53,7 +53,7 @@ To configure particular aredis instances use the :class:`Pin ` API:: import aredis - from ddtrace import Pin + from ddtrace.trace import Pin client = aredis.StrictRedis(host="localhost", port=6379) diff --git a/ddtrace/contrib/asyncpg/__init__.py b/ddtrace/contrib/asyncpg/__init__.py index c8e56511469..029cfd97790 100644 --- a/ddtrace/contrib/asyncpg/__init__.py +++ b/ddtrace/contrib/asyncpg/__init__.py @@ -38,7 +38,7 @@ basis use the ``Pin`` API:: import asyncpg - from ddtrace import Pin + from ddtrace.trace import Pin conn = asyncpg.connect("postgres://localhost:5432") Pin.override(conn, service="custom-service") diff --git a/ddtrace/contrib/cassandra/__init__.py b/ddtrace/contrib/cassandra/__init__.py index 1d0b6ad0afd..bcce866ad27 100644 --- a/ddtrace/contrib/cassandra/__init__.py +++ b/ddtrace/contrib/cassandra/__init__.py @@ -3,7 +3,8 @@ ``import ddtrace.auto`` will automatically patch your Cluster instance to make it work. 
:: - from ddtrace import Pin, patch + from ddtrace import patch + from ddtrace.trace import Pin from cassandra.cluster import Cluster # If not patched yet, you can patch cassandra specifically diff --git a/ddtrace/contrib/consul/__init__.py b/ddtrace/contrib/consul/__init__.py index a6317d0bce0..433c70c0e80 100644 --- a/ddtrace/contrib/consul/__init__.py +++ b/ddtrace/contrib/consul/__init__.py @@ -5,7 +5,8 @@ ``import ddtrace.auto`` will automatically patch your Consul client to make it work. :: - from ddtrace import Pin, patch + from ddtrace import patch + from ddtrace.trace import Pin import consul # If not patched yet, you can patch consul specifically diff --git a/ddtrace/contrib/dbapi/__init__.py b/ddtrace/contrib/dbapi/__init__.py index 358b928eadd..0b772ac04ec 100644 --- a/ddtrace/contrib/dbapi/__init__.py +++ b/ddtrace/contrib/dbapi/__init__.py @@ -20,7 +20,7 @@ from ...ext import SpanTypes from ...ext import db from ...ext import sql -from ...pin import Pin +from ...trace import Pin from ..trace_utils import ext_service from ..trace_utils import iswrapped diff --git a/ddtrace/contrib/dbapi_async/__init__.py b/ddtrace/contrib/dbapi_async/__init__.py index 6528d2b348a..d0c43fc1c2b 100644 --- a/ddtrace/contrib/dbapi_async/__init__.py +++ b/ddtrace/contrib/dbapi_async/__init__.py @@ -13,7 +13,7 @@ from ...constants import SPAN_MEASURED_KEY from ...ext import SpanKind from ...ext import SpanTypes -from ...pin import Pin +from ...trace import Pin from ..dbapi import TracedConnection from ..dbapi import TracedCursor from ..trace_utils import ext_service diff --git a/ddtrace/contrib/google_generativeai/__init__.py b/ddtrace/contrib/google_generativeai/__init__.py index d63a1134ab2..5066fc4f9a2 100644 --- a/ddtrace/contrib/google_generativeai/__init__.py +++ b/ddtrace/contrib/google_generativeai/__init__.py @@ -73,7 +73,8 @@ ``Pin`` API:: import google.generativeai as genai - from ddtrace import Pin, config + from ddtrace import config + from ddtrace.trace import 
Pin Pin.override(genai, service="my-gemini-service") """ # noqa: E501 diff --git a/ddtrace/contrib/graphql/__init__.py b/ddtrace/contrib/graphql/__init__.py index 5394f243533..e7ad66745d4 100644 --- a/ddtrace/contrib/graphql/__init__.py +++ b/ddtrace/contrib/graphql/__init__.py @@ -39,7 +39,7 @@ To configure the graphql integration using the ``Pin`` API:: - from ddtrace import Pin + from ddtrace.trace import Pin import graphql Pin.override(graphql, service="mygraphql") diff --git a/ddtrace/contrib/grpc/__init__.py b/ddtrace/contrib/grpc/__init__.py index ff5adb86aea..c746edb17cc 100644 --- a/ddtrace/contrib/grpc/__init__.py +++ b/ddtrace/contrib/grpc/__init__.py @@ -45,7 +45,8 @@ ``Pin`` API:: import grpc - from ddtrace import Pin, patch, Tracer + from ddtrace import patch + from ddtrace.trace import Pin, Tracer patch(grpc=True) custom_tracer = Tracer() @@ -61,7 +62,8 @@ import grpc from grpc.framework.foundation import logging_pool - from ddtrace import Pin, patch, Tracer + from ddtrace import patch + from ddtrace.trace import Pin, Tracer patch(grpc=True) custom_tracer = Tracer() diff --git a/ddtrace/contrib/httpx/__init__.py b/ddtrace/contrib/httpx/__init__.py index 118d0a738b5..28621de44f2 100644 --- a/ddtrace/contrib/httpx/__init__.py +++ b/ddtrace/contrib/httpx/__init__.py @@ -60,7 +60,7 @@ To configure particular ``httpx`` client instances use the :class:`Pin ` API:: import httpx - from ddtrace import Pin + from ddtrace.trace import Pin client = httpx.Client() # Override service name for this instance diff --git a/ddtrace/contrib/internal/aiobotocore/patch.py b/ddtrace/contrib/internal/aiobotocore/patch.py index c5dafcaaa41..7431bd5c592 100644 --- a/ddtrace/contrib/internal/aiobotocore/patch.py +++ b/ddtrace/contrib/internal/aiobotocore/patch.py @@ -21,7 +21,7 @@ from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.formats import deep_getattr from ddtrace.internal.utils.version import parse_version -from ddtrace.pin import Pin +from 
ddtrace.trace import Pin aiobotocore_version_str = getattr(aiobotocore, "__version__", "") diff --git a/ddtrace/contrib/internal/aiohttp/patch.py b/ddtrace/contrib/internal/aiohttp/patch.py index 13b55ecb4fb..e0f0bc869e9 100644 --- a/ddtrace/contrib/internal/aiohttp/patch.py +++ b/ddtrace/contrib/internal/aiohttp/patch.py @@ -21,8 +21,8 @@ from ddtrace.internal.schema.span_attribute_schema import SpanDirection from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils.formats import asbool -from ddtrace.pin import Pin from ddtrace.propagation.http import HTTPPropagator +from ddtrace.trace import Pin log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/aiohttp_jinja2/patch.py b/ddtrace/contrib/internal/aiohttp_jinja2/patch.py index 284352b54f0..84553899c39 100644 --- a/ddtrace/contrib/internal/aiohttp_jinja2/patch.py +++ b/ddtrace/contrib/internal/aiohttp_jinja2/patch.py @@ -1,6 +1,5 @@ import aiohttp_jinja2 -from ddtrace import Pin from ddtrace import config from ddtrace.contrib.trace_utils import unwrap from ddtrace.contrib.trace_utils import with_traced_module @@ -8,6 +7,7 @@ from ddtrace.ext import SpanTypes from ddtrace.internal.constants import COMPONENT from ddtrace.internal.utils import get_argument_value +from ddtrace.trace import Pin config._add( diff --git a/ddtrace/contrib/internal/aiomysql/patch.py b/ddtrace/contrib/internal/aiomysql/patch.py index 7f090b4c71d..0053e4f8a5b 100644 --- a/ddtrace/contrib/internal/aiomysql/patch.py +++ b/ddtrace/contrib/internal/aiomysql/patch.py @@ -1,7 +1,6 @@ import aiomysql import wrapt -from ddtrace import Pin from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY from ddtrace.constants import SPAN_KIND @@ -18,6 +17,7 @@ from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.utils.wrappers import unwrap from ddtrace.propagation._database_monitoring import _DBM_Propagator +from ddtrace.trace import Pin config._add( diff --git 
a/ddtrace/contrib/internal/aiopg/connection.py b/ddtrace/contrib/internal/aiopg/connection.py index b2522ae3888..1daf84b2987 100644 --- a/ddtrace/contrib/internal/aiopg/connection.py +++ b/ddtrace/contrib/internal/aiopg/connection.py @@ -15,7 +15,7 @@ from ddtrace.internal.schema import schematize_database_operation from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.utils.version import parse_version -from ddtrace.pin import Pin +from ddtrace.trace import Pin AIOPG_VERSION = parse_version(__version__) diff --git a/ddtrace/contrib/internal/aioredis/patch.py b/ddtrace/contrib/internal/aioredis/patch.py index 7915f652641..dc6004b9caa 100644 --- a/ddtrace/contrib/internal/aioredis/patch.py +++ b/ddtrace/contrib/internal/aioredis/patch.py @@ -27,7 +27,7 @@ from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.formats import stringify_cache_args from ddtrace.internal.utils.wrappers import unwrap as _u -from ddtrace.pin import Pin +from ddtrace.trace import Pin from ddtrace.vendor.packaging.version import parse as parse_version diff --git a/ddtrace/contrib/internal/algoliasearch/patch.py b/ddtrace/contrib/internal/algoliasearch/patch.py index 5217861409e..e3074225570 100644 --- a/ddtrace/contrib/internal/algoliasearch/patch.py +++ b/ddtrace/contrib/internal/algoliasearch/patch.py @@ -10,7 +10,7 @@ from ddtrace.internal.schema import schematize_cloud_api_operation from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.utils.wrappers import unwrap as _u -from ddtrace.pin import Pin +from ddtrace.trace import Pin from ddtrace.vendor.packaging.version import parse as parse_version diff --git a/ddtrace/contrib/internal/anthropic/patch.py b/ddtrace/contrib/internal/anthropic/patch.py index e82c4421e78..24f72f2b511 100644 --- a/ddtrace/contrib/internal/anthropic/patch.py +++ b/ddtrace/contrib/internal/anthropic/patch.py @@ -18,7 +18,7 @@ from ddtrace.internal.utils import get_argument_value from 
ddtrace.llmobs._integrations import AnthropicIntegration from ddtrace.llmobs._utils import _get_attr -from ddtrace.pin import Pin +from ddtrace.trace import Pin log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/aredis/patch.py b/ddtrace/contrib/internal/aredis/patch.py index c9ba000ea36..bd8c5b4c750 100644 --- a/ddtrace/contrib/internal/aredis/patch.py +++ b/ddtrace/contrib/internal/aredis/patch.py @@ -12,7 +12,7 @@ from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.formats import stringify_cache_args from ddtrace.internal.utils.wrappers import unwrap -from ddtrace.pin import Pin +from ddtrace.trace import Pin config._add( diff --git a/ddtrace/contrib/internal/asgi/middleware.py b/ddtrace/contrib/internal/asgi/middleware.py index 98d352cf75f..2b3e23eb78b 100644 --- a/ddtrace/contrib/internal/asgi/middleware.py +++ b/ddtrace/contrib/internal/asgi/middleware.py @@ -150,7 +150,13 @@ async def __call__(self, scope, receive, send): if scope["type"] == "http": operation_name = schematize_url_operation(operation_name, direction=SpanDirection.INBOUND, protocol="http") - pin = ddtrace.pin.Pin(service="asgi", tracer=self.tracer) + # Calling ddtrace.trace.Pin(...) 
with the `tracer` argument is deprecated + # Remove this if statement when the `tracer` argument is removed + if self.tracer is ddtrace.tracer: + pin = ddtrace.trace.Pin(service="asgi") + else: + pin = ddtrace.trace.Pin(service="asgi", tracer=self.tracer) + with core.context_with_data( "asgi.__call__", remote_addr=scope.get("REMOTE_ADDR"), diff --git a/ddtrace/contrib/internal/asyncio/patch.py b/ddtrace/contrib/internal/asyncio/patch.py index 83f1918e9eb..ed64ca1bf5d 100644 --- a/ddtrace/contrib/internal/asyncio/patch.py +++ b/ddtrace/contrib/internal/asyncio/patch.py @@ -1,10 +1,10 @@ import asyncio -from ddtrace import Pin from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils import set_argument_value from ddtrace.internal.wrapping import unwrap from ddtrace.internal.wrapping import wrap +from ddtrace.trace import Pin def get_version(): diff --git a/ddtrace/contrib/internal/asyncpg/patch.py b/ddtrace/contrib/internal/asyncpg/patch.py index 7b2b269d5f2..ac1347a7de6 100644 --- a/ddtrace/contrib/internal/asyncpg/patch.py +++ b/ddtrace/contrib/internal/asyncpg/patch.py @@ -2,7 +2,7 @@ from types import ModuleType import asyncpg -from ddtrace import Pin +from ddtrace.trace import Pin from ddtrace import config from ddtrace.internal import core from ddtrace.internal.constants import COMPONENT diff --git a/ddtrace/contrib/internal/avro/patch.py b/ddtrace/contrib/internal/avro/patch.py index 6e66fbe20b0..3ef2adbcb0c 100644 --- a/ddtrace/contrib/internal/avro/patch.py +++ b/ddtrace/contrib/internal/avro/patch.py @@ -3,7 +3,7 @@ from ddtrace import config from ddtrace.internal.utils.wrappers import unwrap -from ddtrace.pin import Pin +from ddtrace.trace import Pin from .schema_iterator import SchemaExtractor diff --git a/ddtrace/contrib/internal/azure_functions/patch.py b/ddtrace/contrib/internal/azure_functions/patch.py index 15089a2e733..1c0c658a9eb 100644 --- a/ddtrace/contrib/internal/azure_functions/patch.py +++ 
b/ddtrace/contrib/internal/azure_functions/patch.py @@ -8,7 +8,7 @@ from ddtrace.internal import core from ddtrace.internal.schema import schematize_cloud_faas_operation from ddtrace.internal.schema import schematize_service_name -from ddtrace.pin import Pin +from ddtrace.trace import Pin config._add( diff --git a/ddtrace/contrib/internal/boto/patch.py b/ddtrace/contrib/internal/boto/patch.py index 8551056dfb3..e7418aba878 100644 --- a/ddtrace/contrib/internal/boto/patch.py +++ b/ddtrace/contrib/internal/boto/patch.py @@ -19,7 +19,7 @@ from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.wrappers import unwrap -from ddtrace.pin import Pin +from ddtrace.trace import Pin # Original boto client class diff --git a/ddtrace/contrib/internal/botocore/patch.py b/ddtrace/contrib/internal/botocore/patch.py index febad29f982..07c0bd403e4 100644 --- a/ddtrace/contrib/internal/botocore/patch.py +++ b/ddtrace/contrib/internal/botocore/patch.py @@ -33,8 +33,8 @@ from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.formats import deep_getattr from ddtrace.llmobs._integrations import BedrockIntegration -from ddtrace.pin import Pin from ddtrace.settings.config import Config +from ddtrace.trace import Pin from .services.bedrock import patched_bedrock_api_call from .services.kinesis import patched_kinesis_api_call diff --git a/ddtrace/contrib/internal/cassandra/session.py b/ddtrace/contrib/internal/cassandra/session.py index 33d307c13c7..7f02d8c0af6 100644 --- a/ddtrace/contrib/internal/cassandra/session.py +++ b/ddtrace/contrib/internal/cassandra/session.py @@ -39,7 +39,7 @@ from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils.formats import deep_getattr -from ddtrace.pin import Pin +from ddtrace.trace import Pin log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/celery/app.py 
b/ddtrace/contrib/internal/celery/app.py index 42eed2cb468..54ad5834769 100644 --- a/ddtrace/contrib/internal/celery/app.py +++ b/ddtrace/contrib/internal/celery/app.py @@ -3,8 +3,8 @@ import celery from celery import signals -from ddtrace import Pin from ddtrace import config +from ddtrace._trace.pin import _DD_PIN_NAME from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY from ddtrace.constants import SPAN_KIND from ddtrace.constants import SPAN_MEASURED_KEY @@ -19,7 +19,7 @@ from ddtrace.ext import SpanTypes from ddtrace.internal import core from ddtrace.internal.logger import get_logger -from ddtrace.pin import _DD_PIN_NAME +from ddtrace.trace import Pin log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/celery/signals.py b/ddtrace/contrib/internal/celery/signals.py index 8f27fcc53b0..ea9d8c15863 100644 --- a/ddtrace/contrib/internal/celery/signals.py +++ b/ddtrace/contrib/internal/celery/signals.py @@ -3,7 +3,6 @@ from celery import current_app from celery import registry -from ddtrace import Pin from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY from ddtrace.constants import SPAN_KIND @@ -24,6 +23,7 @@ from ddtrace.internal.constants import COMPONENT from ddtrace.internal.logger import get_logger from ddtrace.propagation.http import HTTPPropagator +from ddtrace.trace import Pin log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/consul/patch.py b/ddtrace/contrib/internal/consul/patch.py index b4725e807ba..b24b138b632 100644 --- a/ddtrace/contrib/internal/consul/patch.py +++ b/ddtrace/contrib/internal/consul/patch.py @@ -15,7 +15,7 @@ from ddtrace.internal.schema.span_attribute_schema import SpanDirection from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils.wrappers import unwrap as _u -from ddtrace.pin import Pin +from ddtrace.trace import Pin _KV_FUNCS = ["put", "get", "delete"] diff --git a/ddtrace/contrib/internal/django/patch.py 
b/ddtrace/contrib/internal/django/patch.py index d4b14487e39..98a6163a6e5 100644 --- a/ddtrace/contrib/internal/django/patch.py +++ b/ddtrace/contrib/internal/django/patch.py @@ -17,7 +17,6 @@ import wrapt from wrapt.importer import when_imported -from ddtrace import Pin from ddtrace import config from ddtrace.appsec._utils import _UserInfoRetriever from ddtrace.constants import SPAN_KIND @@ -49,6 +48,7 @@ from ddtrace.propagation._database_monitoring import _DBM_Propagator from ddtrace.settings.asm import config as asm_config from ddtrace.settings.integration import IntegrationConfig +from ddtrace.trace import Pin from ddtrace.vendor.packaging.version import parse as parse_version diff --git a/ddtrace/contrib/internal/dogpile_cache/lock.py b/ddtrace/contrib/internal/dogpile_cache/lock.py index c592562f94f..76cdc2eb839 100644 --- a/ddtrace/contrib/internal/dogpile_cache/lock.py +++ b/ddtrace/contrib/internal/dogpile_cache/lock.py @@ -1,7 +1,7 @@ import dogpile from ddtrace.internal.utils.formats import asbool -from ddtrace.pin import Pin +from ddtrace.trace import Pin def _wrap_lock_ctor(func, instance, args, kwargs): diff --git a/ddtrace/contrib/internal/dogpile_cache/patch.py b/ddtrace/contrib/internal/dogpile_cache/patch.py index f4f41284a29..f78ea5cb23f 100644 --- a/ddtrace/contrib/internal/dogpile_cache/patch.py +++ b/ddtrace/contrib/internal/dogpile_cache/patch.py @@ -7,10 +7,10 @@ from wrapt import wrap_function_wrapper as _w +from ddtrace._trace.pin import _DD_PIN_NAME +from ddtrace._trace.pin import _DD_PIN_PROXY_NAME from ddtrace.internal.schema import schematize_service_name -from ddtrace.pin import _DD_PIN_NAME -from ddtrace.pin import _DD_PIN_PROXY_NAME -from ddtrace.pin import Pin +from ddtrace.trace import Pin from .lock import _wrap_lock_ctor from .region import _wrap_get_create diff --git a/ddtrace/contrib/internal/dogpile_cache/region.py b/ddtrace/contrib/internal/dogpile_cache/region.py index 04b70402e3d..0c89d2d84d9 100644 --- 
a/ddtrace/contrib/internal/dogpile_cache/region.py +++ b/ddtrace/contrib/internal/dogpile_cache/region.py @@ -7,7 +7,7 @@ from ddtrace.internal.schema import schematize_cache_operation from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.utils import get_argument_value -from ddtrace.pin import Pin +from ddtrace.trace import Pin def _wrap_get_create(func, instance, args, kwargs): diff --git a/ddtrace/contrib/internal/elasticsearch/patch.py b/ddtrace/contrib/internal/elasticsearch/patch.py index 455d0678d02..7c408db55a5 100644 --- a/ddtrace/contrib/internal/elasticsearch/patch.py +++ b/ddtrace/contrib/internal/elasticsearch/patch.py @@ -21,7 +21,7 @@ from ddtrace.internal.logger import get_logger from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.utils.wrappers import unwrap as _u -from ddtrace.pin import Pin +from ddtrace.trace import Pin log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/fastapi/patch.py b/ddtrace/contrib/internal/fastapi/patch.py index b431f3c83f8..485c0424a5f 100644 --- a/ddtrace/contrib/internal/fastapi/patch.py +++ b/ddtrace/contrib/internal/fastapi/patch.py @@ -5,7 +5,6 @@ from wrapt import ObjectProxy from wrapt import wrap_function_wrapper as _w -from ddtrace import Pin from ddtrace import config from ddtrace.appsec._iast._utils import _is_iast_enabled from ddtrace.contrib.internal.asgi.middleware import TraceMiddleware @@ -15,6 +14,7 @@ from ddtrace.internal.logger import get_logger from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.utils.wrappers import unwrap as _u +from ddtrace.trace import Pin log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/flask/patch.py b/ddtrace/contrib/internal/flask/patch.py index 429a9d05667..010df5218c5 100644 --- a/ddtrace/contrib/internal/flask/patch.py +++ b/ddtrace/contrib/internal/flask/patch.py @@ -29,7 +29,6 @@ from wrapt import wrap_function_wrapper as _w -from ddtrace import Pin from 
ddtrace import config from ddtrace.contrib.internal.wsgi.wsgi import _DDWSGIMiddlewareBase from ddtrace.contrib.trace_utils import unwrap as _u @@ -37,6 +36,7 @@ from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils.importlib import func_name from ddtrace.internal.utils.version import parse_version +from ddtrace.trace import Pin from .wrappers import _wrap_call_with_pin_check from .wrappers import get_current_app diff --git a/ddtrace/contrib/internal/flask/wrappers.py b/ddtrace/contrib/internal/flask/wrappers.py index 3aca2a1466a..d65697224ba 100644 --- a/ddtrace/contrib/internal/flask/wrappers.py +++ b/ddtrace/contrib/internal/flask/wrappers.py @@ -7,7 +7,7 @@ from ddtrace.internal.constants import COMPONENT from ddtrace.internal.logger import get_logger from ddtrace.internal.utils.importlib import func_name -from ddtrace.pin import Pin +from ddtrace.trace import Pin log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/google_generativeai/patch.py b/ddtrace/contrib/internal/google_generativeai/patch.py index 29bc18dc756..3564f9ec1ec 100644 --- a/ddtrace/contrib/internal/google_generativeai/patch.py +++ b/ddtrace/contrib/internal/google_generativeai/patch.py @@ -14,7 +14,7 @@ from ddtrace.contrib.trace_utils import wrap from ddtrace.llmobs._integrations import GeminiIntegration from ddtrace.llmobs._integrations.utils import extract_model_name_google -from ddtrace.pin import Pin +from ddtrace.trace import Pin config._add( diff --git a/ddtrace/contrib/internal/graphql/patch.py b/ddtrace/contrib/internal/graphql/patch.py index b54df97e520..18916f4222a 100644 --- a/ddtrace/contrib/internal/graphql/patch.py +++ b/ddtrace/contrib/internal/graphql/patch.py @@ -39,7 +39,7 @@ from ddtrace.internal.utils.version import parse_version from ddtrace.internal.wrapping import unwrap from ddtrace.internal.wrapping import wrap -from ddtrace.pin import Pin +from ddtrace.trace import Pin _graphql_version_str = graphql.__version__ diff --git 
a/ddtrace/contrib/internal/grpc/aio_client_interceptor.py b/ddtrace/contrib/internal/grpc/aio_client_interceptor.py index bf6f156de7e..5c03d1b8527 100644 --- a/ddtrace/contrib/internal/grpc/aio_client_interceptor.py +++ b/ddtrace/contrib/internal/grpc/aio_client_interceptor.py @@ -11,7 +11,6 @@ from grpc.aio._typing import ResponseIterableType from grpc.aio._typing import ResponseType -from ddtrace import Pin from ddtrace import Span from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY @@ -30,6 +29,7 @@ from ddtrace.internal.schema import schematize_url_operation from ddtrace.internal.schema.span_attribute_schema import SpanDirection from ddtrace.propagation.http import HTTPPropagator +from ddtrace.trace import Pin log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/grpc/aio_server_interceptor.py b/ddtrace/contrib/internal/grpc/aio_server_interceptor.py index 2361e3c3be9..d5ec9ed32ab 100644 --- a/ddtrace/contrib/internal/grpc/aio_server_interceptor.py +++ b/ddtrace/contrib/internal/grpc/aio_server_interceptor.py @@ -13,7 +13,6 @@ from grpc.aio._typing import ResponseType import wrapt -from ddtrace import Pin # noqa:F401 from ddtrace import Span # noqa:F401 from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY @@ -30,6 +29,7 @@ from ddtrace.internal.constants import COMPONENT from ddtrace.internal.schema import schematize_url_operation from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.trace import Pin # noqa:F401 Continuation = Callable[[grpc.HandlerCallDetails], Awaitable[grpc.RpcMethodHandler]] diff --git a/ddtrace/contrib/internal/grpc/patch.py b/ddtrace/contrib/internal/grpc/patch.py index 9c41c5cc342..122893b030f 100644 --- a/ddtrace/contrib/internal/grpc/patch.py +++ b/ddtrace/contrib/internal/grpc/patch.py @@ -1,7 +1,6 @@ import grpc from wrapt import wrap_function_wrapper as _w -from ddtrace import Pin from ddtrace import config from 
ddtrace.contrib.internal.grpc import constants from ddtrace.contrib.internal.grpc import utils @@ -13,6 +12,7 @@ from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils import set_argument_value +from ddtrace.trace import Pin log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/httplib/patch.py b/ddtrace/contrib/internal/httplib/patch.py index e42241a2ca2..3e354aeedea 100644 --- a/ddtrace/contrib/internal/httplib/patch.py +++ b/ddtrace/contrib/internal/httplib/patch.py @@ -20,9 +20,9 @@ from ddtrace.internal.schema import schematize_url_operation from ddtrace.internal.schema.span_attribute_schema import SpanDirection from ddtrace.internal.utils.formats import asbool -from ddtrace.pin import Pin from ddtrace.propagation.http import HTTPPropagator from ddtrace.settings.asm import config as asm_config +from ddtrace.trace import Pin span_name = "http.client.request" diff --git a/ddtrace/contrib/internal/httpx/patch.py b/ddtrace/contrib/internal/httpx/patch.py index e6d1893880f..8a9e4eebc3a 100644 --- a/ddtrace/contrib/internal/httpx/patch.py +++ b/ddtrace/contrib/internal/httpx/patch.py @@ -22,8 +22,8 @@ from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.version import parse_version from ddtrace.internal.utils.wrappers import unwrap as _u -from ddtrace.pin import Pin from ddtrace.propagation.http import HTTPPropagator +from ddtrace.trace import Pin HTTPX_VERSION = parse_version(httpx.__version__) diff --git a/ddtrace/contrib/internal/jinja2/patch.py b/ddtrace/contrib/internal/jinja2/patch.py index 83aad083747..cdf1254527d 100644 --- a/ddtrace/contrib/internal/jinja2/patch.py +++ b/ddtrace/contrib/internal/jinja2/patch.py @@ -10,7 +10,7 @@ from ddtrace.internal.constants import COMPONENT from ddtrace.internal.utils import ArgumentError from ddtrace.internal.utils import get_argument_value -from ddtrace.pin import Pin +from ddtrace.trace import Pin 
from .constants import DEFAULT_TEMPLATE_NAME diff --git a/ddtrace/contrib/internal/kafka/patch.py b/ddtrace/contrib/internal/kafka/patch.py index 339e2469914..6f69cda3239 100644 --- a/ddtrace/contrib/internal/kafka/patch.py +++ b/ddtrace/contrib/internal/kafka/patch.py @@ -24,8 +24,8 @@ from ddtrace.internal.utils import set_argument_value from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.version import parse_version -from ddtrace.pin import Pin from ddtrace.propagation.http import HTTPPropagator as Propagator +from ddtrace.trace import Pin _Producer = confluent_kafka.Producer diff --git a/ddtrace/contrib/internal/kombu/patch.py b/ddtrace/contrib/internal/kombu/patch.py index fd571fd445c..fa63e5c4f86 100644 --- a/ddtrace/contrib/internal/kombu/patch.py +++ b/ddtrace/contrib/internal/kombu/patch.py @@ -22,8 +22,8 @@ from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.wrappers import unwrap -from ddtrace.pin import Pin from ddtrace.propagation.http import HTTPPropagator +from ddtrace.trace import Pin from .constants import DEFAULT_SERVICE from .utils import HEADER_POS diff --git a/ddtrace/contrib/internal/langchain/patch.py b/ddtrace/contrib/internal/langchain/patch.py index b7513539da7..f9c58249cb2 100644 --- a/ddtrace/contrib/internal/langchain/patch.py +++ b/ddtrace/contrib/internal/langchain/patch.py @@ -62,7 +62,7 @@ from ddtrace.internal.utils.version import parse_version from ddtrace.llmobs._integrations import LangChainIntegration from ddtrace.llmobs._utils import safe_json -from ddtrace.pin import Pin +from ddtrace.trace import Pin log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/mako/patch.py b/ddtrace/contrib/internal/mako/patch.py index 7db3b2e47df..d39a51238a2 100644 --- a/ddtrace/contrib/internal/mako/patch.py +++ b/ddtrace/contrib/internal/mako/patch.py @@ -11,7 +11,7 @@ from ddtrace.internal.constants import COMPONENT from 
ddtrace.internal.schema import schematize_service_name from ddtrace.internal.utils.importlib import func_name -from ddtrace.pin import Pin +from ddtrace.trace import Pin from .constants import DEFAULT_TEMPLATE_NAME diff --git a/ddtrace/contrib/internal/mariadb/patch.py b/ddtrace/contrib/internal/mariadb/patch.py index b4ab267c5e3..1307403f6d4 100644 --- a/ddtrace/contrib/internal/mariadb/patch.py +++ b/ddtrace/contrib/internal/mariadb/patch.py @@ -3,7 +3,6 @@ import mariadb import wrapt -from ddtrace import Pin from ddtrace import config from ddtrace.contrib.dbapi import TracedConnection from ddtrace.ext import db @@ -11,6 +10,7 @@ from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.wrappers import unwrap +from ddtrace.trace import Pin config._add( diff --git a/ddtrace/contrib/internal/molten/patch.py b/ddtrace/contrib/internal/molten/patch.py index fd6fa53b195..7c60d37d0d6 100644 --- a/ddtrace/contrib/internal/molten/patch.py +++ b/ddtrace/contrib/internal/molten/patch.py @@ -4,7 +4,6 @@ import wrapt from wrapt import wrap_function_wrapper as _w -from ddtrace import Pin from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY from ddtrace.constants import SPAN_KIND @@ -21,6 +20,7 @@ from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.importlib import func_name from ddtrace.internal.utils.version import parse_version +from ddtrace.trace import Pin from .wrappers import MOLTEN_ROUTE from .wrappers import WrapperComponent diff --git a/ddtrace/contrib/internal/molten/wrappers.py b/ddtrace/contrib/internal/molten/wrappers.py index 7446224fe45..0a3e325ca0b 100644 --- a/ddtrace/contrib/internal/molten/wrappers.py +++ b/ddtrace/contrib/internal/molten/wrappers.py @@ -1,7 +1,6 @@ import molten import wrapt -from ddtrace import Pin from ddtrace import config from ddtrace.constants import SPAN_KIND from ddtrace.contrib import trace_utils 
@@ -9,6 +8,7 @@ from ddtrace.ext import http from ddtrace.internal.constants import COMPONENT from ddtrace.internal.utils.importlib import func_name +from ddtrace.trace import Pin MOLTEN_ROUTE = "molten.route" diff --git a/ddtrace/contrib/internal/mysql/patch.py b/ddtrace/contrib/internal/mysql/patch.py index 2d5a8500cb3..d18d357d107 100644 --- a/ddtrace/contrib/internal/mysql/patch.py +++ b/ddtrace/contrib/internal/mysql/patch.py @@ -3,7 +3,6 @@ import mysql.connector import wrapt -from ddtrace import Pin from ddtrace import config from ddtrace.appsec._iast._metrics import _set_metric_iast_instrumented_sink from ddtrace.appsec._iast.constants import VULN_SQL_INJECTION @@ -16,6 +15,7 @@ from ddtrace.internal.utils.formats import asbool from ddtrace.propagation._database_monitoring import _DBM_Propagator from ddtrace.settings.asm import config as asm_config +from ddtrace.trace import Pin config._add( diff --git a/ddtrace/contrib/internal/mysqldb/patch.py b/ddtrace/contrib/internal/mysqldb/patch.py index 291d6cb865e..8b6aa7bb7f2 100644 --- a/ddtrace/contrib/internal/mysqldb/patch.py +++ b/ddtrace/contrib/internal/mysqldb/patch.py @@ -3,7 +3,6 @@ import MySQLdb from wrapt import wrap_function_wrapper as _w -from ddtrace import Pin from ddtrace import config from ddtrace.appsec._iast._metrics import _set_metric_iast_instrumented_sink from ddtrace.appsec._iast.constants import VULN_SQL_INJECTION @@ -23,6 +22,7 @@ from ddtrace.internal.utils.wrappers import unwrap as _u from ddtrace.propagation._database_monitoring import _DBM_Propagator from ddtrace.settings.asm import config as asm_config +from ddtrace.trace import Pin config._add( diff --git a/ddtrace/contrib/internal/openai/patch.py b/ddtrace/contrib/internal/openai/patch.py index 4ad76a17084..d87b06b3aba 100644 --- a/ddtrace/contrib/internal/openai/patch.py +++ b/ddtrace/contrib/internal/openai/patch.py @@ -13,7 +13,7 @@ from ddtrace.internal.utils.version import parse_version from ddtrace.internal.wrapping import 
wrap from ddtrace.llmobs._integrations import OpenAIIntegration -from ddtrace.pin import Pin +from ddtrace.trace import Pin log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/protobuf/patch.py b/ddtrace/contrib/internal/protobuf/patch.py index 607c29eb1c0..8ecdd7aefa5 100644 --- a/ddtrace/contrib/internal/protobuf/patch.py +++ b/ddtrace/contrib/internal/protobuf/patch.py @@ -4,7 +4,7 @@ from ddtrace import config from ddtrace.internal.utils.wrappers import unwrap -from ddtrace.pin import Pin +from ddtrace.trace import Pin from .schema_iterator import SchemaExtractor diff --git a/ddtrace/contrib/internal/psycopg/async_connection.py b/ddtrace/contrib/internal/psycopg/async_connection.py index 14ec854ffd1..72c8d70e7ec 100644 --- a/ddtrace/contrib/internal/psycopg/async_connection.py +++ b/ddtrace/contrib/internal/psycopg/async_connection.py @@ -1,4 +1,3 @@ -from ddtrace import Pin from ddtrace import config from ddtrace.constants import SPAN_KIND from ddtrace.constants import SPAN_MEASURED_KEY @@ -11,6 +10,7 @@ from ddtrace.ext import SpanTypes from ddtrace.ext import db from ddtrace.internal.constants import COMPONENT +from ddtrace.trace import Pin class Psycopg3TracedAsyncConnection(dbapi_async.TracedAsyncConnection): diff --git a/ddtrace/contrib/internal/psycopg/connection.py b/ddtrace/contrib/internal/psycopg/connection.py index c823e17dc61..a5e5353ad13 100644 --- a/ddtrace/contrib/internal/psycopg/connection.py +++ b/ddtrace/contrib/internal/psycopg/connection.py @@ -1,4 +1,3 @@ -from ddtrace import Pin from ddtrace import config from ddtrace.constants import SPAN_KIND from ddtrace.constants import SPAN_MEASURED_KEY @@ -15,6 +14,7 @@ from ddtrace.ext import net from ddtrace.ext import sql from ddtrace.internal.constants import COMPONENT +from ddtrace.trace import Pin class Psycopg3TracedConnection(dbapi.TracedConnection): diff --git a/ddtrace/contrib/internal/psycopg/patch.py b/ddtrace/contrib/internal/psycopg/patch.py index 7da5c1c73c7..9e24cee6696 
100644 --- a/ddtrace/contrib/internal/psycopg/patch.py +++ b/ddtrace/contrib/internal/psycopg/patch.py @@ -3,9 +3,9 @@ import os from typing import List # noqa:F401 -from ddtrace import Pin from ddtrace import config from ddtrace.contrib import dbapi +from ddtrace.trace import Pin try: diff --git a/ddtrace/contrib/internal/pymemcache/client.py b/ddtrace/contrib/internal/pymemcache/client.py index 18a46a41053..37e14842a94 100644 --- a/ddtrace/contrib/internal/pymemcache/client.py +++ b/ddtrace/contrib/internal/pymemcache/client.py @@ -29,7 +29,7 @@ from ddtrace.internal.logger import get_logger from ddtrace.internal.schema import schematize_cache_operation from ddtrace.internal.utils.formats import asbool -from ddtrace.pin import Pin +from ddtrace.trace import Pin log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/pymemcache/patch.py b/ddtrace/contrib/internal/pymemcache/patch.py index 07402680e9e..dd3687cd248 100644 --- a/ddtrace/contrib/internal/pymemcache/patch.py +++ b/ddtrace/contrib/internal/pymemcache/patch.py @@ -1,11 +1,11 @@ import pymemcache import pymemcache.client.hash +from ddtrace._trace.pin import _DD_PIN_NAME +from ddtrace._trace.pin import _DD_PIN_PROXY_NAME +from ddtrace._trace.pin import Pin from ddtrace.ext import memcached as memcachedx from ddtrace.internal.schema import schematize_service_name -from ddtrace.pin import _DD_PIN_NAME -from ddtrace.pin import _DD_PIN_PROXY_NAME -from ddtrace.pin import Pin from .client import WrappedClient from .client import WrappedHashClient diff --git a/ddtrace/contrib/internal/pymongo/client.py b/ddtrace/contrib/internal/pymongo/client.py index d5b2530d1f7..426d205f9da 100644 --- a/ddtrace/contrib/internal/pymongo/client.py +++ b/ddtrace/contrib/internal/pymongo/client.py @@ -10,7 +10,6 @@ # project import ddtrace -from ddtrace import Pin from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY from ddtrace.constants import SPAN_KIND @@ -26,6 +25,7 @@ from 
ddtrace.internal.schema import schematize_database_operation from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.utils import get_argument_value +from ddtrace.trace import Pin from .parse import parse_msg from .parse import parse_query diff --git a/ddtrace/contrib/internal/pymongo/patch.py b/ddtrace/contrib/internal/pymongo/patch.py index 0c0927ffea1..200a4a902b8 100644 --- a/ddtrace/contrib/internal/pymongo/patch.py +++ b/ddtrace/contrib/internal/pymongo/patch.py @@ -2,7 +2,6 @@ import pymongo -from ddtrace import Pin from ddtrace import config from ddtrace.constants import SPAN_KIND from ddtrace.constants import SPAN_MEASURED_KEY @@ -15,6 +14,7 @@ from ddtrace.internal.utils import get_argument_value from ddtrace.internal.wrapping import unwrap as _u from ddtrace.internal.wrapping import wrap as _w +from ddtrace.trace import Pin from ....internal.schema import schematize_service_name diff --git a/ddtrace/contrib/internal/pymysql/patch.py b/ddtrace/contrib/internal/pymysql/patch.py index 00fee4f5ad7..a9a16d50608 100644 --- a/ddtrace/contrib/internal/pymysql/patch.py +++ b/ddtrace/contrib/internal/pymysql/patch.py @@ -3,7 +3,6 @@ import pymysql import wrapt -from ddtrace import Pin from ddtrace import config from ddtrace.contrib.dbapi import TracedConnection from ddtrace.contrib.trace_utils import _convert_to_string @@ -13,6 +12,7 @@ from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.utils.formats import asbool from ddtrace.propagation._database_monitoring import _DBM_Propagator +from ddtrace.trace import Pin config._add( diff --git a/ddtrace/contrib/internal/pynamodb/patch.py b/ddtrace/contrib/internal/pynamodb/patch.py index 15e1874ee77..be4ba00c893 100644 --- a/ddtrace/contrib/internal/pynamodb/patch.py +++ b/ddtrace/contrib/internal/pynamodb/patch.py @@ -20,7 +20,7 @@ from ddtrace.internal.utils import ArgumentError from ddtrace.internal.utils import get_argument_value from 
ddtrace.internal.utils.formats import deep_getattr -from ddtrace.pin import Pin +from ddtrace.trace import Pin # Pynamodb connection class diff --git a/ddtrace/contrib/internal/pyodbc/patch.py b/ddtrace/contrib/internal/pyodbc/patch.py index 40b561d2f53..180895a202e 100644 --- a/ddtrace/contrib/internal/pyodbc/patch.py +++ b/ddtrace/contrib/internal/pyodbc/patch.py @@ -2,7 +2,6 @@ import pyodbc -from ddtrace import Pin from ddtrace import config from ddtrace.contrib.dbapi import TracedConnection from ddtrace.contrib.dbapi import TracedCursor @@ -11,6 +10,7 @@ from ddtrace.ext import db from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.utils.formats import asbool +from ddtrace.trace import Pin config._add( diff --git a/ddtrace/contrib/internal/redis/asyncio_patch.py b/ddtrace/contrib/internal/redis/asyncio_patch.py index 0115096ba0f..7c5bad354ab 100644 --- a/ddtrace/contrib/internal/redis/asyncio_patch.py +++ b/ddtrace/contrib/internal/redis/asyncio_patch.py @@ -4,7 +4,7 @@ from ddtrace._trace.utils_redis import _instrument_redis_execute_pipeline from ddtrace.contrib.redis_utils import _run_redis_command_async from ddtrace.internal.utils.formats import stringify_cache_args -from ddtrace.pin import Pin +from ddtrace.trace import Pin async def instrumented_async_execute_command(func, instance, args, kwargs): diff --git a/ddtrace/contrib/internal/redis/patch.py b/ddtrace/contrib/internal/redis/patch.py index 18b23fd68fa..33520e5894d 100644 --- a/ddtrace/contrib/internal/redis/patch.py +++ b/ddtrace/contrib/internal/redis/patch.py @@ -14,7 +14,7 @@ from ddtrace.internal.utils.formats import CMD_MAX_LEN from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.formats import stringify_cache_args -from ddtrace.pin import Pin +from ddtrace.trace import Pin config._add( diff --git a/ddtrace/contrib/internal/rediscluster/patch.py b/ddtrace/contrib/internal/rediscluster/patch.py index a415096ef10..c550df7e9ea 100644 --- 
a/ddtrace/contrib/internal/rediscluster/patch.py +++ b/ddtrace/contrib/internal/rediscluster/patch.py @@ -23,7 +23,7 @@ from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.formats import stringify_cache_args from ddtrace.internal.utils.wrappers import unwrap -from ddtrace.pin import Pin +from ddtrace.trace import Pin # DEV: In `2.0.0` `__version__` is a string and `VERSION` is a tuple, diff --git a/ddtrace/contrib/internal/requests/patch.py b/ddtrace/contrib/internal/requests/patch.py index a5867662d78..d4ec1f5182d 100644 --- a/ddtrace/contrib/internal/requests/patch.py +++ b/ddtrace/contrib/internal/requests/patch.py @@ -10,8 +10,8 @@ from ddtrace.contrib.trace_utils import unwrap as _u from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.utils.formats import asbool -from ddtrace.pin import Pin from ddtrace.settings.asm import config as asm_config +from ddtrace.trace import Pin from .connection import _wrap_send diff --git a/ddtrace/contrib/internal/requests/session.py b/ddtrace/contrib/internal/requests/session.py index 9551c70226c..783dda4ff7a 100644 --- a/ddtrace/contrib/internal/requests/session.py +++ b/ddtrace/contrib/internal/requests/session.py @@ -1,8 +1,8 @@ import requests from wrapt import wrap_function_wrapper as _w -from ddtrace import Pin from ddtrace import config +from ddtrace.trace import Pin from .connection import _wrap_send diff --git a/ddtrace/contrib/internal/rq/patch.py b/ddtrace/contrib/internal/rq/patch.py index a6b54b28f27..c1f39431f57 100644 --- a/ddtrace/contrib/internal/rq/patch.py +++ b/ddtrace/contrib/internal/rq/patch.py @@ -1,6 +1,5 @@ import os -from ddtrace import Pin from ddtrace import config from ddtrace.constants import SPAN_KIND from ddtrace.internal import core @@ -10,6 +9,7 @@ from ddtrace.internal.schema.span_attribute_schema import SpanDirection from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils.formats import asbool +from 
ddtrace.trace import Pin from ....ext import SpanKind from ....ext import SpanTypes diff --git a/ddtrace/contrib/internal/sanic/patch.py b/ddtrace/contrib/internal/sanic/patch.py index 826267cd341..5d105cf2f32 100644 --- a/ddtrace/contrib/internal/sanic/patch.py +++ b/ddtrace/contrib/internal/sanic/patch.py @@ -17,7 +17,7 @@ from ddtrace.internal.schema import schematize_url_operation from ddtrace.internal.schema.span_attribute_schema import SpanDirection from ddtrace.internal.utils.wrappers import unwrap as _u -from ddtrace.pin import Pin +from ddtrace.trace import Pin log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/snowflake/patch.py b/ddtrace/contrib/internal/snowflake/patch.py index 87896aab109..d28844ea992 100644 --- a/ddtrace/contrib/internal/snowflake/patch.py +++ b/ddtrace/contrib/internal/snowflake/patch.py @@ -2,7 +2,6 @@ import wrapt -from ddtrace import Pin from ddtrace import config from ddtrace.contrib.dbapi import TracedConnection from ddtrace.contrib.dbapi import TracedCursor @@ -11,6 +10,7 @@ from ddtrace.ext import net from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.utils.formats import asbool +from ddtrace.trace import Pin config._add( diff --git a/ddtrace/contrib/internal/sqlalchemy/engine.py b/ddtrace/contrib/internal/sqlalchemy/engine.py index 05bdb7ca0f1..57b6db4e9fc 100644 --- a/ddtrace/contrib/internal/sqlalchemy/engine.py +++ b/ddtrace/contrib/internal/sqlalchemy/engine.py @@ -29,7 +29,7 @@ from ddtrace.internal.constants import COMPONENT from ddtrace.internal.schema import schematize_database_operation from ddtrace.internal.schema import schematize_service_name -from ddtrace.pin import Pin +from ddtrace.trace import Pin def trace_engine(engine, tracer=None, service=None): diff --git a/ddtrace/contrib/internal/sqlite3/patch.py b/ddtrace/contrib/internal/sqlite3/patch.py index dedf92c6297..f47906146bc 100644 --- a/ddtrace/contrib/internal/sqlite3/patch.py +++ 
b/ddtrace/contrib/internal/sqlite3/patch.py @@ -15,8 +15,8 @@ from ddtrace.internal.schema import schematize_database_operation from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.utils.formats import asbool -from ddtrace.pin import Pin from ddtrace.settings.asm import config as asm_config +from ddtrace.trace import Pin # Original connect method diff --git a/ddtrace/contrib/internal/starlette/patch.py b/ddtrace/contrib/internal/starlette/patch.py index b872a77ecd7..064722b67f1 100644 --- a/ddtrace/contrib/internal/starlette/patch.py +++ b/ddtrace/contrib/internal/starlette/patch.py @@ -12,7 +12,6 @@ from wrapt import ObjectProxy from wrapt import wrap_function_wrapper as _w -from ddtrace import Pin from ddtrace import config from ddtrace._trace.span import Span # noqa:F401 from ddtrace.appsec._iast import _is_iast_enabled @@ -28,6 +27,7 @@ from ddtrace.internal.utils import get_blocked from ddtrace.internal.utils import set_argument_value from ddtrace.internal.utils.wrappers import unwrap as _u +from ddtrace.trace import Pin from ddtrace.vendor.packaging.version import parse as parse_version diff --git a/ddtrace/contrib/internal/subprocess/patch.py b/ddtrace/contrib/internal/subprocess/patch.py index 76530c195df..80d05b107bb 100644 --- a/ddtrace/contrib/internal/subprocess/patch.py +++ b/ddtrace/contrib/internal/subprocess/patch.py @@ -14,7 +14,6 @@ from typing import Union # noqa:F401 from typing import cast # noqa:F401 -from ddtrace import Pin from ddtrace import config from ddtrace.contrib import trace_utils from ddtrace.contrib.internal.subprocess.constants import COMMANDS @@ -23,6 +22,7 @@ from ddtrace.internal.compat import shjoin from ddtrace.internal.logger import get_logger from ddtrace.settings.asm import config as asm_config +from ddtrace.trace import Pin log = get_logger(__name__) diff --git a/ddtrace/contrib/internal/tornado/template.py b/ddtrace/contrib/internal/tornado/template.py index 5d94d2a358c..a47ee53b9e4 100644 
--- a/ddtrace/contrib/internal/tornado/template.py +++ b/ddtrace/contrib/internal/tornado/template.py @@ -1,9 +1,9 @@ from tornado import template -from ddtrace import Pin from ddtrace import config from ddtrace.ext import SpanTypes from ddtrace.internal.constants import COMPONENT +from ddtrace.trace import Pin def generate(func, renderer, args, kwargs): diff --git a/ddtrace/contrib/internal/urllib3/patch.py b/ddtrace/contrib/internal/urllib3/patch.py index 624dd9efbc6..6c10526c125 100644 --- a/ddtrace/contrib/internal/urllib3/patch.py +++ b/ddtrace/contrib/internal/urllib3/patch.py @@ -22,9 +22,9 @@ from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.wrappers import unwrap as _u -from ddtrace.pin import Pin from ddtrace.propagation.http import HTTPPropagator from ddtrace.settings.asm import config as asm_config +from ddtrace.trace import Pin # Ports which, if set, will not be used in hostnames/service names diff --git a/ddtrace/contrib/internal/vertexai/patch.py b/ddtrace/contrib/internal/vertexai/patch.py index 2dbce060234..bc6e46903c3 100644 --- a/ddtrace/contrib/internal/vertexai/patch.py +++ b/ddtrace/contrib/internal/vertexai/patch.py @@ -13,7 +13,7 @@ from ddtrace.contrib.trace_utils import wrap from ddtrace.llmobs._integrations import VertexAIIntegration from ddtrace.llmobs._integrations.utils import extract_model_name_google -from ddtrace.pin import Pin +from ddtrace.trace import Pin config._add( diff --git a/ddtrace/contrib/internal/vertica/patch.py b/ddtrace/contrib/internal/vertica/patch.py index 8e820248f14..b365ade8c05 100644 --- a/ddtrace/contrib/internal/vertica/patch.py +++ b/ddtrace/contrib/internal/vertica/patch.py @@ -18,7 +18,7 @@ from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.utils import get_argument_value from ddtrace.internal.utils.wrappers import unwrap -from ddtrace.pin import Pin +from ddtrace.trace import Pin log = 
get_logger(__name__) diff --git a/ddtrace/contrib/internal/wsgi/wsgi.py b/ddtrace/contrib/internal/wsgi/wsgi.py index da86aa8f21e..44e1646f5f9 100644 --- a/ddtrace/contrib/internal/wsgi/wsgi.py +++ b/ddtrace/contrib/internal/wsgi/wsgi.py @@ -11,10 +11,10 @@ from typing import Mapping # noqa:F401 from typing import Optional # noqa:F401 - from ddtrace import Pin # noqa:F401 from ddtrace import Span # noqa:F401 from ddtrace import Tracer # noqa:F401 from ddtrace.settings import Config # noqa:F401 + from ddtrace.trace import Pin # noqa:F401 from urllib.parse import quote diff --git a/ddtrace/contrib/internal/yaaredis/patch.py b/ddtrace/contrib/internal/yaaredis/patch.py index eeba29994f6..58c5a47bda4 100644 --- a/ddtrace/contrib/internal/yaaredis/patch.py +++ b/ddtrace/contrib/internal/yaaredis/patch.py @@ -13,7 +13,7 @@ from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.formats import stringify_cache_args from ddtrace.internal.utils.wrappers import unwrap -from ddtrace.pin import Pin +from ddtrace.trace import Pin from ddtrace.vendor.debtcollector import deprecate diff --git a/ddtrace/contrib/kafka/__init__.py b/ddtrace/contrib/kafka/__init__.py index f3cf66f6f23..355d3a99f48 100644 --- a/ddtrace/contrib/kafka/__init__.py +++ b/ddtrace/contrib/kafka/__init__.py @@ -31,7 +31,7 @@ To configure the kafka integration using the ``Pin`` API:: - from ddtrace import Pin + from ddtrace.trace import Pin from ddtrace import patch # Make sure to patch before importing confluent_kafka diff --git a/ddtrace/contrib/kombu/__init__.py b/ddtrace/contrib/kombu/__init__.py index 49f3485f2ee..1a010892a7b 100644 --- a/ddtrace/contrib/kombu/__init__.py +++ b/ddtrace/contrib/kombu/__init__.py @@ -11,7 +11,8 @@ without the whole trace being dropped. 
:: - from ddtrace import Pin, patch + from ddtrace import patch + from ddtrace.trace import Pin import kombu # If not patched yet, you can patch kombu specifically diff --git a/ddtrace/contrib/mariadb/__init__.py b/ddtrace/contrib/mariadb/__init__.py index e5c7139ee74..ea245ab37e7 100644 --- a/ddtrace/contrib/mariadb/__init__.py +++ b/ddtrace/contrib/mariadb/__init__.py @@ -34,7 +34,7 @@ To configure the mariadb integration on an per-connection basis use the ``Pin`` API:: - from ddtrace import Pin + from ddtrace.trace import Pin from ddtrace import patch # Make sure to patch before importing mariadb diff --git a/ddtrace/contrib/mongoengine/__init__.py b/ddtrace/contrib/mongoengine/__init__.py index 1522ac1438b..eed76b32f4c 100644 --- a/ddtrace/contrib/mongoengine/__init__.py +++ b/ddtrace/contrib/mongoengine/__init__.py @@ -3,7 +3,8 @@ ``import ddtrace.auto`` will automatically patch your mongoengine connect method to make it work. :: - from ddtrace import Pin, patch + from ddtrace import patch + from ddtrace.trace import Pin import mongoengine # If not patched yet, you can patch mongoengine specifically diff --git a/ddtrace/contrib/mysql/__init__.py b/ddtrace/contrib/mysql/__init__.py index 1c3f6064e55..5fa835be709 100644 --- a/ddtrace/contrib/mysql/__init__.py +++ b/ddtrace/contrib/mysql/__init__.py @@ -41,7 +41,7 @@ To configure the mysql integration on an per-connection basis use the ``Pin`` API:: - from ddtrace import Pin + from ddtrace.trace import Pin # Make sure to import mysql.connector and not the 'connect' function, # otherwise you won't have access to the patched version import mysql.connector diff --git a/ddtrace/contrib/mysqldb/__init__.py b/ddtrace/contrib/mysqldb/__init__.py index 17bb76b08a8..81dd4b62c37 100644 --- a/ddtrace/contrib/mysqldb/__init__.py +++ b/ddtrace/contrib/mysqldb/__init__.py @@ -55,7 +55,7 @@ # Make sure to import MySQLdb and not the 'connect' function, # otherwise you won't have access to the patched version - from ddtrace 
import Pin + from ddtrace.trace import Pin import MySQLdb # This will report a span with the default settings diff --git a/ddtrace/contrib/openai/__init__.py b/ddtrace/contrib/openai/__init__.py index 79a5b488834..bf482c93913 100644 --- a/ddtrace/contrib/openai/__init__.py +++ b/ddtrace/contrib/openai/__init__.py @@ -242,7 +242,8 @@ ``Pin`` API:: import openai - from ddtrace import Pin, config + from ddtrace import config + from ddtrace.trace import Pin Pin.override(openai, service="my-openai-service") """ # noqa: E501 diff --git a/ddtrace/contrib/psycopg/__init__.py b/ddtrace/contrib/psycopg/__init__.py index 48869af6cd7..a747be2310a 100644 --- a/ddtrace/contrib/psycopg/__init__.py +++ b/ddtrace/contrib/psycopg/__init__.py @@ -50,7 +50,7 @@ To configure the psycopg integration on an per-connection basis use the ``Pin`` API:: - from ddtrace import Pin + from ddtrace.trace import Pin import psycopg db = psycopg.connect(connection_factory=factory) diff --git a/ddtrace/contrib/pylibmc/__init__.py b/ddtrace/contrib/pylibmc/__init__.py index 5689fcd9070..a4ca6aa7692 100644 --- a/ddtrace/contrib/pylibmc/__init__.py +++ b/ddtrace/contrib/pylibmc/__init__.py @@ -5,7 +5,8 @@ # Be sure to import pylibmc and not pylibmc.Client directly, # otherwise you won't have access to the patched version - from ddtrace import Pin, patch + from ddtrace import patch + from ddtrace.trace import Pin import pylibmc # If not patched yet, you can patch pylibmc specifically diff --git a/ddtrace/contrib/pymemcache/__init__.py b/ddtrace/contrib/pymemcache/__init__.py index 871d8ee0f6c..894359fb007 100644 --- a/ddtrace/contrib/pymemcache/__init__.py +++ b/ddtrace/contrib/pymemcache/__init__.py @@ -2,7 +2,8 @@ ``import ddtrace.auto`` will automatically patch the pymemcache ``Client``:: - from ddtrace import Pin, patch + from ddtrace import patch + from ddtrace.trace import Pin # If not patched yet, patch pymemcache specifically patch(pymemcache=True) diff --git a/ddtrace/contrib/pymongo/__init__.py 
b/ddtrace/contrib/pymongo/__init__.py index 60394b6c2f3..a9363e65a04 100644 --- a/ddtrace/contrib/pymongo/__init__.py +++ b/ddtrace/contrib/pymongo/__init__.py @@ -8,7 +8,8 @@ # Be sure to import pymongo and not pymongo.MongoClient directly, # otherwise you won't have access to the patched version - from ddtrace import Pin, patch + from ddtrace import patch + from ddtrace.trace import Pin import pymongo # If not patched yet, you can patch pymongo specifically diff --git a/ddtrace/contrib/pymysql/__init__.py b/ddtrace/contrib/pymysql/__init__.py index d4b24e2cd5f..bd0e36c6be8 100644 --- a/ddtrace/contrib/pymysql/__init__.py +++ b/ddtrace/contrib/pymysql/__init__.py @@ -41,7 +41,7 @@ To configure the integration on an per-connection basis use the ``Pin`` API:: - from ddtrace import Pin + from ddtrace.trace import Pin from pymysql import connect # This will report a span with the default settings diff --git a/ddtrace/contrib/pyodbc/__init__.py b/ddtrace/contrib/pyodbc/__init__.py index 44605b7cdc9..bc7d2b3e9b3 100644 --- a/ddtrace/contrib/pyodbc/__init__.py +++ b/ddtrace/contrib/pyodbc/__init__.py @@ -41,7 +41,7 @@ To configure the integration on an per-connection basis use the ``Pin`` API:: - from ddtrace import Pin + from ddtrace.trace import Pin import pyodbc # This will report a span with the default settings diff --git a/ddtrace/contrib/redis/__init__.py b/ddtrace/contrib/redis/__init__.py index 4fddef1c742..638d08b0a79 100644 --- a/ddtrace/contrib/redis/__init__.py +++ b/ddtrace/contrib/redis/__init__.py @@ -55,7 +55,7 @@ To configure particular redis instances use the :class:`Pin ` API:: import redis - from ddtrace import Pin + from ddtrace.trace import Pin client = redis.StrictRedis(host="localhost", port=6379) diff --git a/ddtrace/contrib/rediscluster/__init__.py b/ddtrace/contrib/rediscluster/__init__.py index cb14eb9aa30..65209053b97 100644 --- a/ddtrace/contrib/rediscluster/__init__.py +++ b/ddtrace/contrib/rediscluster/__init__.py @@ -3,7 +3,8 @@ ``import 
ddtrace.auto`` will automatically patch your Redis Cluster client to make it work. :: - from ddtrace import Pin, patch + from ddtrace import patch + from ddtrace.trace import Pin import rediscluster # If not patched yet, you can patch redis specifically diff --git a/ddtrace/contrib/rq/__init__.py b/ddtrace/contrib/rq/__init__.py index 7dd6ba48fcb..0ce23d17984 100644 --- a/ddtrace/contrib/rq/__init__.py +++ b/ddtrace/contrib/rq/__init__.py @@ -28,7 +28,7 @@ To override the service name for a queue:: - from ddtrace import Pin + from ddtrace.trace import Pin connection = redis.Redis() queue = rq.Queue(connection=connection) diff --git a/ddtrace/contrib/snowflake/__init__.py b/ddtrace/contrib/snowflake/__init__.py index bbc892a5362..db137130214 100644 --- a/ddtrace/contrib/snowflake/__init__.py +++ b/ddtrace/contrib/snowflake/__init__.py @@ -51,7 +51,7 @@ To configure the integration on an per-connection basis use the ``Pin`` API:: - from ddtrace import Pin + from ddtrace.trace import Pin from snowflake.connector import connect # This will report a span with the default settings diff --git a/ddtrace/contrib/sqlalchemy/__init__.py b/ddtrace/contrib/sqlalchemy/__init__.py index c294b8c976c..cc4d1775a44 100644 --- a/ddtrace/contrib/sqlalchemy/__init__.py +++ b/ddtrace/contrib/sqlalchemy/__init__.py @@ -7,7 +7,8 @@ using the patch method that **must be called before** importing sqlalchemy:: # patch before importing `create_engine` - from ddtrace import Pin, patch + from ddtrace import patch + from ddtrace.trace import Pin patch(sqlalchemy=True) # use SQLAlchemy as usual diff --git a/ddtrace/contrib/sqlite3/__init__.py b/ddtrace/contrib/sqlite3/__init__.py index 42499cf0447..adf3ea061d6 100644 --- a/ddtrace/contrib/sqlite3/__init__.py +++ b/ddtrace/contrib/sqlite3/__init__.py @@ -41,7 +41,7 @@ To configure the integration on an per-connection basis use the ``Pin`` API:: - from ddtrace import Pin + from ddtrace.trace import Pin import sqlite3 # This will report a span with 
the default settings diff --git a/ddtrace/contrib/trace_utils.py b/ddtrace/contrib/trace_utils.py index 644475b02ab..56901934e83 100644 --- a/ddtrace/contrib/trace_utils.py +++ b/ddtrace/contrib/trace_utils.py @@ -20,7 +20,6 @@ import wrapt -from ddtrace import Pin from ddtrace import config from ddtrace.ext import http from ddtrace.ext import net @@ -37,6 +36,7 @@ import ddtrace.internal.utils.wrappers from ddtrace.propagation.http import HTTPPropagator from ddtrace.settings.asm import config as asm_config +from ddtrace.trace import Pin if TYPE_CHECKING: # pragma: no cover diff --git a/ddtrace/contrib/trace_utils_async.py b/ddtrace/contrib/trace_utils_async.py index 63a3325db50..f58cc4e34bb 100644 --- a/ddtrace/contrib/trace_utils_async.py +++ b/ddtrace/contrib/trace_utils_async.py @@ -3,8 +3,8 @@ Note that this module should only be imported in Python 3.5+. """ -from ddtrace import Pin from ddtrace.internal.logger import get_logger +from ddtrace.trace import Pin log = get_logger(__name__) diff --git a/ddtrace/contrib/vertexai/__init__.py b/ddtrace/contrib/vertexai/__init__.py index f472d28790d..80597e0c8ff 100644 --- a/ddtrace/contrib/vertexai/__init__.py +++ b/ddtrace/contrib/vertexai/__init__.py @@ -77,7 +77,8 @@ ``Pin`` API:: import vertexai - from ddtrace import Pin, config + from ddtrace import config + from ddtrace.trace import Pin Pin.override(vertexai, service="my-vertexai-service") """ # noqa: E501 diff --git a/ddtrace/contrib/vertica/__init__.py b/ddtrace/contrib/vertica/__init__.py index 3ec424fbb53..e4fc457ac13 100644 --- a/ddtrace/contrib/vertica/__init__.py +++ b/ddtrace/contrib/vertica/__init__.py @@ -27,7 +27,8 @@ To configure the Vertica integration on an instance-per-instance basis use the ``Pin`` API:: - from ddtrace import Pin, patch, Tracer + from ddtrace import patch + from ddtrace.trace import Pin, Tracer patch(vertica=True) import vertica_python diff --git a/ddtrace/contrib/yaaredis/__init__.py b/ddtrace/contrib/yaaredis/__init__.py index 
03d76db11c0..2eefb3beb93 100644 --- a/ddtrace/contrib/yaaredis/__init__.py +++ b/ddtrace/contrib/yaaredis/__init__.py @@ -53,7 +53,7 @@ To configure particular yaaredis instances use the :class:`Pin ` API:: import yaaredis - from ddtrace import Pin + from ddtrace.trace import Pin client = yaaredis.StrictRedis(host="localhost", port=6379) diff --git a/ddtrace/filters.py b/ddtrace/filters.py index a2e6884f05c..3c9c42892b8 100644 --- a/ddtrace/filters.py +++ b/ddtrace/filters.py @@ -1,72 +1,10 @@ -import abc -import re -from typing import TYPE_CHECKING # noqa:F401 -from typing import List # noqa:F401 -from typing import Optional # noqa:F401 -from typing import Union # noqa:F401 +from ddtrace._trace.filters import * # noqa: F403 +from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning +from ddtrace.vendor.debtcollector import deprecate -from ddtrace._trace.processor import TraceProcessor -from ddtrace.ext import http - -if TYPE_CHECKING: # pragma: no cover - from ddtrace._trace.span import Span # noqa:F401 - - -class TraceFilter(TraceProcessor): - @abc.abstractmethod - def process_trace(self, trace): - # type: (List[Span]) -> Optional[List[Span]] - """Processes a trace. - - None can be returned to prevent the trace from being exported. - """ - pass - - -class FilterRequestsOnUrl(TraceFilter): - r"""Filter out traces from incoming http requests based on the request's url. - - This class takes as argument a list of regular expression patterns - representing the urls to be excluded from tracing. A trace will be excluded - if its root span contains a ``http.url`` tag and if this tag matches any of - the provided regular expression using the standard python regexp match - semantic (https://docs.python.org/3/library/re.html#re.match). - - :param list regexps: a list of regular expressions (or a single string) defining - the urls that should be filtered out. 
- - Examples: - To filter out http calls to domain api.example.com:: - - FilterRequestsOnUrl(r'http://api\\.example\\.com') - - To filter out http calls to all first level subdomains from example.com:: - - FilterRequestOnUrl(r'http://.*+\\.example\\.com') - - To filter out calls to both http://test.example.com and http://example.com/healthcheck:: - - FilterRequestOnUrl([r'http://test\\.example\\.com', r'http://example\\.com/healthcheck']) - """ - - def __init__(self, regexps: Union[str, List[str]]): - if isinstance(regexps, str): - regexps = [regexps] - self._regexps = [re.compile(regexp) for regexp in regexps] - - def process_trace(self, trace): - # type: (List[Span]) -> Optional[List[Span]] - """ - When the filter is registered in the tracer, process_trace is called by - on each trace before it is sent to the agent, the returned value will - be fed to the next filter in the list. If process_trace returns None, - the whole trace is discarded. - """ - for span in trace: - url = span.get_tag(http.URL) - if span.parent_id is None and url is not None: - for regexp in self._regexps: - if regexp.match(url): - return None - return trace +deprecate( + "The ddtrace.filters module is deprecated and will be removed.", + message="Import ``TraceFilter`` and/or ``FilterRequestsOnUrl`` from the ddtrace.trace package.", + category=DDTraceDeprecationWarning, +) diff --git a/ddtrace/internal/ci_visibility/filters.py b/ddtrace/internal/ci_visibility/filters.py index c90e7324533..f1b22d97e13 100644 --- a/ddtrace/internal/ci_visibility/filters.py +++ b/ddtrace/internal/ci_visibility/filters.py @@ -8,7 +8,7 @@ from ddtrace.constants import AUTO_KEEP from ddtrace.ext import SpanTypes from ddtrace.ext import ci -from ddtrace.filters import TraceFilter +from ddtrace.trace import TraceFilter if TYPE_CHECKING: diff --git a/ddtrace/llmobs/_integrations/base.py b/ddtrace/llmobs/_integrations/base.py index f01c19f173a..2e892904720 100644 --- a/ddtrace/llmobs/_integrations/base.py +++ 
b/ddtrace/llmobs/_integrations/base.py @@ -6,7 +6,6 @@ from typing import List # noqa:F401 from typing import Optional # noqa:F401 -from ddtrace import Pin from ddtrace import config from ddtrace._trace.sampler import RateSampler from ddtrace._trace.span import Span @@ -24,6 +23,7 @@ from ddtrace.llmobs._log_writer import V2LogWriter from ddtrace.llmobs._utils import _get_llmobs_parent_id from ddtrace.settings import IntegrationConfig +from ddtrace.trace import Pin log = get_logger(__name__) diff --git a/ddtrace/llmobs/_integrations/openai.py b/ddtrace/llmobs/_integrations/openai.py index bd727b1a5a2..ea660f53f68 100644 --- a/ddtrace/llmobs/_integrations/openai.py +++ b/ddtrace/llmobs/_integrations/openai.py @@ -24,7 +24,7 @@ from ddtrace.llmobs._integrations.base import BaseLLMIntegration from ddtrace.llmobs._utils import _get_attr from ddtrace.llmobs.utils import Document -from ddtrace.pin import Pin +from ddtrace.trace import Pin class OpenAIIntegration(BaseLLMIntegration): diff --git a/ddtrace/pin.py b/ddtrace/pin.py index 926918b6cea..0e683b3b22e 100644 --- a/ddtrace/pin.py +++ b/ddtrace/pin.py @@ -1,209 +1,10 @@ -from typing import TYPE_CHECKING # noqa:F401 -from typing import Any # noqa:F401 -from typing import Dict # noqa:F401 -from typing import Optional # noqa:F401 +from ddtrace._trace.pin import * # noqa: F403 +from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning +from ddtrace.vendor.debtcollector import deprecate -import wrapt -import ddtrace - -from .internal.logger import get_logger - - -log = get_logger(__name__) - - -# To set attributes on wrapt proxy objects use this prefix: -# http://wrapt.readthedocs.io/en/latest/wrappers.html -_DD_PIN_NAME = "_datadog_pin" -_DD_PIN_PROXY_NAME = "_self_" + _DD_PIN_NAME - - -class Pin(object): - """Pin (a.k.a Patch INfo) is a small class which is used to - set tracing metadata on a particular traced connection. - This is useful if you wanted to, say, trace two different - database clusters. 
- - >>> conn = sqlite.connect('/tmp/user.db') - >>> # Override a pin for a specific connection - >>> pin = Pin.override(conn, service='user-db') - >>> conn = sqlite.connect('/tmp/image.db') - """ - - __slots__ = ["tags", "tracer", "_target", "_config", "_initialized"] - - def __init__( - self, - service=None, # type: Optional[str] - tags=None, # type: Optional[Dict[str, str]] - tracer=None, - _config=None, # type: Optional[Dict[str, Any]] - ): - # type: (...) -> None - tracer = tracer or ddtrace.tracer - self.tags = tags - self.tracer = tracer - self._target = None # type: Optional[int] - # keep the configuration attribute internal because the - # public API to access it is not the Pin class - self._config = _config or {} # type: Dict[str, Any] - # [Backward compatibility]: service argument updates the `Pin` config - self._config["service_name"] = service - self._initialized = True - - @property - def service(self): - # type: () -> str - """Backward compatibility: accessing to `pin.service` returns the underlying - configuration value. 
- """ - return self._config["service_name"] - - def __setattr__(self, name, value): - if getattr(self, "_initialized", False) and name != "_target": - raise AttributeError("can't mutate a pin, use override() or clone() instead") - super(Pin, self).__setattr__(name, value) - - def __repr__(self): - return "Pin(service=%s, tags=%s, tracer=%s)" % (self.service, self.tags, self.tracer) - - @staticmethod - def _find(*objs): - # type: (Any) -> Optional[Pin] - """ - Return the first :class:`ddtrace.pin.Pin` found on any of the provided objects or `None` if none were found - - - >>> pin = Pin._find(wrapper, instance, conn) - - :param objs: The objects to search for a :class:`ddtrace.pin.Pin` on - :type objs: List of objects - :rtype: :class:`ddtrace.pin.Pin`, None - :returns: The first found :class:`ddtrace.pin.Pin` or `None` is none was found - """ - for obj in objs: - pin = Pin.get_from(obj) - if pin: - return pin - return None - - @staticmethod - def get_from(obj): - # type: (Any) -> Optional[Pin] - """Return the pin associated with the given object. If a pin is attached to - `obj` but the instance is not the owner of the pin, a new pin is cloned and - attached. This ensures that a pin inherited from a class is a copy for the new - instance, avoiding that a specific instance overrides other pins values. 
- - >>> pin = Pin.get_from(conn) - - :param obj: The object to look for a :class:`ddtrace.pin.Pin` on - :type obj: object - :rtype: :class:`ddtrace.pin.Pin`, None - :returns: :class:`ddtrace.pin.Pin` associated with the object, or None if none was found - """ - if hasattr(obj, "__getddpin__"): - return obj.__getddpin__() - - pin_name = _DD_PIN_PROXY_NAME if isinstance(obj, wrapt.ObjectProxy) else _DD_PIN_NAME - pin = getattr(obj, pin_name, None) - # detect if the PIN has been inherited from a class - if pin is not None and pin._target != id(obj): - pin = pin.clone() - pin.onto(obj) - return pin - - @classmethod - def override( - cls, - obj, # type: Any - service=None, # type: Optional[str] - tags=None, # type: Optional[Dict[str, str]] - tracer=None, - ): - # type: (...) -> None - """Override an object with the given attributes. - - That's the recommended way to customize an already instrumented client, without - losing existing attributes. - - >>> conn = sqlite.connect('/tmp/user.db') - >>> # Override a pin for a specific connection - >>> Pin.override(conn, service='user-db') - """ - if not obj: - return - - pin = cls.get_from(obj) - if pin is None: - Pin(service=service, tags=tags, tracer=tracer).onto(obj) - else: - pin.clone(service=service, tags=tags, tracer=tracer).onto(obj) - - def enabled(self): - # type: () -> bool - """Return true if this pin's tracer is enabled.""" - # inline to avoid circular imports - from ddtrace.settings.asm import config as asm_config - - return bool(self.tracer) and (self.tracer.enabled or asm_config._apm_opt_out) - - def onto(self, obj, send=True): - # type: (Any, bool) -> None - """Patch this pin onto the given object. If send is true, it will also - queue the metadata to be sent to the server. - """ - # Actually patch it on the object. 
- try: - if hasattr(obj, "__setddpin__"): - return obj.__setddpin__(self) - - pin_name = _DD_PIN_PROXY_NAME if isinstance(obj, wrapt.ObjectProxy) else _DD_PIN_NAME - - # set the target reference; any get_from, clones and retarget the new PIN - self._target = id(obj) - if self.service: - ddtrace.config._add_extra_service(self.service) - return setattr(obj, pin_name, self) - except AttributeError: - log.debug("can't pin onto object. skipping", exc_info=True) - - def remove_from(self, obj): - # type: (Any) -> None - # Remove pin from the object. - try: - pin_name = _DD_PIN_PROXY_NAME if isinstance(obj, wrapt.ObjectProxy) else _DD_PIN_NAME - - pin = Pin.get_from(obj) - if pin is not None: - delattr(obj, pin_name) - except AttributeError: - log.debug("can't remove pin from object. skipping", exc_info=True) - - def clone( - self, - service=None, # type: Optional[str] - tags=None, # type: Optional[Dict[str, str]] - tracer=None, - ): - # type: (...) -> Pin - """Return a clone of the pin with the given attributes replaced.""" - # do a shallow copy of Pin dicts - if not tags and self.tags: - tags = self.tags.copy() - - # we use a copy instead of a deepcopy because we expect configurations - # to have only a root level dictionary without nested objects. 
Using - # deepcopy introduces a big overhead: - # - # copy: 0.00654911994934082 - # deepcopy: 0.2787208557128906 - config = self._config.copy() - - return Pin( - service=service or self.service, - tags=tags, - tracer=tracer or self.tracer, # do not clone the Tracer - _config=config, - ) +deprecate( + "The ddtrace.pin module is deprecated and will be removed.", + message="Import ``Pin`` from the ddtrace.trace package.", + category=DDTraceDeprecationWarning, +) diff --git a/ddtrace/settings/config.py b/ddtrace/settings/config.py index 6ee75fbe6d8..adcd9505a52 100644 --- a/ddtrace/settings/config.py +++ b/ddtrace/settings/config.py @@ -19,6 +19,7 @@ from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning from ddtrace.vendor.debtcollector import deprecate +from .._trace.pin import Pin from ..internal import gitmetadata from ..internal.constants import _PROPAGATION_BEHAVIOR_DEFAULT from ..internal.constants import _PROPAGATION_BEHAVIOR_IGNORE @@ -37,7 +38,6 @@ from ..internal.serverless import in_aws_lambda from ..internal.utils.formats import asbool from ..internal.utils.formats import parse_tags_str -from ..pin import Pin from ._core import get_config as _get_config from ._inferred_base_service import detect_service from ._otel_remapper import otel_remapping as _otel_remapping diff --git a/ddtrace/trace/__init__.py b/ddtrace/trace/__init__.py index 90c662cebc7..dcd3aeb928e 100644 --- a/ddtrace/trace/__init__.py +++ b/ddtrace/trace/__init__.py @@ -1,7 +1,8 @@ from ddtrace._trace.context import Context +from ddtrace._trace.filters import FilterRequestsOnUrl +from ddtrace._trace.filters import TraceFilter +from ddtrace._trace.pin import Pin -# TODO: Move `ddtrace.Pin`, `ddtrace.Tracer`, `ddtrace.Span`, and `ddtrace.tracer` to this module -__all__ = [ - "Context", -] +# TODO: Move `ddtrace.Tracer`, `ddtrace.Span`, and `ddtrace.tracer` to this module +__all__ = ["Context", "Pin", "TraceFilter", "FilterRequestsOnUrl"] diff --git a/docker-compose.yml 
b/docker-compose.yml index cf40a4a256d..118ad8cc5db 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -142,6 +142,7 @@ services: - DD_DISABLE_ERROR_RESPONSES=true - ENABLED_CHECKS=trace_content_length,trace_stall,meta_tracer_version_header,trace_count_header,trace_peer_service,trace_dd_service - SNAPSHOT_IGNORED_ATTRS=span_id,trace_id,parent_id,duration,start,metrics.system.pid,metrics.system.process_id,metrics.process_id,meta.runtime-id,meta._dd.p.tid,meta.pathway.hash,metrics._dd.tracer_kr,meta._dd.parent_id,meta.kafka.cluster_id + vertica: image: vertica/vertica-ce environment: diff --git a/docs/advanced_usage.rst b/docs/advanced_usage.rst index 309b6178c56..9906fddea89 100644 --- a/docs/advanced_usage.rst +++ b/docs/advanced_usage.rst @@ -347,7 +347,7 @@ and the resulting trace will either be sent to the Agent or discarded. The library comes with a ``FilterRequestsOnUrl`` filter that can be used to filter out incoming requests to specific urls: -.. autoclass:: ddtrace.filters.FilterRequestsOnUrl +.. autoclass:: ddtrace.trace.FilterRequestsOnUrl :members: **Writing a custom filter** @@ -358,7 +358,7 @@ providing it to the filters parameter of :meth:`ddtrace.Tracer.configure()`. the pipeline or ``None`` if the trace should be discarded:: from ddtrace import Span, tracer - from ddtrace.filters import TraceFilter + from ddtrace.trace import TraceFilter class FilterExample(TraceFilter): def process_trace(self, trace): diff --git a/docs/contributing-integrations.rst b/docs/contributing-integrations.rst index 9d7d2d202ee..0dab68b5053 100644 --- a/docs/contributing-integrations.rst +++ b/docs/contributing-integrations.rst @@ -30,7 +30,7 @@ into the runtime execution of third-party libraries. The essential task of writi the functions in the third-party library that would serve as useful entrypoints and wrapping them with ``wrap_function_wrapper``. There are exceptions, but this is generally a useful starting point. 
-The Pin API in ``ddtrace.pin`` is used to configure the instrumentation at runtime. It provides a ``Pin`` class +The Pin API in ``ddtrace.trace.Pin`` is used to configure the instrumentation at runtime. It provides a ``Pin`` class that can store configuration data in memory in a manner that is accessible from within functions wrapped by Wrapt. ``Pin`` objects are most often used for storing configuration data scoped to a given integration, such as enable/disable flags and service name overrides. diff --git a/docs/troubleshooting.rst b/docs/troubleshooting.rst index 73dde525de9..04bd56d0ba9 100644 --- a/docs/troubleshooting.rst +++ b/docs/troubleshooting.rst @@ -67,7 +67,7 @@ This can be a problem for users who want to see error details from a child span While this is default behavior for integrations, users can add a trace filter to propagate the error details up to the root span:: from ddtrace import Span, tracer - from ddtrace.filters import TraceFilter + from ddtrace.trace import TraceFilter class ErrorFilter(TraceFilter): diff --git a/releasenotes/notes/move-pin-and-filters-to-trace-package-2f47fa2d2592b413.yaml b/releasenotes/notes/move-pin-and-filters-to-trace-package-2f47fa2d2592b413.yaml new file mode 100644 index 00000000000..29b3ebe277d --- /dev/null +++ b/releasenotes/notes/move-pin-and-filters-to-trace-package-2f47fa2d2592b413.yaml @@ -0,0 +1,6 @@ +--- +deprecations: + - | + tracing: Deprecates ``ddtrace.pin`` module and moves the ``Pin`` class to ``ddtrace.trace`` package. In v3.0.0 the ``ddtrace/pin.py`` will be removed. + - | + tracing: Deprecates ``ddtrace.filters`` module and moves the ``TraceFilter`` and ``FilterRequestsOnUrl`` classes to ``ddtrace.trace`` package. In v3.0.0 the ``ddtrace/filters.py`` will be removed. 
\ No newline at end of file diff --git a/tests/appsec/contrib_appsec/django_app/urls.py b/tests/appsec/contrib_appsec/django_app/urls.py index aaff69169b5..1b691d43a53 100644 --- a/tests/appsec/contrib_appsec/django_app/urls.py +++ b/tests/appsec/contrib_appsec/django_app/urls.py @@ -196,7 +196,7 @@ def login_user(request): def new_service(request, service_name: str): import ddtrace - ddtrace.Pin.override(django, service=service_name, tracer=ddtrace.tracer) + ddtrace.trace.Pin.override(django, service=service_name, tracer=ddtrace.tracer) return HttpResponse(service_name, status=200) diff --git a/tests/appsec/contrib_appsec/fastapi_app/app.py b/tests/appsec/contrib_appsec/fastapi_app/app.py index c5b765c4bbb..2a97a919395 100644 --- a/tests/appsec/contrib_appsec/fastapi_app/app.py +++ b/tests/appsec/contrib_appsec/fastapi_app/app.py @@ -104,7 +104,7 @@ async def multi_view_no_param(request: Request): # noqa: B008 async def new_service(service_name: str, request: Request): # noqa: B008 import ddtrace - ddtrace.Pin.override(app, service=service_name, tracer=ddtrace.tracer) + ddtrace.trace.Pin.override(app, service=service_name, tracer=ddtrace.tracer) return HTMLResponse(service_name, 200) async def slow_numbers(minimum, maximum): diff --git a/tests/appsec/contrib_appsec/flask_app/app.py b/tests/appsec/contrib_appsec/flask_app/app.py index 939a7cad678..5a5776cd098 100644 --- a/tests/appsec/contrib_appsec/flask_app/app.py +++ b/tests/appsec/contrib_appsec/flask_app/app.py @@ -60,7 +60,7 @@ def multi_view(param_int=0, param_str=""): def new_service(service_name: str): import ddtrace - ddtrace.Pin.override(Flask, service=service_name, tracer=ddtrace.tracer) + ddtrace.trace.Pin.override(Flask, service=service_name, tracer=ddtrace.tracer) return service_name diff --git a/tests/appsec/contrib_appsec/test_flask.py b/tests/appsec/contrib_appsec/test_flask.py index 90a35ac0c88..b497de98bf9 100644 --- a/tests/appsec/contrib_appsec/test_flask.py +++ 
b/tests/appsec/contrib_appsec/test_flask.py @@ -1,8 +1,8 @@ from flask.testing import FlaskClient import pytest -from ddtrace import Pin from ddtrace.internal.packages import get_version_for_package +from ddtrace.trace import Pin from tests.appsec.contrib_appsec import utils from tests.utils import TracerTestCase diff --git a/tests/appsec/contrib_appsec/utils.py b/tests/appsec/contrib_appsec/utils.py index d7aa077052f..312c15b5fed 100644 --- a/tests/appsec/contrib_appsec/utils.py +++ b/tests/appsec/contrib_appsec/utils.py @@ -1560,8 +1560,8 @@ def test_tracer(): @contextmanager def post_tracer(interface): - original_tracer = getattr(ddtrace.Pin.get_from(interface.framework), "tracer", None) - ddtrace.Pin.override(interface.framework, tracer=interface.tracer) + original_tracer = getattr(ddtrace.trace.Pin.get_from(interface.framework), "tracer", None) + ddtrace.trace.Pin.override(interface.framework, tracer=interface.tracer) yield if original_tracer is not None: - ddtrace.Pin.override(interface.framework, tracer=original_tracer) + ddtrace.trace.Pin.override(interface.framework, tracer=original_tracer) diff --git a/tests/commands/ddtrace_run_integration.py b/tests/commands/ddtrace_run_integration.py index e52a0c9b8b0..12f8ab857e2 100644 --- a/tests/commands/ddtrace_run_integration.py +++ b/tests/commands/ddtrace_run_integration.py @@ -5,7 +5,7 @@ import redis -from ddtrace import Pin +from ddtrace.trace import Pin from tests.contrib.config import REDIS_CONFIG from tests.utils import DummyWriter diff --git a/tests/contrib/aiobotocore/utils.py b/tests/contrib/aiobotocore/utils.py index 061f8d37847..b51b6550327 100644 --- a/tests/contrib/aiobotocore/utils.py +++ b/tests/contrib/aiobotocore/utils.py @@ -3,7 +3,7 @@ from async_generator import asynccontextmanager from async_generator import yield_ -from ddtrace import Pin +from ddtrace.trace import Pin LOCALSTACK_ENDPOINT_URL = { diff --git a/tests/contrib/aiohttp/test_aiohttp_client.py 
b/tests/contrib/aiohttp/test_aiohttp_client.py index f490bd85ada..2b2b51c2650 100644 --- a/tests/contrib/aiohttp/test_aiohttp_client.py +++ b/tests/contrib/aiohttp/test_aiohttp_client.py @@ -3,10 +3,10 @@ import aiohttp import pytest -from ddtrace import Pin from ddtrace.contrib.internal.aiohttp.patch import extract_netloc_and_query_info_from_url from ddtrace.contrib.internal.aiohttp.patch import patch from ddtrace.contrib.internal.aiohttp.patch import unpatch +from ddtrace.trace import Pin from tests.utils import override_config from tests.utils import override_http_config @@ -101,7 +101,7 @@ async def test_distributed_tracing_disabled(ddtrace_run_python_code_in_subproces import asyncio import sys import aiohttp -from ddtrace import Pin +from ddtrace.trace import Pin from tests.contrib.aiohttp.test_aiohttp_client import URL async def test(): @@ -184,7 +184,7 @@ def test_configure_service_name_pin(ddtrace_run_python_code_in_subprocess): import asyncio import sys import aiohttp -from ddtrace import Pin +from ddtrace.trace import Pin from tests.contrib.aiohttp.test_aiohttp_client import URL_200 async def test(): diff --git a/tests/contrib/aiohttp_jinja2/conftest.py b/tests/contrib/aiohttp_jinja2/conftest.py index 92c49f015f3..a58b72f7f49 100644 --- a/tests/contrib/aiohttp_jinja2/conftest.py +++ b/tests/contrib/aiohttp_jinja2/conftest.py @@ -3,7 +3,7 @@ from ddtrace.contrib.internal.aiohttp_jinja2.patch import patch from ddtrace.contrib.internal.aiohttp_jinja2.patch import unpatch -from ddtrace.pin import Pin +from ddtrace.trace import Pin from tests.contrib.aiohttp.conftest import app_tracer # noqa:F401 from tests.contrib.aiohttp.conftest import patched_app_tracer # noqa:F401 from tests.contrib.aiohttp.conftest import untraced_app_tracer # noqa:F401 diff --git a/tests/contrib/aiohttp_jinja2/test_aiohttp_jinja2.py b/tests/contrib/aiohttp_jinja2/test_aiohttp_jinja2.py index 222522d66f6..8889d828752 100644 --- a/tests/contrib/aiohttp_jinja2/test_aiohttp_jinja2.py +++ 
b/tests/contrib/aiohttp_jinja2/test_aiohttp_jinja2.py @@ -1,9 +1,9 @@ import aiohttp_jinja2 import pytest -from ddtrace import Pin from ddtrace import tracer from ddtrace.constants import ERROR_MSG +from ddtrace.trace import Pin from tests.contrib.aiohttp.app.web import set_filesystem_loader from tests.contrib.aiohttp.app.web import set_package_loader import tests.contrib.aiohttp.conftest # noqa:F401 diff --git a/tests/contrib/aiomysql/test_aiomysql.py b/tests/contrib/aiomysql/test_aiomysql.py index 9bd898c52ae..0bf8839dc96 100644 --- a/tests/contrib/aiomysql/test_aiomysql.py +++ b/tests/contrib/aiomysql/test_aiomysql.py @@ -5,11 +5,11 @@ import pymysql import pytest -from ddtrace import Pin from ddtrace import Tracer from ddtrace.contrib.internal.aiomysql.patch import patch from ddtrace.contrib.internal.aiomysql.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ddtrace.trace import Pin from tests.contrib import shared_tests_async as shared_tests from tests.contrib.asyncio.utils import AsyncioTestCase from tests.contrib.asyncio.utils import mark_asyncio diff --git a/tests/contrib/aiopg/test.py b/tests/contrib/aiopg/test.py index da741bacca3..eb738e009d8 100644 --- a/tests/contrib/aiopg/test.py +++ b/tests/contrib/aiopg/test.py @@ -4,11 +4,12 @@ from psycopg2 import extras import pytest -# project -from ddtrace import Pin from ddtrace.contrib.internal.aiopg.patch import patch from ddtrace.contrib.internal.aiopg.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME + +# project +from ddtrace.trace import Pin from tests.contrib.asyncio.utils import AsyncioTestCase from tests.contrib.config import POSTGRES_CONFIG from tests.opentracer.utils import init_tracer diff --git a/tests/contrib/algoliasearch/test.py b/tests/contrib/algoliasearch/test.py index 3d563e11035..87f5f7b6910 100644 --- a/tests/contrib/algoliasearch/test.py +++ b/tests/contrib/algoliasearch/test.py @@ -3,7 +3,7 @@ from 
ddtrace.contrib.internal.algoliasearch.patch import algoliasearch_version from ddtrace.contrib.internal.algoliasearch.patch import patch from ddtrace.contrib.internal.algoliasearch.patch import unpatch -from ddtrace.pin import Pin +from ddtrace.trace import Pin from ddtrace.vendor.packaging.version import parse as parse_version from tests.utils import TracerTestCase from tests.utils import assert_is_measured diff --git a/tests/contrib/anthropic/conftest.py b/tests/contrib/anthropic/conftest.py index d975204ef7c..a6a4b53cd4c 100644 --- a/tests/contrib/anthropic/conftest.py +++ b/tests/contrib/anthropic/conftest.py @@ -3,10 +3,10 @@ import mock import pytest -from ddtrace import Pin from ddtrace.contrib.internal.anthropic.patch import patch from ddtrace.contrib.internal.anthropic.patch import unpatch from ddtrace.llmobs import LLMObs +from ddtrace.trace import Pin from tests.contrib.anthropic.utils import get_request_vcr from tests.utils import DummyTracer from tests.utils import DummyWriter diff --git a/tests/contrib/aredis/test_aredis.py b/tests/contrib/aredis/test_aredis.py index c0e5719b80e..e62cfa974be 100644 --- a/tests/contrib/aredis/test_aredis.py +++ b/tests/contrib/aredis/test_aredis.py @@ -5,9 +5,9 @@ import pytest from wrapt import ObjectProxy -from ddtrace import Pin from ddtrace.contrib.internal.aredis.patch import patch from ddtrace.contrib.internal.aredis.patch import unpatch +from ddtrace.trace import Pin from tests.conftest import DEFAULT_DDTRACE_SUBPROCESS_TEST_SERVICE_NAME from tests.opentracer.utils import init_tracer from tests.utils import override_config @@ -152,7 +152,7 @@ def test_schematization_of_service_and_operation(ddtrace_run_python_code_in_subp import pytest import sys from tests.conftest import * -from ddtrace.pin import Pin +from ddtrace.trace import Pin import aredis from tests.contrib.config import REDIS_CONFIG from tests.contrib.aredis.test_aredis import traced_aredis diff --git a/tests/contrib/asyncpg/test_asyncpg.py 
b/tests/contrib/asyncpg/test_asyncpg.py index 539f64f10ac..032a0f91731 100644 --- a/tests/contrib/asyncpg/test_asyncpg.py +++ b/tests/contrib/asyncpg/test_asyncpg.py @@ -5,11 +5,11 @@ import mock import pytest -from ddtrace import Pin from ddtrace import tracer from ddtrace.contrib.internal.asyncpg.patch import patch from ddtrace.contrib.internal.asyncpg.patch import unpatch from ddtrace.contrib.trace_utils import iswrapped +from ddtrace.trace import Pin from tests.contrib.asyncio.utils import AsyncioTestCase from tests.contrib.asyncio.utils import mark_asyncio from tests.contrib.config import POSTGRES_CONFIG diff --git a/tests/contrib/avro/test_avro.py b/tests/contrib/avro/test_avro.py index 7be5e2f6351..3db10460a23 100644 --- a/tests/contrib/avro/test_avro.py +++ b/tests/contrib/avro/test_avro.py @@ -4,11 +4,11 @@ from avro.io import DatumWriter from wrapt import ObjectProxy -from ddtrace import Pin from ddtrace.constants import AUTO_KEEP from ddtrace.contrib.internal.avro.patch import patch from ddtrace.contrib.internal.avro.patch import unpatch from ddtrace.ext import schema as SCHEMA_TAGS +from ddtrace.trace import Pin OPENAPI_USER_SCHEMA_DEF = ( diff --git a/tests/contrib/boto/test.py b/tests/contrib/boto/test.py index cd2520be5b9..2570ca9c65c 100644 --- a/tests/contrib/boto/test.py +++ b/tests/contrib/boto/test.py @@ -14,12 +14,13 @@ from moto import mock_s3 from moto import mock_sts -# project -from ddtrace import Pin from ddtrace.contrib.internal.boto.patch import patch from ddtrace.contrib.internal.boto.patch import unpatch from ddtrace.ext import http from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME + +# project +from ddtrace.trace import Pin from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase from tests.utils import assert_is_measured diff --git a/tests/contrib/botocore/test.py b/tests/contrib/botocore/test.py index af70c453d19..9e4f91cb61f 100644 --- a/tests/contrib/botocore/test.py +++ 
b/tests/contrib/botocore/test.py @@ -33,7 +33,6 @@ except ImportError: from moto import mock_kinesis as mock_firehose -from ddtrace import Pin from ddtrace import config from ddtrace.constants import ERROR_MSG from ddtrace.constants import ERROR_STACK @@ -47,6 +46,7 @@ from ddtrace.internal.utils.version import parse_version from ddtrace.propagation.http import HTTP_HEADER_PARENT_ID from ddtrace.propagation.http import HTTP_HEADER_TRACE_ID +from ddtrace.trace import Pin from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase from tests.utils import assert_is_measured diff --git a/tests/contrib/botocore/test_bedrock.py b/tests/contrib/botocore/test_bedrock.py index 1001aff0dac..99a1729ff1a 100644 --- a/tests/contrib/botocore/test_bedrock.py +++ b/tests/contrib/botocore/test_bedrock.py @@ -4,9 +4,9 @@ import mock import pytest -from ddtrace import Pin from ddtrace.contrib.internal.botocore.patch import patch from ddtrace.contrib.internal.botocore.patch import unpatch +from ddtrace.trace import Pin from tests.contrib.botocore.bedrock_utils import _MODELS from tests.contrib.botocore.bedrock_utils import _REQUEST_BODIES from tests.contrib.botocore.bedrock_utils import get_request_vcr diff --git a/tests/contrib/botocore/test_bedrock_llmobs.py b/tests/contrib/botocore/test_bedrock_llmobs.py index c4af15100f2..790b86f0704 100644 --- a/tests/contrib/botocore/test_bedrock_llmobs.py +++ b/tests/contrib/botocore/test_bedrock_llmobs.py @@ -4,10 +4,10 @@ import mock import pytest -from ddtrace import Pin from ddtrace.contrib.internal.botocore.patch import patch from ddtrace.contrib.internal.botocore.patch import unpatch from ddtrace.llmobs import LLMObs +from ddtrace.trace import Pin from tests.contrib.botocore.bedrock_utils import _MODELS from tests.contrib.botocore.bedrock_utils import _REQUEST_BODIES from tests.contrib.botocore.bedrock_utils import get_request_vcr diff --git a/tests/contrib/botocore/test_stepfunctions.py 
b/tests/contrib/botocore/test_stepfunctions.py index aaf17eb6051..f350e967d4c 100644 --- a/tests/contrib/botocore/test_stepfunctions.py +++ b/tests/contrib/botocore/test_stepfunctions.py @@ -1,9 +1,9 @@ import json -from ddtrace import Pin from ddtrace.contrib.internal.botocore.services.stepfunctions import update_stepfunction_input from ddtrace.ext import SpanTypes from ddtrace.internal import core +from ddtrace.trace import Pin def test_update_stepfunction_input(): diff --git a/tests/contrib/cassandra/test.py b/tests/contrib/cassandra/test.py index b6f5664ac72..21b98d6396f 100644 --- a/tests/contrib/cassandra/test.py +++ b/tests/contrib/cassandra/test.py @@ -9,7 +9,6 @@ from cassandra.query import SimpleStatement import mock -from ddtrace import Pin from ddtrace import config from ddtrace.constants import ERROR_MSG from ddtrace.constants import ERROR_TYPE @@ -19,6 +18,7 @@ from ddtrace.ext import cassandra as cassx from ddtrace.ext import net from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ddtrace.trace import Pin from tests.contrib.config import CASSANDRA_CONFIG from tests.opentracer.utils import init_tracer from tests.utils import DummyTracer diff --git a/tests/contrib/celery/autopatch.py b/tests/contrib/celery/autopatch.py index 128ad9f8415..08eeed5acad 100644 --- a/tests/contrib/celery/autopatch.py +++ b/tests/contrib/celery/autopatch.py @@ -1,4 +1,4 @@ -from ddtrace import Pin +from ddtrace.trace import Pin if __name__ == "__main__": diff --git a/tests/contrib/celery/base.py b/tests/contrib/celery/base.py index 4e89b77a94b..c2b7de22a54 100644 --- a/tests/contrib/celery/base.py +++ b/tests/contrib/celery/base.py @@ -3,9 +3,9 @@ import celery import pytest -from ddtrace import Pin from ddtrace.contrib.internal.celery.patch import patch from ddtrace.contrib.internal.celery.patch import unpatch +from ddtrace.trace import Pin from tests.utils import TracerTestCase from ..config import RABBITMQ_CONFIG diff --git 
a/tests/contrib/celery/test_app.py b/tests/contrib/celery/test_app.py index 6f43a04a9bf..6218d77f061 100644 --- a/tests/contrib/celery/test_app.py +++ b/tests/contrib/celery/test_app.py @@ -1,7 +1,7 @@ import celery -from ddtrace import Pin from ddtrace.contrib.internal.celery.patch import unpatch_app +from ddtrace.trace import Pin from .base import CeleryBaseTestCase diff --git a/tests/contrib/celery/test_integration.py b/tests/contrib/celery/test_integration.py index 8b676fa1108..717ed1de359 100644 --- a/tests/contrib/celery/test_integration.py +++ b/tests/contrib/celery/test_integration.py @@ -8,13 +8,13 @@ import mock import pytest -from ddtrace import Pin from ddtrace._trace.context import Context from ddtrace.constants import ERROR_MSG from ddtrace.contrib.internal.celery.patch import patch from ddtrace.contrib.internal.celery.patch import unpatch import ddtrace.internal.forksafe as forksafe from ddtrace.propagation.http import HTTPPropagator +from ddtrace.trace import Pin from tests.opentracer.utils import init_tracer from tests.utils import flaky diff --git a/tests/contrib/celery/test_patch.py b/tests/contrib/celery/test_patch.py index fe0e5b97b2b..3892fc79fb1 100644 --- a/tests/contrib/celery/test_patch.py +++ b/tests/contrib/celery/test_patch.py @@ -1,6 +1,6 @@ import unittest -from ddtrace import Pin +from ddtrace.trace import Pin from tests.contrib.patch import emit_integration_and_version_to_test_agent diff --git a/tests/contrib/celery/test_tagging.py b/tests/contrib/celery/test_tagging.py index 22e8b539c50..6b88acf9434 100644 --- a/tests/contrib/celery/test_tagging.py +++ b/tests/contrib/celery/test_tagging.py @@ -5,9 +5,9 @@ from celery.contrib.testing.worker import start_worker import pytest -from ddtrace import Pin from ddtrace.contrib.internal.celery.patch import patch from ddtrace.contrib.internal.celery.patch import unpatch +from ddtrace.trace import Pin from tests.utils import DummyTracer from .base import AMQP_BROKER_URL diff --git 
a/tests/contrib/consul/test.py b/tests/contrib/consul/test.py index c5eec48369d..285287f9e95 100644 --- a/tests/contrib/consul/test.py +++ b/tests/contrib/consul/test.py @@ -1,11 +1,11 @@ import consul from wrapt import BoundFunctionWrapper -from ddtrace import Pin from ddtrace.contrib.internal.consul.patch import patch from ddtrace.contrib.internal.consul.patch import unpatch from ddtrace.ext import consul as consulx from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ddtrace.trace import Pin from tests.utils import TracerTestCase from tests.utils import assert_is_measured diff --git a/tests/contrib/dbapi/test_dbapi.py b/tests/contrib/dbapi/test_dbapi.py index 80135734a6a..1f6be1d66f5 100644 --- a/tests/contrib/dbapi/test_dbapi.py +++ b/tests/contrib/dbapi/test_dbapi.py @@ -1,7 +1,6 @@ import mock import pytest -from ddtrace import Pin from ddtrace._trace.span import Span # noqa:F401 from ddtrace.contrib.dbapi import FetchTracedCursor from ddtrace.contrib.dbapi import TracedConnection @@ -9,6 +8,7 @@ from ddtrace.propagation._database_monitoring import _DBM_Propagator from ddtrace.settings import Config from ddtrace.settings.integration import IntegrationConfig +from ddtrace.trace import Pin from tests.utils import TracerTestCase from tests.utils import assert_is_measured from tests.utils import assert_is_not_measured diff --git a/tests/contrib/dbapi/test_dbapi_appsec.py b/tests/contrib/dbapi/test_dbapi_appsec.py index b60b3ac05c0..d43d9c37e3c 100644 --- a/tests/contrib/dbapi/test_dbapi_appsec.py +++ b/tests/contrib/dbapi/test_dbapi_appsec.py @@ -1,12 +1,12 @@ import mock import pytest -from ddtrace import Pin from ddtrace.appsec._iast import oce from ddtrace.appsec._iast._utils import _is_python_version_supported from ddtrace.contrib.dbapi import TracedCursor from ddtrace.settings import Config from ddtrace.settings.integration import IntegrationConfig +from ddtrace.trace import Pin from tests.appsec.iast.conftest import _end_iast_context_and_oce 
from tests.appsec.iast.conftest import _start_iast_context_and_oce from tests.utils import TracerTestCase diff --git a/tests/contrib/dbapi_async/test_dbapi_async.py b/tests/contrib/dbapi_async/test_dbapi_async.py index f7151fe1390..7343e875829 100644 --- a/tests/contrib/dbapi_async/test_dbapi_async.py +++ b/tests/contrib/dbapi_async/test_dbapi_async.py @@ -1,7 +1,6 @@ import mock import pytest -from ddtrace import Pin from ddtrace._trace.span import Span # noqa:F401 from ddtrace.contrib.dbapi_async import FetchTracedAsyncCursor from ddtrace.contrib.dbapi_async import TracedAsyncConnection @@ -9,6 +8,7 @@ from ddtrace.propagation._database_monitoring import _DBM_Propagator from ddtrace.settings import Config from ddtrace.settings.integration import IntegrationConfig +from ddtrace.trace import Pin from tests.contrib.asyncio.utils import AsyncioTestCase from tests.contrib.asyncio.utils import mark_asyncio from tests.utils import assert_is_measured diff --git a/tests/contrib/django/conftest.py b/tests/contrib/django/conftest.py index a2de59753fb..3dd992681b4 100644 --- a/tests/contrib/django/conftest.py +++ b/tests/contrib/django/conftest.py @@ -4,8 +4,8 @@ from django.conf import settings import pytest -from ddtrace import Pin from ddtrace.contrib.internal.django.patch import patch +from ddtrace.trace import Pin from tests.utils import DummyTracer from tests.utils import TracerSpanContainer diff --git a/tests/contrib/django/test_django_dbm.py b/tests/contrib/django/test_django_dbm.py index 00edf1c0815..d44f90f3208 100644 --- a/tests/contrib/django/test_django_dbm.py +++ b/tests/contrib/django/test_django_dbm.py @@ -1,7 +1,7 @@ from django.db import connections import mock -from ddtrace import Pin +from ddtrace.trace import Pin from tests.contrib import shared_tests from tests.utils import DummyTracer from tests.utils import override_config diff --git a/tests/contrib/dogpile_cache/test_tracing.py b/tests/contrib/dogpile_cache/test_tracing.py index 
d6b26bb1b58..fec78818eda 100644 --- a/tests/contrib/dogpile_cache/test_tracing.py +++ b/tests/contrib/dogpile_cache/test_tracing.py @@ -3,9 +3,9 @@ import dogpile import pytest -from ddtrace import Pin from ddtrace.contrib.internal.dogpile_cache.patch import patch from ddtrace.contrib.internal.dogpile_cache.patch import unpatch +from ddtrace.trace import Pin from tests.conftest import DEFAULT_DDTRACE_SUBPROCESS_TEST_SERVICE_NAME from tests.utils import DummyTracer from tests.utils import TracerSpanContainer diff --git a/tests/contrib/dramatiq/test_integration.py b/tests/contrib/dramatiq/test_integration.py index 7d80d554390..ba8d836181c 100644 --- a/tests/contrib/dramatiq/test_integration.py +++ b/tests/contrib/dramatiq/test_integration.py @@ -5,7 +5,7 @@ from ddtrace.contrib.internal.dramatiq.patch import patch from ddtrace.contrib.internal.dramatiq.patch import unpatch -from ddtrace.pin import Pin +from ddtrace.trace import Pin from tests.utils import DummyTracer from tests.utils import snapshot diff --git a/tests/contrib/elasticsearch/test_elasticsearch.py b/tests/contrib/elasticsearch/test_elasticsearch.py index 091fe4b6901..6e381bc1e31 100644 --- a/tests/contrib/elasticsearch/test_elasticsearch.py +++ b/tests/contrib/elasticsearch/test_elasticsearch.py @@ -6,7 +6,6 @@ import pytest -from ddtrace import Pin from ddtrace import config from ddtrace.contrib.internal.elasticsearch.patch import get_version from ddtrace.contrib.internal.elasticsearch.patch import get_versions @@ -14,6 +13,7 @@ from ddtrace.contrib.internal.elasticsearch.patch import unpatch from ddtrace.ext import http from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ddtrace.trace import Pin from tests.contrib.patch import emit_integration_and_version_to_test_agent from tests.utils import TracerTestCase diff --git a/tests/contrib/flask/__init__.py b/tests/contrib/flask/__init__.py index 216b3676e93..a512a79f196 100644 --- a/tests/contrib/flask/__init__.py +++ 
b/tests/contrib/flask/__init__.py @@ -2,9 +2,9 @@ from flask.testing import FlaskClient import wrapt -from ddtrace import Pin from ddtrace.contrib.internal.flask.patch import patch from ddtrace.contrib.internal.flask.patch import unpatch +from ddtrace.trace import Pin from tests.utils import TracerTestCase diff --git a/tests/contrib/flask/test_blueprint.py b/tests/contrib/flask/test_blueprint.py index db069d1bf04..96401dfa1a9 100644 --- a/tests/contrib/flask/test_blueprint.py +++ b/tests/contrib/flask/test_blueprint.py @@ -1,7 +1,7 @@ import flask -from ddtrace import Pin from ddtrace.contrib.internal.flask.patch import unpatch +from ddtrace.trace import Pin from . import BaseFlaskTestCase diff --git a/tests/contrib/flask/test_flask_helpers.py b/tests/contrib/flask/test_flask_helpers.py index a02f25eb4af..d3672213a0a 100644 --- a/tests/contrib/flask/test_flask_helpers.py +++ b/tests/contrib/flask/test_flask_helpers.py @@ -2,10 +2,10 @@ import flask -from ddtrace import Pin from ddtrace.contrib.internal.flask.patch import flask_version from ddtrace.contrib.internal.flask.patch import unpatch from ddtrace.internal.compat import StringIO +from ddtrace.trace import Pin from . import BaseFlaskTestCase diff --git a/tests/contrib/flask/test_signals.py b/tests/contrib/flask/test_signals.py index e16cf84157e..b86e8989047 100644 --- a/tests/contrib/flask/test_signals.py +++ b/tests/contrib/flask/test_signals.py @@ -1,9 +1,9 @@ import flask import mock -from ddtrace import Pin from ddtrace.contrib.internal.flask.patch import flask_version from ddtrace.contrib.internal.flask.patch import unpatch +from ddtrace.trace import Pin from . 
import BaseFlaskTestCase diff --git a/tests/contrib/flask/test_template.py b/tests/contrib/flask/test_template.py index 7231548642e..a38311d3b86 100644 --- a/tests/contrib/flask/test_template.py +++ b/tests/contrib/flask/test_template.py @@ -1,8 +1,8 @@ import flask -from ddtrace import Pin from ddtrace.contrib.internal.flask.patch import flask_version from ddtrace.contrib.internal.flask.patch import unpatch +from ddtrace.trace import Pin from . import BaseFlaskTestCase diff --git a/tests/contrib/flask_autopatch/test_flask_autopatch.py b/tests/contrib/flask_autopatch/test_flask_autopatch.py index 4da8fc921f0..27c4b47e2d0 100644 --- a/tests/contrib/flask_autopatch/test_flask_autopatch.py +++ b/tests/contrib/flask_autopatch/test_flask_autopatch.py @@ -2,9 +2,9 @@ import flask import wrapt -from ddtrace import Pin from ddtrace.contrib.internal.flask.patch import flask_version from ddtrace.ext import http +from ddtrace.trace import Pin from tests.utils import TracerTestCase from tests.utils import assert_is_measured from tests.utils import assert_span_http_status_code diff --git a/tests/contrib/google_generativeai/conftest.py b/tests/contrib/google_generativeai/conftest.py index 6c370a07452..0cadd515f84 100644 --- a/tests/contrib/google_generativeai/conftest.py +++ b/tests/contrib/google_generativeai/conftest.py @@ -6,7 +6,7 @@ from ddtrace.contrib.internal.google_generativeai.patch import patch from ddtrace.contrib.internal.google_generativeai.patch import unpatch from ddtrace.llmobs import LLMObs -from ddtrace.pin import Pin +from ddtrace.trace import Pin from tests.contrib.google_generativeai.utils import MockGenerativeModelAsyncClient from tests.contrib.google_generativeai.utils import MockGenerativeModelClient from tests.utils import DummyTracer diff --git a/tests/contrib/grpc/common.py b/tests/contrib/grpc/common.py index 668a99d2584..e67e4f32a92 100644 --- a/tests/contrib/grpc/common.py +++ b/tests/contrib/grpc/common.py @@ -2,10 +2,10 @@ from 
grpc._grpcio_metadata import __version__ as _GRPC_VERSION from grpc.framework.foundation import logging_pool -from ddtrace import Pin from ddtrace.contrib.internal.grpc import constants from ddtrace.contrib.internal.grpc.patch import patch from ddtrace.contrib.internal.grpc.patch import unpatch +from ddtrace.trace import Pin from tests.utils import TracerTestCase from .hello_pb2_grpc import add_HelloServicer_to_server diff --git a/tests/contrib/grpc/test_grpc.py b/tests/contrib/grpc/test_grpc.py index 8f6bd00b4e3..93dacb4cb45 100644 --- a/tests/contrib/grpc/test_grpc.py +++ b/tests/contrib/grpc/test_grpc.py @@ -5,16 +5,16 @@ from grpc.framework.foundation import logging_pool import pytest -from ddtrace import Pin from ddtrace._trace.span import _get_64_highest_order_bits_as_hex from ddtrace.constants import ERROR_MSG from ddtrace.constants import ERROR_STACK from ddtrace.constants import ERROR_TYPE -from ddtrace.contrib.grpc import constants +from ddtrace.contrib.internal.grpc import constants from ddtrace.contrib.internal.grpc.patch import _unpatch_server from ddtrace.contrib.internal.grpc.patch import patch from ddtrace.contrib.internal.grpc.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ddtrace.trace import Pin from tests.utils import TracerTestCase from tests.utils import flaky from tests.utils import snapshot diff --git a/tests/contrib/grpc_aio/test_grpc_aio.py b/tests/contrib/grpc_aio/test_grpc_aio.py index 1296e027486..885765e91ca 100644 --- a/tests/contrib/grpc_aio/test_grpc_aio.py +++ b/tests/contrib/grpc_aio/test_grpc_aio.py @@ -7,7 +7,6 @@ from grpc import aio import pytest -from ddtrace import Pin from ddtrace.constants import ERROR_MSG from ddtrace.constants import ERROR_STACK from ddtrace.constants import ERROR_TYPE @@ -16,6 +15,7 @@ from ddtrace.contrib.internal.grpc.patch import patch from ddtrace.contrib.internal.grpc.patch import unpatch from ddtrace.contrib.internal.grpc.utils import 
_parse_rpc_repr_string +from ddtrace.trace import Pin import ddtrace.vendor.packaging.version as packaging_version from tests.contrib.grpc.hello_pb2 import HelloReply from tests.contrib.grpc.hello_pb2 import HelloRequest diff --git a/tests/contrib/httplib/test_httplib.py b/tests/contrib/httplib/test_httplib.py index 05977b53cc5..24a5fe3f051 100644 --- a/tests/contrib/httplib/test_httplib.py +++ b/tests/contrib/httplib/test_httplib.py @@ -17,7 +17,7 @@ from ddtrace.internal.compat import parse from ddtrace.internal.constants import _HTTPLIB_NO_TRACE_REQUEST from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME -from ddtrace.pin import Pin +from ddtrace.trace import Pin from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase from tests.utils import assert_span_http_status_code diff --git a/tests/contrib/httplib/test_httplib_distributed.py b/tests/contrib/httplib/test_httplib_distributed.py index 3552f65a51a..40e5e891662 100644 --- a/tests/contrib/httplib/test_httplib_distributed.py +++ b/tests/contrib/httplib/test_httplib_distributed.py @@ -7,7 +7,7 @@ from ddtrace import config from ddtrace._trace.span import _get_64_highest_order_bits_as_hex from ddtrace.internal.compat import httplib -from ddtrace.pin import Pin +from ddtrace.trace import Pin from tests.utils import TracerTestCase from .test_httplib import SOCKET diff --git a/tests/contrib/httpx/test_httpx.py b/tests/contrib/httpx/test_httpx.py index 426099dcf04..33ecadb825f 100644 --- a/tests/contrib/httpx/test_httpx.py +++ b/tests/contrib/httpx/test_httpx.py @@ -6,8 +6,8 @@ from ddtrace.contrib.internal.httpx.patch import HTTPX_VERSION from ddtrace.contrib.internal.httpx.patch import patch from ddtrace.contrib.internal.httpx.patch import unpatch -from ddtrace.pin import Pin from ddtrace.settings.http import HttpConfig +from ddtrace.trace import Pin from tests.utils import flaky from tests.utils import override_config from tests.utils import override_http_config diff --git 
a/tests/contrib/httpx/test_httpx_pre_0_11.py b/tests/contrib/httpx/test_httpx_pre_0_11.py index fc9c003328c..315c53cb29c 100644 --- a/tests/contrib/httpx/test_httpx_pre_0_11.py +++ b/tests/contrib/httpx/test_httpx_pre_0_11.py @@ -6,8 +6,8 @@ from ddtrace.contrib.internal.httpx.patch import HTTPX_VERSION from ddtrace.contrib.internal.httpx.patch import patch from ddtrace.contrib.internal.httpx.patch import unpatch -from ddtrace.pin import Pin from ddtrace.settings.http import HttpConfig +from ddtrace.trace import Pin from tests.utils import override_config from tests.utils import override_http_config diff --git a/tests/contrib/jinja2/test_jinja2.py b/tests/contrib/jinja2/test_jinja2.py index 985c1114383..64002fd6555 100644 --- a/tests/contrib/jinja2/test_jinja2.py +++ b/tests/contrib/jinja2/test_jinja2.py @@ -4,10 +4,10 @@ # 3rd party import jinja2 -from ddtrace import Pin from ddtrace import config from ddtrace.contrib.internal.jinja2.patch import patch from ddtrace.contrib.internal.jinja2.patch import unpatch +from ddtrace.trace import Pin from tests.utils import TracerTestCase from tests.utils import assert_is_measured from tests.utils import assert_is_not_measured diff --git a/tests/contrib/kafka/test_kafka.py b/tests/contrib/kafka/test_kafka.py index f1afc2dbe89..38f6c783fc1 100644 --- a/tests/contrib/kafka/test_kafka.py +++ b/tests/contrib/kafka/test_kafka.py @@ -11,18 +11,18 @@ import mock import pytest -from ddtrace import Pin from ddtrace import Tracer from ddtrace.contrib.internal.kafka.patch import TracedConsumer from ddtrace.contrib.internal.kafka.patch import patch from ddtrace.contrib.internal.kafka.patch import unpatch -from ddtrace.filters import TraceFilter import ddtrace.internal.datastreams # noqa: F401 - used as part of mock patching from ddtrace.internal.datastreams.processor import PROPAGATION_KEY_BASE_64 from ddtrace.internal.datastreams.processor import ConsumerPartitionKey from ddtrace.internal.datastreams.processor import DataStreamsCtx 
from ddtrace.internal.datastreams.processor import PartitionKey from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter +from ddtrace.trace import Pin +from ddtrace.trace import TraceFilter from tests.contrib.config import KAFKA_CONFIG from tests.datastreams.test_public_api import MockedTracer from tests.utils import DummyTracer @@ -518,8 +518,8 @@ def _generate_in_subprocess(random_topic): "auto.offset.reset": "earliest", } ) - ddtrace.Pin.override(producer, tracer=ddtrace.tracer) - ddtrace.Pin.override(consumer, tracer=ddtrace.tracer) + ddtrace.trace.Pin.override(producer, tracer=ddtrace.tracer) + ddtrace.trace.Pin.override(consumer, tracer=ddtrace.tracer) # We run all of these commands with retry attempts because the kafka-confluent API # sys.exits on connection failures, which causes the test to fail. We want to retry @@ -799,7 +799,7 @@ def test_tracing_context_is_propagated_when_enabled(ddtrace_run_python_code_in_s import random import sys -from ddtrace import Pin +from ddtrace.trace import Pin from ddtrace.contrib.internal.kafka.patch import patch from tests.contrib.kafka.test_kafka import consumer @@ -1039,7 +1039,7 @@ def test_does_not_trace_empty_poll_when_disabled(ddtrace_run_python_code_in_subp import random import sys -from ddtrace import Pin +from ddtrace.trace import Pin from ddtrace.contrib.internal.kafka.patch import patch from ddtrace import config diff --git a/tests/contrib/kombu/test.py b/tests/contrib/kombu/test.py index ef27d3a09ac..b56ecdf0d0f 100644 --- a/tests/contrib/kombu/test.py +++ b/tests/contrib/kombu/test.py @@ -2,13 +2,13 @@ import kombu import mock -from ddtrace import Pin from ddtrace.contrib.internal.kombu import utils from ddtrace.contrib.internal.kombu.patch import patch from ddtrace.contrib.internal.kombu.patch import unpatch from ddtrace.ext import kombu as kombux from ddtrace.internal.datastreams.processor import PROPAGATION_KEY_BASE_64 from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from 
ddtrace.trace import Pin from tests.utils import TracerTestCase from tests.utils import assert_is_measured diff --git a/tests/contrib/langchain/conftest.py b/tests/contrib/langchain/conftest.py index f54b040bba7..5184f1093b7 100644 --- a/tests/contrib/langchain/conftest.py +++ b/tests/contrib/langchain/conftest.py @@ -3,9 +3,9 @@ import mock import pytest -from ddtrace import Pin from ddtrace.contrib.internal.langchain.patch import patch from ddtrace.contrib.internal.langchain.patch import unpatch +from ddtrace.trace import Pin from tests.utils import DummyTracer from tests.utils import DummyWriter from tests.utils import override_config diff --git a/tests/contrib/mako/test_mako.py b/tests/contrib/mako/test_mako.py index 9acfaf066ae..7e690b04a43 100644 --- a/tests/contrib/mako/test_mako.py +++ b/tests/contrib/mako/test_mako.py @@ -4,13 +4,13 @@ from mako.runtime import Context from mako.template import Template -from ddtrace import Pin from ddtrace.contrib.internal.mako.constants import DEFAULT_TEMPLATE_NAME from ddtrace.contrib.internal.mako.patch import patch from ddtrace.contrib.internal.mako.patch import unpatch from ddtrace.internal.compat import StringIO from ddtrace.internal.compat import to_unicode from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ddtrace.trace import Pin from tests.utils import TracerTestCase from tests.utils import assert_is_measured diff --git a/tests/contrib/mariadb/test_mariadb.py b/tests/contrib/mariadb/test_mariadb.py index 05a3718d219..7ea8cd27feb 100644 --- a/tests/contrib/mariadb/test_mariadb.py +++ b/tests/contrib/mariadb/test_mariadb.py @@ -4,9 +4,9 @@ import mariadb import pytest -from ddtrace import Pin from ddtrace.contrib.internal.mariadb.patch import patch from ddtrace.contrib.internal.mariadb.patch import unpatch +from ddtrace.trace import Pin from tests.contrib.config import MARIADB_CONFIG from tests.utils import DummyTracer from tests.utils import assert_dict_issuperset diff --git 
a/tests/contrib/molten/test_molten.py b/tests/contrib/molten/test_molten.py index d0b1eb1648f..cc73ceef861 100644 --- a/tests/contrib/molten/test_molten.py +++ b/tests/contrib/molten/test_molten.py @@ -2,7 +2,6 @@ from molten.testing import TestClient import pytest -from ddtrace import Pin from ddtrace import config from ddtrace.constants import ERROR_MSG from ddtrace.contrib.internal.molten.patch import MOLTEN_VERSION @@ -12,6 +11,7 @@ from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME from ddtrace.propagation.http import HTTP_HEADER_PARENT_ID from ddtrace.propagation.http import HTTP_HEADER_TRACE_ID +from ddtrace.trace import Pin from tests.utils import TracerTestCase from tests.utils import assert_is_measured from tests.utils import assert_span_http_status_code diff --git a/tests/contrib/molten/test_molten_di.py b/tests/contrib/molten/test_molten_di.py index f320dd01e64..d360698f4cb 100644 --- a/tests/contrib/molten/test_molten_di.py +++ b/tests/contrib/molten/test_molten_di.py @@ -3,9 +3,9 @@ import molten from molten import DependencyInjector -from ddtrace import Pin from ddtrace.contrib.internal.molten.patch import patch from ddtrace.contrib.internal.molten.patch import unpatch +from ddtrace.trace import Pin from tests.utils import TracerTestCase diff --git a/tests/contrib/mongoengine/test.py b/tests/contrib/mongoengine/test.py index 0f41ac61d8b..b3961e3808c 100644 --- a/tests/contrib/mongoengine/test.py +++ b/tests/contrib/mongoengine/test.py @@ -3,11 +3,11 @@ import mongoengine import pymongo -from ddtrace import Pin from ddtrace.contrib.internal.mongoengine.patch import patch from ddtrace.contrib.internal.mongoengine.patch import unpatch from ddtrace.ext import mongo as mongox from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ddtrace.trace import Pin from tests.opentracer.utils import init_tracer from tests.utils import DummyTracer from tests.utils import TracerTestCase diff --git a/tests/contrib/mysql/test_mysql.py 
b/tests/contrib/mysql/test_mysql.py index 0c80d4a2c57..08626890fac 100644 --- a/tests/contrib/mysql/test_mysql.py +++ b/tests/contrib/mysql/test_mysql.py @@ -1,9 +1,9 @@ import mock import mysql -from ddtrace import Pin from ddtrace.contrib.internal.mysql.patch import patch from ddtrace.contrib.internal.mysql.patch import unpatch +from ddtrace.trace import Pin from tests.contrib import shared_tests from tests.contrib.config import MYSQL_CONFIG from tests.opentracer.utils import init_tracer diff --git a/tests/contrib/mysqldb/test_mysqldb.py b/tests/contrib/mysqldb/test_mysqldb.py index e27163b649a..5d2c98a752c 100644 --- a/tests/contrib/mysqldb/test_mysqldb.py +++ b/tests/contrib/mysqldb/test_mysqldb.py @@ -2,10 +2,10 @@ import MySQLdb import pytest -from ddtrace import Pin from ddtrace.contrib.internal.mysqldb.patch import patch from ddtrace.contrib.internal.mysqldb.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ddtrace.trace import Pin from tests.contrib import shared_tests from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase diff --git a/tests/contrib/openai/conftest.py b/tests/contrib/openai/conftest.py index 0d36a5803bf..02311649bde 100644 --- a/tests/contrib/openai/conftest.py +++ b/tests/contrib/openai/conftest.py @@ -7,11 +7,11 @@ import mock import pytest -from ddtrace import Pin from ddtrace.contrib.internal.openai.patch import patch from ddtrace.contrib.internal.openai.patch import unpatch -from ddtrace.filters import TraceFilter from ddtrace.llmobs import LLMObs +from ddtrace.trace import Pin +from ddtrace.trace import TraceFilter from tests.utils import DummyTracer from tests.utils import DummyWriter from tests.utils import override_config diff --git a/tests/contrib/openai/test_openai_v0.py b/tests/contrib/openai/test_openai_v0.py index 04654f4a4cf..c9ab8aac716 100644 --- a/tests/contrib/openai/test_openai_v0.py +++ b/tests/contrib/openai/test_openai_v0.py @@ -1476,7 +1476,7 @@ 
def test_integration_sync(openai_api_key, ddtrace_run_python_code_in_subprocess) import ddtrace from tests.contrib.openai.conftest import FilterOrg from tests.contrib.openai.test_openai_v0 import get_openai_vcr -pin = ddtrace.Pin.get_from(openai) +pin = ddtrace.trace.Pin.get_from(openai) pin.tracer.configure(settings={"FILTERS": [FilterOrg()]}) with get_openai_vcr(subdirectory_name="v0").use_cassette("completion.yaml"): resp = openai.Completion.create(model="ada", prompt="Hello world", temperature=0.8, n=2, stop=".", max_tokens=10) @@ -1527,7 +1527,7 @@ def test_integration_async(openai_api_key, ddtrace_run_python_code_in_subprocess import ddtrace from tests.contrib.openai.conftest import FilterOrg from tests.contrib.openai.test_openai_v0 import get_openai_vcr -pin = ddtrace.Pin.get_from(openai) +pin = ddtrace.trace.Pin.get_from(openai) pin.tracer.configure(settings={"FILTERS": [FilterOrg()]}) async def task(): with get_openai_vcr(subdirectory_name="v0").use_cassette("completion_async.yaml"): @@ -1900,7 +1900,7 @@ def test_integration_service_name(openai_api_key, ddtrace_run_python_code_in_sub import ddtrace from tests.contrib.openai.conftest import FilterOrg from tests.contrib.openai.test_openai_v0 import get_openai_vcr -pin = ddtrace.Pin.get_from(openai) +pin = ddtrace.trace.Pin.get_from(openai) pin.tracer.configure(settings={"FILTERS": [FilterOrg()]}) with get_openai_vcr(subdirectory_name="v0").use_cassette("completion.yaml"): resp = openai.Completion.create(model="ada", prompt="Hello world", temperature=0.8, n=2, stop=".", max_tokens=10) diff --git a/tests/contrib/openai/test_openai_v1.py b/tests/contrib/openai/test_openai_v1.py index 91737d9e5eb..9c9738cbb69 100644 --- a/tests/contrib/openai/test_openai_v1.py +++ b/tests/contrib/openai/test_openai_v1.py @@ -1111,7 +1111,7 @@ def test_integration_sync(openai_api_key, ddtrace_run_python_code_in_subprocess) import ddtrace from tests.contrib.openai.conftest import FilterOrg from tests.contrib.openai.test_openai_v1 
import get_openai_vcr -pin = ddtrace.Pin.get_from(openai) +pin = ddtrace.trace.Pin.get_from(openai) pin.tracer.configure(settings={"FILTERS": [FilterOrg()]}) with get_openai_vcr(subdirectory_name="v1").use_cassette("completion.yaml"): client = openai.OpenAI() @@ -1159,7 +1159,7 @@ def test_integration_async(openai_api_key, ddtrace_run_python_code_in_subprocess import ddtrace from tests.contrib.openai.conftest import FilterOrg from tests.contrib.openai.test_openai_v1 import get_openai_vcr -pin = ddtrace.Pin.get_from(openai) +pin = ddtrace.trace.Pin.get_from(openai) pin.tracer.configure(settings={"FILTERS": [FilterOrg()]}) async def task(): with get_openai_vcr(subdirectory_name="v1").use_cassette("completion.yaml"): @@ -1547,7 +1547,7 @@ def test_integration_service_name(openai_api_key, ddtrace_run_python_code_in_sub import ddtrace from tests.contrib.openai.conftest import FilterOrg from tests.contrib.openai.test_openai_v1 import get_openai_vcr -pin = ddtrace.Pin.get_from(openai) +pin = ddtrace.trace.Pin.get_from(openai) pin.tracer.configure(settings={"FILTERS": [FilterOrg()]}) with get_openai_vcr(subdirectory_name="v1").use_cassette("completion.yaml"): client = openai.OpenAI() diff --git a/tests/contrib/psycopg/test_psycopg.py b/tests/contrib/psycopg/test_psycopg.py index d7821ca8905..8e13ecc4128 100644 --- a/tests/contrib/psycopg/test_psycopg.py +++ b/tests/contrib/psycopg/test_psycopg.py @@ -8,11 +8,11 @@ from psycopg.sql import Identifier from psycopg.sql import Literal -from ddtrace import Pin from ddtrace.contrib.internal.psycopg.patch import patch from ddtrace.contrib.internal.psycopg.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME from ddtrace.internal.utils.version import parse_version +from ddtrace.trace import Pin from tests.contrib.config import POSTGRES_CONFIG from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase diff --git a/tests/contrib/psycopg/test_psycopg_async.py 
b/tests/contrib/psycopg/test_psycopg_async.py index 7a8654f1626..7e4fbd59624 100644 --- a/tests/contrib/psycopg/test_psycopg_async.py +++ b/tests/contrib/psycopg/test_psycopg_async.py @@ -5,9 +5,9 @@ from psycopg.sql import SQL from psycopg.sql import Literal -from ddtrace import Pin from ddtrace.contrib.internal.psycopg.patch import patch from ddtrace.contrib.internal.psycopg.patch import unpatch +from ddtrace.trace import Pin from tests.contrib.asyncio.utils import AsyncioTestCase from tests.contrib.config import POSTGRES_CONFIG from tests.opentracer.utils import init_tracer diff --git a/tests/contrib/psycopg2/test_psycopg.py b/tests/contrib/psycopg2/test_psycopg.py index eeea555d396..902d24d3c0e 100644 --- a/tests/contrib/psycopg2/test_psycopg.py +++ b/tests/contrib/psycopg2/test_psycopg.py @@ -7,11 +7,11 @@ from psycopg2 import extensions from psycopg2 import extras -from ddtrace import Pin from ddtrace.contrib.internal.psycopg.patch import patch from ddtrace.contrib.internal.psycopg.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME from ddtrace.internal.utils.version import parse_version +from ddtrace.trace import Pin from tests.contrib.config import POSTGRES_CONFIG from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase diff --git a/tests/contrib/pylibmc/test.py b/tests/contrib/pylibmc/test.py index 7a112677c27..9de012439dc 100644 --- a/tests/contrib/pylibmc/test.py +++ b/tests/contrib/pylibmc/test.py @@ -5,12 +5,13 @@ # 3p import pylibmc -# project -from ddtrace import Pin from ddtrace.contrib.internal.pylibmc.client import TracedClient from ddtrace.contrib.internal.pylibmc.patch import patch from ddtrace.contrib.internal.pylibmc.patch import unpatch from ddtrace.ext import memcached + +# project +from ddtrace.trace import Pin from tests.contrib.config import MEMCACHED_CONFIG as cfg from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase diff --git 
a/tests/contrib/pymemcache/test_client.py b/tests/contrib/pymemcache/test_client.py index 07eba3766ed..19a7a93d523 100644 --- a/tests/contrib/pymemcache/test_client.py +++ b/tests/contrib/pymemcache/test_client.py @@ -9,12 +9,13 @@ import pytest import wrapt -# project -from ddtrace import Pin from ddtrace.contrib.internal.pymemcache.client import WrappedClient from ddtrace.contrib.internal.pymemcache.patch import patch from ddtrace.contrib.internal.pymemcache.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME + +# project +from ddtrace.trace import Pin from tests.utils import DummyTracer from tests.utils import TracerTestCase from tests.utils import override_config diff --git a/tests/contrib/pymemcache/test_client_defaults.py b/tests/contrib/pymemcache/test_client_defaults.py index b16414c3c9f..0b5e44aa3b0 100644 --- a/tests/contrib/pymemcache/test_client_defaults.py +++ b/tests/contrib/pymemcache/test_client_defaults.py @@ -2,10 +2,11 @@ import pymemcache import pytest -# project -from ddtrace import Pin from ddtrace.contrib.internal.pymemcache.patch import patch from ddtrace.contrib.internal.pymemcache.patch import unpatch + +# project +from ddtrace.trace import Pin from tests.utils import override_config from .test_client_mixin import TEST_HOST diff --git a/tests/contrib/pymemcache/test_client_mixin.py b/tests/contrib/pymemcache/test_client_mixin.py index 78162fcf93c..2d471765e1f 100644 --- a/tests/contrib/pymemcache/test_client_mixin.py +++ b/tests/contrib/pymemcache/test_client_mixin.py @@ -2,12 +2,13 @@ import pymemcache import pytest -# project -from ddtrace import Pin from ddtrace.contrib.internal.pymemcache.patch import patch from ddtrace.contrib.internal.pymemcache.patch import unpatch from ddtrace.ext import memcached as memcachedx from ddtrace.ext import net + +# project +from ddtrace.trace import Pin from tests.utils import DummyTracer from tests.utils import TracerTestCase from tests.utils import override_config diff 
--git a/tests/contrib/pymongo/test.py b/tests/contrib/pymongo/test.py index f439036fcc1..7c0c1e58140 100644 --- a/tests/contrib/pymongo/test.py +++ b/tests/contrib/pymongo/test.py @@ -3,13 +3,14 @@ import pymongo -# project -from ddtrace import Pin from ddtrace.contrib.internal.pymongo.client import normalize_filter from ddtrace.contrib.internal.pymongo.patch import _CHECKOUT_FN_NAME from ddtrace.contrib.internal.pymongo.patch import patch from ddtrace.contrib.internal.pymongo.patch import unpatch from ddtrace.ext import SpanTypes + +# project +from ddtrace.trace import Pin from tests.opentracer.utils import init_tracer from tests.utils import DummyTracer from tests.utils import TracerTestCase diff --git a/tests/contrib/pymysql/test_pymysql.py b/tests/contrib/pymysql/test_pymysql.py index b8a11e5afa7..e94e03c8395 100644 --- a/tests/contrib/pymysql/test_pymysql.py +++ b/tests/contrib/pymysql/test_pymysql.py @@ -1,10 +1,10 @@ import mock import pymysql -from ddtrace import Pin from ddtrace.contrib.internal.pymysql.patch import patch from ddtrace.contrib.internal.pymysql.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ddtrace.trace import Pin from tests.contrib import shared_tests from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase diff --git a/tests/contrib/pynamodb/test_pynamodb.py b/tests/contrib/pynamodb/test_pynamodb.py index 8f474419bc0..33b4e4c2c14 100644 --- a/tests/contrib/pynamodb/test_pynamodb.py +++ b/tests/contrib/pynamodb/test_pynamodb.py @@ -4,10 +4,10 @@ from pynamodb.connection.base import Connection import pytest -from ddtrace import Pin from ddtrace.contrib.internal.pynamodb.patch import patch from ddtrace.contrib.internal.pynamodb.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ddtrace.trace import Pin from tests.utils import TracerTestCase from tests.utils import assert_is_measured diff --git a/tests/contrib/pyodbc/test_pyodbc.py 
b/tests/contrib/pyodbc/test_pyodbc.py index 8703b0b0391..4c965aede7b 100644 --- a/tests/contrib/pyodbc/test_pyodbc.py +++ b/tests/contrib/pyodbc/test_pyodbc.py @@ -1,9 +1,9 @@ import pyodbc -from ddtrace import Pin from ddtrace.contrib.internal.pyodbc.patch import patch from ddtrace.contrib.internal.pyodbc.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ddtrace.trace import Pin from tests.utils import TracerTestCase from tests.utils import assert_is_measured diff --git a/tests/contrib/pyramid/pserve_app/app/__init__.py b/tests/contrib/pyramid/pserve_app/app/__init__.py index 13b4b58b6ab..69ba7b14d8b 100644 --- a/tests/contrib/pyramid/pserve_app/app/__init__.py +++ b/tests/contrib/pyramid/pserve_app/app/__init__.py @@ -2,7 +2,7 @@ from pyramid.response import Response from ddtrace import tracer -from ddtrace.filters import TraceFilter +from ddtrace.trace import TraceFilter class PingFilter(TraceFilter): diff --git a/tests/contrib/pytest/test_pytest.py b/tests/contrib/pytest/test_pytest.py index eeefa59f714..8dcf9c50bd4 100644 --- a/tests/contrib/pytest/test_pytest.py +++ b/tests/contrib/pytest/test_pytest.py @@ -724,7 +724,7 @@ def test_dd_origin_tag_propagated_to_every_span(self): """ import pytest import ddtrace - from ddtrace import Pin + from ddtrace.trace import Pin def test_service(ddtracer): with ddtracer.trace("SPAN2") as span2: diff --git a/tests/contrib/redis/test_redis.py b/tests/contrib/redis/test_redis.py index 4bd4f896686..233e78cba4e 100644 --- a/tests/contrib/redis/test_redis.py +++ b/tests/contrib/redis/test_redis.py @@ -5,10 +5,10 @@ import redis import ddtrace -from ddtrace import Pin from ddtrace.contrib.internal.redis.patch import patch from ddtrace.contrib.internal.redis.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ddtrace.trace import Pin from tests.opentracer.utils import init_tracer from tests.utils import DummyTracer from tests.utils import TracerTestCase 
diff --git a/tests/contrib/redis/test_redis_asyncio.py b/tests/contrib/redis/test_redis_asyncio.py index 116b0dd2784..77a809392cd 100644 --- a/tests/contrib/redis/test_redis_asyncio.py +++ b/tests/contrib/redis/test_redis_asyncio.py @@ -7,10 +7,10 @@ import redis.asyncio from wrapt import ObjectProxy -from ddtrace import Pin from ddtrace import tracer from ddtrace.contrib.internal.redis.patch import patch from ddtrace.contrib.internal.redis.patch import unpatch +from ddtrace.trace import Pin from tests.utils import override_config from ..config import REDIS_CONFIG diff --git a/tests/contrib/redis/test_redis_cluster.py b/tests/contrib/redis/test_redis_cluster.py index dcf58c2d00a..2731a18fcee 100644 --- a/tests/contrib/redis/test_redis_cluster.py +++ b/tests/contrib/redis/test_redis_cluster.py @@ -2,10 +2,10 @@ import pytest import redis -from ddtrace import Pin from ddtrace.contrib.internal.redis.patch import patch from ddtrace.contrib.internal.redis.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ddtrace.trace import Pin from tests.contrib.config import REDISCLUSTER_CONFIG from tests.utils import DummyTracer from tests.utils import TracerTestCase diff --git a/tests/contrib/redis/test_redis_cluster_asyncio.py b/tests/contrib/redis/test_redis_cluster_asyncio.py index 03815073723..b8624c533aa 100644 --- a/tests/contrib/redis/test_redis_cluster_asyncio.py +++ b/tests/contrib/redis/test_redis_cluster_asyncio.py @@ -2,9 +2,9 @@ import pytest import redis -from ddtrace import Pin from ddtrace.contrib.internal.redis.patch import patch from ddtrace.contrib.internal.redis.patch import unpatch +from ddtrace.trace import Pin from tests.contrib.config import REDISCLUSTER_CONFIG from tests.utils import DummyTracer from tests.utils import assert_is_measured @@ -164,9 +164,9 @@ def test_default_service_name_v1(): import redis - from ddtrace import Pin from ddtrace.contrib.internal.redis.patch import patch from ddtrace.internal.schema 
import DEFAULT_SPAN_SERVICE_NAME + from ddtrace.trace import Pin from tests.contrib.config import REDISCLUSTER_CONFIG from tests.utils import DummyTracer from tests.utils import TracerSpanContainer @@ -210,9 +210,9 @@ def test_user_specified_service_v0(): import redis - from ddtrace import Pin from ddtrace import config from ddtrace.contrib.internal.redis.patch import patch + from ddtrace.trace import Pin from tests.contrib.config import REDISCLUSTER_CONFIG from tests.utils import DummyTracer from tests.utils import TracerSpanContainer @@ -259,9 +259,9 @@ def test_user_specified_service_v1(): import redis - from ddtrace import Pin from ddtrace import config from ddtrace.contrib.internal.redis.patch import patch + from ddtrace.trace import Pin from tests.contrib.config import REDISCLUSTER_CONFIG from tests.utils import DummyTracer from tests.utils import TracerSpanContainer @@ -304,8 +304,8 @@ def test_env_user_specified_rediscluster_service_v0(): import redis - from ddtrace import Pin from ddtrace.contrib.internal.redis.patch import patch + from ddtrace.trace import Pin from tests.contrib.config import REDISCLUSTER_CONFIG from tests.utils import DummyTracer from tests.utils import TracerSpanContainer @@ -345,8 +345,8 @@ def test_env_user_specified_rediscluster_service_v1(): import redis - from ddtrace import Pin from ddtrace.contrib.internal.redis.patch import patch + from ddtrace.trace import Pin from tests.contrib.config import REDISCLUSTER_CONFIG from tests.utils import DummyTracer from tests.utils import TracerSpanContainer @@ -390,9 +390,9 @@ def test_service_precedence_v0(): import redis - from ddtrace import Pin from ddtrace import config from ddtrace.contrib.internal.redis.patch import patch + from ddtrace.trace import Pin from tests.contrib.config import REDISCLUSTER_CONFIG from tests.utils import DummyTracer from tests.utils import TracerSpanContainer @@ -435,9 +435,9 @@ def test_service_precedence_v1(): import redis - from ddtrace import Pin from ddtrace 
import config from ddtrace.contrib.internal.redis.patch import patch + from ddtrace.trace import Pin from tests.contrib.config import REDISCLUSTER_CONFIG from tests.utils import DummyTracer from tests.utils import TracerSpanContainer diff --git a/tests/contrib/rediscluster/test.py b/tests/contrib/rediscluster/test.py index 1ba8881e161..a2c5ac5c6b2 100644 --- a/tests/contrib/rediscluster/test.py +++ b/tests/contrib/rediscluster/test.py @@ -2,11 +2,11 @@ import pytest import rediscluster -from ddtrace import Pin from ddtrace.contrib.internal.rediscluster.patch import REDISCLUSTER_VERSION from ddtrace.contrib.internal.rediscluster.patch import patch from ddtrace.contrib.internal.rediscluster.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ddtrace.trace import Pin from tests.contrib.config import REDISCLUSTER_CONFIG from tests.utils import DummyTracer from tests.utils import TracerTestCase diff --git a/tests/contrib/rq/test_rq.py b/tests/contrib/rq/test_rq.py index d2efb996227..d72871823da 100644 --- a/tests/contrib/rq/test_rq.py +++ b/tests/contrib/rq/test_rq.py @@ -6,10 +6,10 @@ import redis import rq -from ddtrace import Pin from ddtrace.contrib.internal.rq.patch import get_version from ddtrace.contrib.internal.rq.patch import patch from ddtrace.contrib.internal.rq.patch import unpatch +from ddtrace.trace import Pin from tests.contrib.patch import emit_integration_and_version_to_test_agent from tests.utils import override_config from tests.utils import snapshot diff --git a/tests/contrib/shared_tests.py b/tests/contrib/shared_tests.py index a7659374693..cf647a15628 100644 --- a/tests/contrib/shared_tests.py +++ b/tests/contrib/shared_tests.py @@ -1,4 +1,4 @@ -from ddtrace import Pin +from ddtrace.trace import Pin # DBM Shared Tests diff --git a/tests/contrib/shared_tests_async.py b/tests/contrib/shared_tests_async.py index 97d1df32cfa..0d49f09d608 100644 --- a/tests/contrib/shared_tests_async.py +++ 
b/tests/contrib/shared_tests_async.py @@ -1,4 +1,4 @@ -from ddtrace import Pin +from ddtrace.trace import Pin # DBM Shared Tests diff --git a/tests/contrib/snowflake/test_snowflake.py b/tests/contrib/snowflake/test_snowflake.py index cdb735fb947..9762804651d 100644 --- a/tests/contrib/snowflake/test_snowflake.py +++ b/tests/contrib/snowflake/test_snowflake.py @@ -6,10 +6,10 @@ import responses import snowflake.connector -from ddtrace import Pin from ddtrace import tracer from ddtrace.contrib.internal.snowflake.patch import patch from ddtrace.contrib.internal.snowflake.patch import unpatch +from ddtrace.trace import Pin from tests.opentracer.utils import init_tracer from tests.utils import override_config from tests.utils import snapshot diff --git a/tests/contrib/sqlalchemy/test_patch.py b/tests/contrib/sqlalchemy/test_patch.py index f33c4bdd8a5..a6f08bb5f46 100644 --- a/tests/contrib/sqlalchemy/test_patch.py +++ b/tests/contrib/sqlalchemy/test_patch.py @@ -1,10 +1,10 @@ import sqlalchemy from sqlalchemy import text -from ddtrace import Pin from ddtrace.contrib.internal.sqlalchemy.patch import get_version from ddtrace.contrib.internal.sqlalchemy.patch import patch from ddtrace.contrib.internal.sqlalchemy.patch import unpatch +from ddtrace.trace import Pin from tests.contrib.patch import emit_integration_and_version_to_test_agent from tests.utils import TracerTestCase from tests.utils import assert_is_measured diff --git a/tests/contrib/sqlite3/test_sqlite3.py b/tests/contrib/sqlite3/test_sqlite3.py index f10452cb8da..6101dcfa081 100644 --- a/tests/contrib/sqlite3/test_sqlite3.py +++ b/tests/contrib/sqlite3/test_sqlite3.py @@ -13,7 +13,6 @@ import pytest import ddtrace -from ddtrace import Pin from ddtrace.constants import ERROR_MSG from ddtrace.constants import ERROR_STACK from ddtrace.constants import ERROR_TYPE @@ -21,6 +20,7 @@ from ddtrace.contrib.internal.sqlite3.patch import patch from ddtrace.contrib.internal.sqlite3.patch import unpatch from 
ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ddtrace.trace import Pin from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase from tests.utils import assert_is_measured diff --git a/tests/contrib/starlette/test_starlette.py b/tests/contrib/starlette/test_starlette.py index 7d56c71c3f8..f290ade8ea7 100644 --- a/tests/contrib/starlette/test_starlette.py +++ b/tests/contrib/starlette/test_starlette.py @@ -8,13 +8,13 @@ from starlette.testclient import TestClient import ddtrace -from ddtrace import Pin from ddtrace.constants import ERROR_MSG from ddtrace.contrib.internal.sqlalchemy.patch import patch as sql_patch from ddtrace.contrib.internal.sqlalchemy.patch import unpatch as sql_unpatch from ddtrace.contrib.internal.starlette.patch import patch as starlette_patch from ddtrace.contrib.internal.starlette.patch import unpatch as starlette_unpatch from ddtrace.propagation import http as http_propagation +from ddtrace.trace import Pin from tests.contrib.starlette.app import get_app from tests.utils import DummyTracer from tests.utils import TracerSpanContainer diff --git a/tests/contrib/subprocess/test_subprocess.py b/tests/contrib/subprocess/test_subprocess.py index a6ff5a3a9bd..40e7ab67431 100644 --- a/tests/contrib/subprocess/test_subprocess.py +++ b/tests/contrib/subprocess/test_subprocess.py @@ -4,13 +4,13 @@ import pytest -from ddtrace import Pin from ddtrace.contrib.internal.subprocess.constants import COMMANDS from ddtrace.contrib.internal.subprocess.patch import SubprocessCmdLine from ddtrace.contrib.internal.subprocess.patch import patch from ddtrace.contrib.internal.subprocess.patch import unpatch from ddtrace.ext import SpanTypes from ddtrace.internal import core +from ddtrace.trace import Pin from tests.utils import override_config from tests.utils import override_global_config diff --git a/tests/contrib/tornado/test_config.py b/tests/contrib/tornado/test_config.py index d91a2e95912..718bf48d1dc 100644 --- 
a/tests/contrib/tornado/test_config.py +++ b/tests/contrib/tornado/test_config.py @@ -1,5 +1,5 @@ from ddtrace._trace.tracer import Tracer -from ddtrace.filters import TraceFilter +from ddtrace.trace import TraceFilter from tests.utils import DummyWriter from .utils import TornadoTestCase diff --git a/tests/contrib/urllib3/test_urllib3.py b/tests/contrib/urllib3/test_urllib3.py index 256104a80f5..53c9b12bf6b 100644 --- a/tests/contrib/urllib3/test_urllib3.py +++ b/tests/contrib/urllib3/test_urllib3.py @@ -11,8 +11,8 @@ from ddtrace.contrib.internal.urllib3.patch import unpatch from ddtrace.ext import http from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME -from ddtrace.pin import Pin from ddtrace.settings.asm import config as asm_config +from ddtrace.trace import Pin from tests.contrib.config import HTTPBIN_CONFIG from tests.opentracer.utils import init_tracer from tests.utils import TracerTestCase diff --git a/tests/contrib/vertexai/conftest.py b/tests/contrib/vertexai/conftest.py index d048683a5c7..d5e689137ff 100644 --- a/tests/contrib/vertexai/conftest.py +++ b/tests/contrib/vertexai/conftest.py @@ -5,7 +5,7 @@ from ddtrace.contrib.internal.vertexai.patch import patch from ddtrace.contrib.internal.vertexai.patch import unpatch from ddtrace.llmobs import LLMObs -from ddtrace.pin import Pin +from ddtrace.trace import Pin from tests.contrib.vertexai.utils import MockAsyncPredictionServiceClient from tests.contrib.vertexai.utils import MockPredictionServiceClient from tests.utils import DummyTracer diff --git a/tests/contrib/vertica/test_vertica.py b/tests/contrib/vertica/test_vertica.py index f0c47887f9e..196e1621ee5 100644 --- a/tests/contrib/vertica/test_vertica.py +++ b/tests/contrib/vertica/test_vertica.py @@ -2,7 +2,6 @@ import wrapt import ddtrace -from ddtrace import Pin from ddtrace import config from ddtrace.constants import ERROR_MSG from ddtrace.constants import ERROR_STACK @@ -11,6 +10,7 @@ from ddtrace.contrib.internal.vertica.patch import 
unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME from ddtrace.settings.config import _deepmerge +from ddtrace.trace import Pin from tests.contrib.config import VERTICA_CONFIG from tests.opentracer.utils import init_tracer from tests.utils import DummyTracer diff --git a/tests/contrib/yaaredis/test_yaaredis.py b/tests/contrib/yaaredis/test_yaaredis.py index 17903e4ec3d..350b323de9c 100644 --- a/tests/contrib/yaaredis/test_yaaredis.py +++ b/tests/contrib/yaaredis/test_yaaredis.py @@ -6,9 +6,9 @@ from wrapt import ObjectProxy import yaaredis -from ddtrace import Pin from ddtrace.contrib.internal.yaaredis.patch import patch from ddtrace.contrib.internal.yaaredis.patch import unpatch +from ddtrace.trace import Pin from tests.opentracer.utils import init_tracer from tests.utils import override_config diff --git a/tests/telemetry/test_telemetry.py b/tests/telemetry/test_telemetry.py index 558e9961afc..f128e695c67 100644 --- a/tests/telemetry/test_telemetry.py +++ b/tests/telemetry/test_telemetry.py @@ -148,7 +148,7 @@ def test_app_started_error_handled_exception(test_agent_session, run_python_code logging.basicConfig() from ddtrace import tracer -from ddtrace.filters import TraceFilter +from ddtrace.trace import TraceFilter class FailingFilture(TraceFilter): def process_trace(self, trace): diff --git a/tests/tracer/runtime/test_tag_collectors.py b/tests/tracer/runtime/test_tag_collectors.py index 3889b7b7e15..c15590d2daf 100644 --- a/tests/tracer/runtime/test_tag_collectors.py +++ b/tests/tracer/runtime/test_tag_collectors.py @@ -77,8 +77,8 @@ def test_tracer_tags_config(): def test_tracer_tags_service_from_code(): """Ensure we collect the expected tags for the TracerTagCollector""" import ddtrace - from ddtrace.filters import TraceFilter from ddtrace.internal.runtime import tag_collectors + from ddtrace.trace import TraceFilter from tests.conftest import DEFAULT_DDTRACE_SUBPROCESS_TEST_SERVICE_NAME class DropFilter(TraceFilter): diff --git 
a/tests/tracer/test_filters.py b/tests/tracer/test_filters.py index 73861f8d3a2..871405517b7 100644 --- a/tests/tracer/test_filters.py +++ b/tests/tracer/test_filters.py @@ -4,8 +4,8 @@ from ddtrace._trace.span import Span from ddtrace.ext.http import URL -from ddtrace.filters import FilterRequestsOnUrl -from ddtrace.filters import TraceFilter +from ddtrace.trace import FilterRequestsOnUrl +from ddtrace.trace import TraceFilter class FilterRequestOnUrlTests(TestCase): diff --git a/tests/tracer/test_instance_config.py b/tests/tracer/test_instance_config.py index fb5235a8d77..457bf53a408 100644 --- a/tests/tracer/test_instance_config.py +++ b/tests/tracer/test_instance_config.py @@ -1,8 +1,8 @@ from unittest import TestCase from ddtrace import config -from ddtrace.pin import Pin from ddtrace.settings import IntegrationConfig +from ddtrace.trace import Pin class InstanceConfigTestCase(TestCase): diff --git a/tests/tracer/test_pin.py b/tests/tracer/test_pin.py index a1b83ca4c37..47712d2f421 100644 --- a/tests/tracer/test_pin.py +++ b/tests/tracer/test_pin.py @@ -2,7 +2,7 @@ import pytest -from ddtrace import Pin +from ddtrace.trace import Pin class PinTestCase(TestCase): diff --git a/tests/tracer/test_trace_utils.py b/tests/tracer/test_trace_utils.py index e9869c13b17..a7604636b62 100644 --- a/tests/tracer/test_trace_utils.py +++ b/tests/tracer/test_trace_utils.py @@ -13,7 +13,6 @@ import mock import pytest -from ddtrace import Pin from ddtrace import Tracer from ddtrace import config from ddtrace._trace.context import Context @@ -29,6 +28,7 @@ from ddtrace.propagation.http import HTTP_HEADER_TRACE_ID from ddtrace.settings import Config from ddtrace.settings import IntegrationConfig +from ddtrace.trace import Pin from tests.appsec.utils import asm_context from tests.utils import override_global_config diff --git a/tests/webclient.py b/tests/webclient.py index 7254a0896dd..33e5751baf6 100644 --- a/tests/webclient.py +++ b/tests/webclient.py @@ -3,9 +3,9 @@ import 
requests from ddtrace._trace.context import Context -from ddtrace.filters import TraceFilter from ddtrace.internal.utils.retry import retry from ddtrace.propagation.http import HTTPPropagator +from ddtrace.trace import TraceFilter class Client(object): From bbc1bd814b195a15916adad5f27a278f9dc6bcef Mon Sep 17 00:00:00 2001 From: lievan <42917263+lievan@users.noreply.github.com> Date: Wed, 15 Jan 2025 14:32:34 -0500 Subject: [PATCH 07/16] chore(llmobs): implement ragas context precision (#11716) Implements context precision metric for ragas integration. ## About context precision [Context Precision](https://docs.ragas.io/en/latest/concepts/metrics/available_metrics/context_precision/) is a metric that verifies if the context was useful in arriving at the given answer. We compute this by dividing the number of relevant contexts by the total number of contexts. Note that this is slightly modified from the original context precision metric in ragas, which computes the mean of the precision @ rank k for each chunk in the context (where k is the number of retrieved context chunks). **stage 1 - input extraction** We must extract a question, contexts, and answer from an LLM span. _Question_ - same as faithfulness (last input message or `question` prompt variable) _Contexts_ - uses a **list** of context texts that ground the generation. Defaults to just the context prompt variable being the single item in the list or user defined prompt context variables _Answer_ - first output message for the llm span **stage 2 - prompt creation / llm calls** For each context, create a prompt that asks an LLM if the context was used in the generated answer & execute llm calls for these prompts. **stage 3 - score computation** Divide the number of "used" contexts by total number of contexts **why do we modify?** The ragas context precision metric is a mean of context precision @ k, where k is number of chunks returned. This uses number of relevant context docs as the denominator. 
We only have access to the context information on an LLM call and not at the retrieval step. For most users, there should only be one "context" being used to ground the LLM call. So if we use the original ragas formula, the denominator may frequently be zero if the LLM decided the context was not used. Thus, we revise the metric to answer a simple question: - was my context used in the llm's generation (in the case of a single context) - what proportion of my contexts were used in the llm's generation (in the case of multiple contexts) ### Example trace image ## Checklist - [x] PR author has checked that all the criteria below are met - The PR description includes an overview of the change - The PR description articulates the motivation for the change - The change includes tests OR the PR description describes a testing strategy - The PR description notes risks associated with the change, if any - Newly-added code is easy to change - The change follows the [library release note guidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html) - The change includes or references documentation updates if necessary - Backport labels are set (if [applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)) ## Reviewer Checklist - [ ] Reviewer has checked that all the criteria below are met - Title is accurate - All changes are related to the pull request's stated goal - Avoids breaking [API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces) changes - Testing strategy adequately addresses listed risks - Newly-added code is easy to change - Release note makes sense to a user of the library - If necessary, author has acknowledged and discussed the performance implications of this PR as reported in the benchmarks PR comment - Backport labels are set in a manner that is consistent with the [release branch maintenance policy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting) --------- Co-authored-by: 
lievan Co-authored-by: Quinna Halim --- ddtrace/llmobs/_evaluators/ragas/base.py | 4 + .../_evaluators/ragas/context_precision.py | 153 +++++++++ ddtrace/llmobs/_evaluators/ragas/models.py | 7 + ddtrace/llmobs/_evaluators/runner.py | 2 + tests/llmobs/_utils.py | 2 + ...as_context_precision_multiple_context.yaml | 312 ++++++++++++++++++ ...agas_context_precision_single_context.yaml | 160 +++++++++ tests/llmobs/test_llmobs_ragas_evaluators.py | 193 +++++++++++ 8 files changed, 833 insertions(+) create mode 100644 ddtrace/llmobs/_evaluators/ragas/context_precision.py create mode 100644 tests/llmobs/llmobs_cassettes/tests.llmobs.test_llmobs_ragas_evaluators.test_ragas_context_precision_multiple_context.yaml create mode 100644 tests/llmobs/llmobs_cassettes/tests.llmobs.test_llmobs_ragas_evaluators.test_ragas_context_precision_single_context.yaml diff --git a/ddtrace/llmobs/_evaluators/ragas/base.py b/ddtrace/llmobs/_evaluators/ragas/base.py index 10e89165a01..1c4840e9485 100644 --- a/ddtrace/llmobs/_evaluators/ragas/base.py +++ b/ddtrace/llmobs/_evaluators/ragas/base.py @@ -56,6 +56,10 @@ def __init__(self): self.get_segmenter = get_segmenter + from ddtrace.llmobs._evaluators.ragas.models import ContextPrecisionVerification + + self.ContextPrecisionVerification = ContextPrecisionVerification + from ddtrace.llmobs._evaluators.ragas.models import StatementFaithfulnessAnswers self.StatementFaithfulnessAnswers = StatementFaithfulnessAnswers diff --git a/ddtrace/llmobs/_evaluators/ragas/context_precision.py b/ddtrace/llmobs/_evaluators/ragas/context_precision.py new file mode 100644 index 00000000000..990302931c8 --- /dev/null +++ b/ddtrace/llmobs/_evaluators/ragas/context_precision.py @@ -0,0 +1,153 @@ +import math +from typing import Optional +from typing import Tuple +from typing import Union + +from ddtrace.internal.logger import get_logger +from ddtrace.llmobs._constants import EVALUATION_KIND_METADATA +from ddtrace.llmobs._constants import EVALUATION_SPAN_METADATA 
+from ddtrace.llmobs._evaluators.ragas.base import BaseRagasEvaluator +from ddtrace.llmobs._evaluators.ragas.base import _get_ml_app_for_ragas_trace + + +logger = get_logger(__name__) + + +class RagasContextPrecisionEvaluator(BaseRagasEvaluator): + """ + A class used by EvaluatorRunner to conduct ragas context precision evaluations + on LLM Observability span events. + """ + + LABEL = "ragas_context_precision" + METRIC_TYPE = "score" + + def __init__(self, llmobs_service): + """ + Initialize an evaluator that uses the ragas library to generate a context precision score on finished LLM spans. + + Context Precision is a metric that verifies if the context was useful in arriving at the given answer. + We compute this by dividing the number of relevant contexts by the total number of contexts. + Note that this is slightly modified from the original context precision metric in ragas, which computes + the mean of the precision @ rank k for each chunk in the context (where k is the number of + retrieved context chunks). + + For more information, see https://docs.ragas.io/en/latest/concepts/metrics/available_metrics/context_precision/ + + The `ragas.metrics.context_precision` instance is used for context precision scores. + If there is no llm attribute set on this instance, it will be set to the + default `llm_factory()` which uses openai. + + :param llmobs_service: An instance of the LLM Observability service used for tracing the evaluation and + submitting evaluation metrics. + + Raises: NotImplementedError if the ragas library is not found or if ragas version is not supported. 
+ """ + super().__init__(llmobs_service) + self.ragas_context_precision_instance = self._get_context_precision_instance() + self.context_precision_output_parser = self.ragas_dependencies.RagasoutputParser( + pydantic_object=self.ragas_dependencies.ContextPrecisionVerification + ) + + def _get_context_precision_instance(self): + """ + This helper function ensures the context precision instance used in + ragas evaluator is updated with the latest ragas context precision instance + and has a non-null llm + """ + if self.ragas_dependencies.context_precision is None: + return None + ragas_context_precision_instance = self.ragas_dependencies.context_precision + if not ragas_context_precision_instance.llm: + ragas_context_precision_instance.llm = self.ragas_dependencies.llm_factory() + return ragas_context_precision_instance + + def evaluate(self, span_event: dict) -> Tuple[Union[float, str], Optional[dict]]: + """ + Performs a context precision evaluation on an llm span event, returning either + - context precision score (float) OR failure reason (str) + - evaluation metadata (dict) + If the ragas context precision instance does not have `llm` set, we set `llm` using the `llm_factory()` + method from ragas which currently defaults to openai's gpt-4o-mini.
+ """ + self.ragas_context_precision_instance = self._get_context_precision_instance() + if not self.ragas_context_precision_instance: + return "fail_context_precision_is_none", {} + + evaluation_metadata = {EVALUATION_KIND_METADATA: "context_precision"} # type: dict[str, Union[str, dict, list]] + + # initialize data we annotate for tracing ragas + score = math.nan + + with self.llmobs_service.workflow( + "dd-ragas.context_precision", ml_app=_get_ml_app_for_ragas_trace(span_event) + ) as ragas_cp_workflow: + try: + evaluation_metadata[EVALUATION_SPAN_METADATA] = self.llmobs_service.export_span(span=ragas_cp_workflow) + + ctx_precision_inputs = self._extract_evaluation_inputs_from_span(span_event) + if ctx_precision_inputs is None: + logger.debug( + "Failed to extract evaluation inputs from " + "span sampled for `ragas_context_precision` evaluation" + ) + return "fail_extract_context_precision_inputs", evaluation_metadata + + # create a prompt to evaluate the relevancy of each context chunk + context_precision_prompts = [ + self.ragas_context_precision_instance.context_precision_prompt.format( + question=ctx_precision_inputs["question"], + context=c, + answer=ctx_precision_inputs["answer"], + ) + for c in ctx_precision_inputs["contexts"] + ] + + responses = [] + + for prompt in context_precision_prompts: + result = self.ragas_context_precision_instance.llm.generate_text(prompt) + reproducibility = getattr(self.ragas_context_precision_instance, "_reproducibility", 1) + + results = [result.generations[0][i].text for i in range(reproducibility)] + try: + responses.append( + [ + res.dict() + for res in [self.context_precision_output_parser.parse(text) for text in results] + if res is not None + ] + ) + except Exception as e: + logger.debug( + "Failed to parse context precision verification for `ragas_context_precision`", + exc_info=e, + ) + return "fail_context_precision_parsing", evaluation_metadata + + answers = [] + for response in responses: + agg_answer = 
self.ragas_dependencies.ensembler.from_discrete([response], "verdict") + if agg_answer: + try: + agg_answer = self.ragas_dependencies.ContextPrecisionVerification.parse_obj(agg_answer[0]) + except Exception as e: + logger.debug( + "Failed to parse context precision verification for `ragas_context_precision`", + exc_info=e, + ) + return "fail_context_precision_parsing", evaluation_metadata + answers.append(agg_answer) + + if len(answers) == 0: + return "fail_no_answers", evaluation_metadata + + verdict_list = [1 if ver.verdict else 0 for ver in answers] + score = sum(verdict_list) / len(verdict_list) + return score, evaluation_metadata + finally: + self.llmobs_service.annotate( + span=ragas_cp_workflow, + input_data=span_event, + output_data=score, + ) diff --git a/ddtrace/llmobs/_evaluators/ragas/models.py b/ddtrace/llmobs/_evaluators/ragas/models.py index 5ee4d433c33..9886c7cf1d3 100644 --- a/ddtrace/llmobs/_evaluators/ragas/models.py +++ b/ddtrace/llmobs/_evaluators/ragas/models.py @@ -11,6 +11,13 @@ """ +class ContextPrecisionVerification(BaseModel): + """Answer for the verification task whether the context was useful.""" + + reason: str = Field(..., description="Reason for verification") + verdict: int = Field(..., description="Binary (0/1) verdict of verification") + + class StatementFaithfulnessAnswer(BaseModel): statement: str = Field(..., description="the original statement, word-by-word") reason: str = Field(..., description="the reason of the verdict") diff --git a/ddtrace/llmobs/_evaluators/runner.py b/ddtrace/llmobs/_evaluators/runner.py index ffbe4a58d64..099887bea8c 100644 --- a/ddtrace/llmobs/_evaluators/runner.py +++ b/ddtrace/llmobs/_evaluators/runner.py @@ -8,6 +8,7 @@ from ddtrace.internal.periodic import PeriodicService from ddtrace.internal.telemetry import telemetry_writer from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE +from ddtrace.llmobs._evaluators.ragas.context_precision import RagasContextPrecisionEvaluator from 
ddtrace.llmobs._evaluators.ragas.faithfulness import RagasFaithfulnessEvaluator from ddtrace.llmobs._evaluators.sampler import EvaluatorRunnerSampler @@ -17,6 +18,7 @@ SUPPORTED_EVALUATORS = { RagasFaithfulnessEvaluator.LABEL: RagasFaithfulnessEvaluator, + RagasContextPrecisionEvaluator.LABEL: RagasContextPrecisionEvaluator, } diff --git a/tests/llmobs/_utils.py b/tests/llmobs/_utils.py index 4e60a8f3996..f35097f7abc 100644 --- a/tests/llmobs/_utils.py +++ b/tests/llmobs/_utils.py @@ -573,6 +573,7 @@ def _expected_ragas_context_precision_spans(ragas_inputs=None): "span.kind": "workflow", "input": {"value": mock.ANY}, "output": {"value": "1.0"}, + "metadata": {}, }, "metrics": {}, "tags": expected_ragas_trace_tags(), @@ -589,6 +590,7 @@ def _expected_ragas_context_precision_spans(ragas_inputs=None): "span.kind": "workflow", "input": {"value": mock.ANY}, "output": {"value": mock.ANY}, + "metadata": {}, }, "metrics": {}, "tags": expected_ragas_trace_tags(), diff --git a/tests/llmobs/llmobs_cassettes/tests.llmobs.test_llmobs_ragas_evaluators.test_ragas_context_precision_multiple_context.yaml b/tests/llmobs/llmobs_cassettes/tests.llmobs.test_llmobs_ragas_evaluators.test_ragas_context_precision_multiple_context.yaml new file mode 100644 index 00000000000..a78852c8d64 --- /dev/null +++ b/tests/llmobs/llmobs_cassettes/tests.llmobs.test_llmobs_ragas_evaluators.test_ragas_context_precision_multiple_context.yaml @@ -0,0 +1,312 @@ +interactions: +- request: + body: '{"messages": [{"content": "Given question, answer and context verify if + the context was useful in arriving at the given answer. 
Give verdict as \"1\" + if useful and \"0\" if not with json output.\n\nThe output should be a well-formatted + JSON instance that conforms to the JSON schema below.\n\nAs an example, for + the schema {\"properties\": {\"foo\": {\"title\": \"Foo\", \"description\": + \"a list of strings\", \"type\": \"array\", \"items\": {\"type\": \"string\"}}}, + \"required\": [\"foo\"]}\nthe object {\"foo\": [\"bar\", \"baz\"]} is a well-formatted + instance of the schema. The object {\"properties\": {\"foo\": [\"bar\", \"baz\"]}} + is not well-formatted.\n\nHere is the output JSON schema:\n```\n{\"description\": + \"Answer for the verification task wether the context was useful.\", \"type\": + \"object\", \"properties\": {\"reason\": {\"title\": \"Reason\", \"description\": + \"Reason for verification\", \"type\": \"string\"}, \"verdict\": {\"title\": + \"Verdict\", \"description\": \"Binary (0/1) verdict of verification\", \"type\": + \"integer\"}}, \"required\": [\"reason\", \"verdict\"]}\n```\n\nDo not return + any preamble or explanations, return only a pure JSON string surrounded by triple + backticks (```).\n\nExamples:\n\nquestion: \"What can you tell me about albert + Albert Einstein?\"\ncontext: \"Albert Einstein (14 March 1879 \u2013 18 April + 1955) was a German-born theoretical physicist, widely held to be one of the + greatest and most influential scientists of all time. Best known for developing + the theory of relativity, he also made important contributions to quantum mechanics, + and was thus a central figure in the revolutionary reshaping of the scientific + understanding of nature that modern physics accomplished in the first decades + of the twentieth century. His mass\u2013energy equivalence formula E = mc2, + which arises from relativity theory, has been called \\\"the world''s most famous + equation\\\". 
He received the 1921 Nobel Prize in Physics \\\"for his services + to theoretical physics, and especially for his discovery of the law of the photoelectric + effect\\\", a pivotal step in the development of quantum theory. His work is + also known for its influence on the philosophy of science. In a 1999 poll of + 130 leading physicists worldwide by the British journal Physics World, Einstein + was ranked the greatest physicist of all time. His intellectual achievements + and originality have made Einstein synonymous with genius.\"\nanswer: \"Albert + Einstein born in 14 March 1879 was German-born theoretical physicist, widely + held to be one of the greatest and most influential scientists of all time. + He received the 1921 Nobel Prize in Physics for his services to theoretical + physics. He published 4 papers in 1905. Einstein moved to Switzerland in 1895\"\nverification: + ```{\"reason\": \"The provided context was indeed useful in arriving at the + given answer. The context includes key information about Albert Einstein''s + life and contributions, which are reflected in the answer.\", \"verdict\": 1}```\n\nquestion: + \"who won 2020 icc world cup?\"\ncontext: \"The 2022 ICC Men''s T20 World Cup, + held from October 16 to November 13, 2022, in Australia, was the eighth edition + of the tournament. Originally scheduled for 2020, it was postponed due to the + COVID-19 pandemic. 
England emerged victorious, defeating Pakistan by five wickets + in the final to clinch their second ICC Men''s T20 World Cup title.\"\nanswer: + \"England\"\nverification: ```{\"reason\": \"the context was useful in clarifying + the situation regarding the 2020 ICC World Cup and indicating that England was + the winner of the tournament that was intended to be held in 2020 but actually + took place in 2022.\", \"verdict\": 1}```\n\nquestion: \"What is the tallest + mountain in the world?\"\ncontext: \"The Andes is the longest continental mountain + range in the world, located in South America. It stretches across seven countries + and features many of the highest peaks in the Western Hemisphere. The range + is known for its diverse ecosystems, including the high-altitude Andean Plateau + and the Amazon rainforest.\"\nanswer: \"Mount Everest.\"\nverification: ```{\"reason\": + \"the provided context discusses the Andes mountain range, which, while impressive, + does not include Mount Everest or directly relate to the question about the + world''s tallest mountain.\", \"verdict\": 0}```\n\nYour actual task:\n\nquestion: + \"Is france part of europe?\"\ncontext: \"irrelevant\"\nanswer: \"France is + indeed part of europe\"\nverification: \n", "role": "user"}], "model": "gpt-4o-mini", + "n": 1, "stream": false, "temperature": 1e-08}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '4612' + content-type: + - application/json + cookie: + - __cf_bm=2N0lRp5YNIBKY6AUc.tpQsJVlWEga7Ys924AChkX4qk-1733967111-1.0.1.1-IJEARyUXuMN2pbqt5jU4yaj77.QHaVM0uVSztZt49GpbAV1HXoPr6.uIdz2viIUlRExuu5tYN_.v5wUpYjyBSQ; + _cfuvid=TvHcCPz7N_.kfviRP.Y0iD_HMeA.0uxvji5nzbbTR5w-1733967111302-0.0.1.1-604800000 + host: + - api.openai.com + user-agent: + - OpenAI/Python 1.52.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + 
x-stainless-package-version: + - 1.52.0 + x-stainless-retry-count: + - '0' + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: !!binary | + H4sIAAAAAAAAAwAAAP//jFNNj5swFLzzK5586QVWZJNtPm49dC+tVKmteikVGPsFnBrbsR9Roij/ + vTLJBlbdSr0g9ObNaGYenBMApiTbABMtJ9E5nX2Q3z5//dTu9WH/vJD57stx79z6JFD+oBVLI8PW + OxT0wnoQtnMaSVlzhYVHThhVZ8v5fP1+OZvNB6CzEnWkNY6yhc06ZVT2mD8usnyZzW7iorVKYGAb + +JkAAJyHZ/RpJB7ZBvL0ZdJhCLxBtrkvATBvdZwwHoIKxA2xdASFNYRmsF5V1S5YU5hzwTzy+Mo2 + ULDvLcKwdiRw3h6URAkqgPIeNR64IeBGgrQYwFgadr2qe0Lg5gTKbK3veGwDPDbcS2UaePbcCHwX + oEHbeO5aJbiGQJz6AMrAx95bhw8FS6FgB/RSCYp28kthqqqaRvC47QOPNZpe69v8cu9E28Z5W4cb + fp9vlVGhLa9JY/5A1rEBvSQAv4bu+1d1Mudt56gk+xtNFFw/PV312HjyEZ0vbiBZ4nrCWq3TN/RK + icSVDpPrMcFFi3KkjqfmvVR2AiST1H+7eUv7mlyZ5n/kR0AIdISydB7jUV4lHtc8xj/iX2v3lgfD + LJwCYVdulWnQO6+u3+PWlXXN52KFy7xmySX5AwAA//8DAEriHaudAwAA + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 8f09f515b8157281-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 12 Dec 2024 01:31:53 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-expose-headers: + - X-Request-ID + alt-svc: + - h3=":443"; ma=86400 + openai-organization: + - datadog-staging + openai-processing-ms: + - '976' + openai-version: + - '2020-10-01' + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + x-ratelimit-limit-requests: + - '30000' + x-ratelimit-limit-tokens: + - '150000000' + x-ratelimit-remaining-requests: + - '29999' + x-ratelimit-remaining-tokens: + - '149998903' + x-ratelimit-reset-requests: + - 2ms + x-ratelimit-reset-tokens: + - 0s + x-request-id: + - req_2c2d33a1a025655db6ab9a62096b644a + status: + code: 200 + message: OK +- request: + body: '{"messages": [{"content": "Given question, answer and context verify if + the context was useful 
in arriving at the given answer. Give verdict as \"1\" + if useful and \"0\" if not with json output.\n\nThe output should be a well-formatted + JSON instance that conforms to the JSON schema below.\n\nAs an example, for + the schema {\"properties\": {\"foo\": {\"title\": \"Foo\", \"description\": + \"a list of strings\", \"type\": \"array\", \"items\": {\"type\": \"string\"}}}, + \"required\": [\"foo\"]}\nthe object {\"foo\": [\"bar\", \"baz\"]} is a well-formatted + instance of the schema. The object {\"properties\": {\"foo\": [\"bar\", \"baz\"]}} + is not well-formatted.\n\nHere is the output JSON schema:\n```\n{\"description\": + \"Answer for the verification task wether the context was useful.\", \"type\": + \"object\", \"properties\": {\"reason\": {\"title\": \"Reason\", \"description\": + \"Reason for verification\", \"type\": \"string\"}, \"verdict\": {\"title\": + \"Verdict\", \"description\": \"Binary (0/1) verdict of verification\", \"type\": + \"integer\"}}, \"required\": [\"reason\", \"verdict\"]}\n```\n\nDo not return + any preamble or explanations, return only a pure JSON string surrounded by triple + backticks (```).\n\nExamples:\n\nquestion: \"What can you tell me about albert + Albert Einstein?\"\ncontext: \"Albert Einstein (14 March 1879 \u2013 18 April + 1955) was a German-born theoretical physicist, widely held to be one of the + greatest and most influential scientists of all time. Best known for developing + the theory of relativity, he also made important contributions to quantum mechanics, + and was thus a central figure in the revolutionary reshaping of the scientific + understanding of nature that modern physics accomplished in the first decades + of the twentieth century. His mass\u2013energy equivalence formula E = mc2, + which arises from relativity theory, has been called \\\"the world''s most famous + equation\\\". 
He received the 1921 Nobel Prize in Physics \\\"for his services + to theoretical physics, and especially for his discovery of the law of the photoelectric + effect\\\", a pivotal step in the development of quantum theory. His work is + also known for its influence on the philosophy of science. In a 1999 poll of + 130 leading physicists worldwide by the British journal Physics World, Einstein + was ranked the greatest physicist of all time. His intellectual achievements + and originality have made Einstein synonymous with genius.\"\nanswer: \"Albert + Einstein born in 14 March 1879 was German-born theoretical physicist, widely + held to be one of the greatest and most influential scientists of all time. + He received the 1921 Nobel Prize in Physics for his services to theoretical + physics. He published 4 papers in 1905. Einstein moved to Switzerland in 1895\"\nverification: + ```{\"reason\": \"The provided context was indeed useful in arriving at the + given answer. The context includes key information about Albert Einstein''s + life and contributions, which are reflected in the answer.\", \"verdict\": 1}```\n\nquestion: + \"who won 2020 icc world cup?\"\ncontext: \"The 2022 ICC Men''s T20 World Cup, + held from October 16 to November 13, 2022, in Australia, was the eighth edition + of the tournament. Originally scheduled for 2020, it was postponed due to the + COVID-19 pandemic. 
England emerged victorious, defeating Pakistan by five wickets + in the final to clinch their second ICC Men''s T20 World Cup title.\"\nanswer: + \"England\"\nverification: ```{\"reason\": \"the context was useful in clarifying + the situation regarding the 2020 ICC World Cup and indicating that England was + the winner of the tournament that was intended to be held in 2020 but actually + took place in 2022.\", \"verdict\": 1}```\n\nquestion: \"What is the tallest + mountain in the world?\"\ncontext: \"The Andes is the longest continental mountain + range in the world, located in South America. It stretches across seven countries + and features many of the highest peaks in the Western Hemisphere. The range + is known for its diverse ecosystems, including the high-altitude Andean Plateau + and the Amazon rainforest.\"\nanswer: \"Mount Everest.\"\nverification: ```{\"reason\": + \"the provided context discusses the Andes mountain range, which, while impressive, + does not include Mount Everest or directly relate to the question about the + world''s tallest mountain.\", \"verdict\": 0}```\n\nYour actual task:\n\nquestion: + \"Is france part of europe?\"\ncontext: \"France is part of europe\"\nanswer: + \"France is indeed part of europe\"\nverification: \n", "role": "user"}], "model": + "gpt-4o-mini", "n": 1, "stream": false, "temperature": 1e-08}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '4626' + content-type: + - application/json + cookie: + - __cf_bm=2N0lRp5YNIBKY6AUc.tpQsJVlWEga7Ys924AChkX4qk-1733967111-1.0.1.1-IJEARyUXuMN2pbqt5jU4yaj77.QHaVM0uVSztZt49GpbAV1HXoPr6.uIdz2viIUlRExuu5tYN_.v5wUpYjyBSQ; + _cfuvid=TvHcCPz7N_.kfviRP.Y0iD_HMeA.0uxvji5nzbbTR5w-1733967111302-0.0.1.1-604800000 + host: + - api.openai.com + user-agent: + - OpenAI/Python 1.52.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + 
x-stainless-package-version: + - 1.52.0 + x-stainless-retry-count: + - '0' + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: !!binary | + H4sIAAAAAAAAAwAAAP//jFLLbtswELzrKxY8S4EVu7Wtm4H0ccilSHtpVUg0uZLoSCRLrmKnhv+9 + oOxYCpoCvRDEzs5wZpfHCIApyTJgouEkOtsmG/lw//Bp83tffRH2+7duU9/dfT7gPS53j79YHBhm + u0NBL6wbYTrbIimjz7BwyAmDarqcz9fvl2m6GIDOSGwDrbaULEzSKa2S29ntIpktk3R1YTdGCfQs + gx8RAMBxOINPLfHAMpjFL5UOvec1suzaBMCcaUOFce+VJ66JxSMojCbUg/WyLHfe6Fwfc+aQhyvL + IGdfG4Sh7UAglUNB7TN44oQeqOEEHx3XAkF5sNwRmAo+9M5YjGHfKNFMSL21xlGgIXDt9+jAOvOk + JMqbnMWQsyd0UgkKL6enXJdlOXXrsOo9DxPTfdte6qdr/NbU1pmtv+DXeqW08k1xDhWiejKWDegp + Avg5jLl/NTlmneksFWQeUQfB9bvVWY+N2x3R+fwCkiHeTljrNH5Dr5BIXLV+sigmuGhQjtRxq7yX + ykyAaJL6bzdvaZ+TK13/j/wICIGWUBbWYVjKq8Rjm8Pw+f/Vdp3yYJj5Z0/YFZXSNTrr1PnrVbbY + bvlcrHA527LoFP0BAAD//wMAcOvPiIgDAAA= + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 8f09f51cdbaf7281-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 12 Dec 2024 01:31:54 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-expose-headers: + - X-Request-ID + alt-svc: + - h3=":443"; ma=86400 + openai-organization: + - datadog-staging + openai-processing-ms: + - '779' + openai-version: + - '2020-10-01' + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + x-ratelimit-limit-requests: + - '30000' + x-ratelimit-limit-tokens: + - '150000000' + x-ratelimit-remaining-requests: + - '29999' + x-ratelimit-remaining-tokens: + - '149998900' + x-ratelimit-reset-requests: + - 2ms + x-ratelimit-reset-tokens: + - 0s + x-request-id: + - req_4b6ae2d675e25726729e2577e662f691 + status: + code: 200 + message: OK +version: 1 \ No newline at end of file diff --git 
a/tests/llmobs/llmobs_cassettes/tests.llmobs.test_llmobs_ragas_evaluators.test_ragas_context_precision_single_context.yaml b/tests/llmobs/llmobs_cassettes/tests.llmobs.test_llmobs_ragas_evaluators.test_ragas_context_precision_single_context.yaml new file mode 100644 index 00000000000..ddbd8f5c3d9 --- /dev/null +++ b/tests/llmobs/llmobs_cassettes/tests.llmobs.test_llmobs_ragas_evaluators.test_ragas_context_precision_single_context.yaml @@ -0,0 +1,160 @@ +interactions: +- request: + body: '{"messages": [{"content": "Given question, answer and context verify if + the context was useful in arriving at the given answer. Give verdict as \"1\" + if useful and \"0\" if not with json output.\n\nThe output should be a well-formatted + JSON instance that conforms to the JSON schema below.\n\nAs an example, for + the schema {\"properties\": {\"foo\": {\"title\": \"Foo\", \"description\": + \"a list of strings\", \"type\": \"array\", \"items\": {\"type\": \"string\"}}}, + \"required\": [\"foo\"]}\nthe object {\"foo\": [\"bar\", \"baz\"]} is a well-formatted + instance of the schema. 
The object {\"properties\": {\"foo\": [\"bar\", \"baz\"]}} + is not well-formatted.\n\nHere is the output JSON schema:\n```\n{\"description\": + \"Answer for the verification task wether the context was useful.\", \"type\": + \"object\", \"properties\": {\"reason\": {\"title\": \"Reason\", \"description\": + \"Reason for verification\", \"type\": \"string\"}, \"verdict\": {\"title\": + \"Verdict\", \"description\": \"Binary (0/1) verdict of verification\", \"type\": + \"integer\"}}, \"required\": [\"reason\", \"verdict\"]}\n```\n\nDo not return + any preamble or explanations, return only a pure JSON string surrounded by triple + backticks (```).\n\nExamples:\n\nquestion: \"What can you tell me about albert + Albert Einstein?\"\ncontext: \"Albert Einstein (14 March 1879 \u2013 18 April + 1955) was a German-born theoretical physicist, widely held to be one of the + greatest and most influential scientists of all time. Best known for developing + the theory of relativity, he also made important contributions to quantum mechanics, + and was thus a central figure in the revolutionary reshaping of the scientific + understanding of nature that modern physics accomplished in the first decades + of the twentieth century. His mass\u2013energy equivalence formula E = mc2, + which arises from relativity theory, has been called \\\"the world''s most famous + equation\\\". He received the 1921 Nobel Prize in Physics \\\"for his services + to theoretical physics, and especially for his discovery of the law of the photoelectric + effect\\\", a pivotal step in the development of quantum theory. His work is + also known for its influence on the philosophy of science. In a 1999 poll of + 130 leading physicists worldwide by the British journal Physics World, Einstein + was ranked the greatest physicist of all time. 
His intellectual achievements + and originality have made Einstein synonymous with genius.\"\nanswer: \"Albert + Einstein born in 14 March 1879 was German-born theoretical physicist, widely + held to be one of the greatest and most influential scientists of all time. + He received the 1921 Nobel Prize in Physics for his services to theoretical + physics. He published 4 papers in 1905. Einstein moved to Switzerland in 1895\"\nverification: + ```{\"reason\": \"The provided context was indeed useful in arriving at the + given answer. The context includes key information about Albert Einstein''s + life and contributions, which are reflected in the answer.\", \"verdict\": 1}```\n\nquestion: + \"who won 2020 icc world cup?\"\ncontext: \"The 2022 ICC Men''s T20 World Cup, + held from October 16 to November 13, 2022, in Australia, was the eighth edition + of the tournament. Originally scheduled for 2020, it was postponed due to the + COVID-19 pandemic. England emerged victorious, defeating Pakistan by five wickets + in the final to clinch their second ICC Men''s T20 World Cup title.\"\nanswer: + \"England\"\nverification: ```{\"reason\": \"the context was useful in clarifying + the situation regarding the 2020 ICC World Cup and indicating that England was + the winner of the tournament that was intended to be held in 2020 but actually + took place in 2022.\", \"verdict\": 1}```\n\nquestion: \"What is the tallest + mountain in the world?\"\ncontext: \"The Andes is the longest continental mountain + range in the world, located in South America. It stretches across seven countries + and features many of the highest peaks in the Western Hemisphere. 
The range + is known for its diverse ecosystems, including the high-altitude Andean Plateau + and the Amazon rainforest.\"\nanswer: \"Mount Everest.\"\nverification: ```{\"reason\": + \"the provided context discusses the Andes mountain range, which, while impressive, + does not include Mount Everest or directly relate to the question about the + world''s tallest mountain.\", \"verdict\": 0}```\n\nYour actual task:\n\nquestion: + \"What is the capital of France?\"\ncontext: \"The capital of France is Paris.\"\nanswer: + \"The capital of France is Paris\"\nverification: \n", "role": "user"}], "model": + "gpt-4o-mini", "n": 1, "stream": false, "temperature": 1e-08}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '4637' + content-type: + - application/json + host: + - api.openai.com + user-agent: + - OpenAI/Python 1.52.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 1.52.0 + x-stainless-retry-count: + - '0' + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: !!binary | + H4sIAAAAAAAAAwAAAP//jFJBbtswELzrFQuercCyAzvxrZf20qIpEqOHqpAociXRpUiCXMcODP+9 + IO1YNppDL4K0szOYndEhA2BKshUw0XMSg9P5J/n89fuP55/Ft123XTfFk5qpL6Zbv6z3irNJZNhm + g4LeWXfCDk4jKWtOsPDICaNqsZzPHxfLopgmYLASdaR1jvJ7mw/KqHw2nd3n02VePJzZvVUCA1vB + rwwA4JCe0aeRuGcrSFppMmAIvEO2uiwBMG91nDAeggrEDbHJCAprCE2yXtf1JlhTmkPJPPL4ylZQ + spcewXn7qiRKSPt7Aqk8CtJvEIgTBqCeE1CPILhTxDXYFj57bgSCCvDEvQoT2PVK9PEb9zyRPbYa + BaEEZRKbm7BDf1eyCZTsFb1UgqKL4liauq6vnXtst4HH9MxW6/P8eIlC285524Qzfpm3yqjQV6cD + 49mBrGMJPWYAv1Pk25sUmfN2cFSR/YMmCj4uzpGzsekRnS/OIFni+or1+A7c6FUSiSsdrkpjgose + 5UgdG+ZbqewVkF1d/a+bj7RPlyvT/Y/8CAiBjlBWzmMs5ebicc3jJvX58dol5WSYhbdAOFStMh16 + 59XpN2xd1TR8Lh5wOW1Ydsz+AgAA//8DAJLjX/OUAwAA + headers: + 
CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 8f09f507cc477281-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Thu, 12 Dec 2024 01:31:51 GMT + Server: + - cloudflare + Set-Cookie: + - __cf_bm=2N0lRp5YNIBKY6AUc.tpQsJVlWEga7Ys924AChkX4qk-1733967111-1.0.1.1-IJEARyUXuMN2pbqt5jU4yaj77.QHaVM0uVSztZt49GpbAV1HXoPr6.uIdz2viIUlRExuu5tYN_.v5wUpYjyBSQ; + path=/; expires=Thu, 12-Dec-24 02:01:51 GMT; domain=.api.openai.com; HttpOnly; + Secure; SameSite=None + - _cfuvid=TvHcCPz7N_.kfviRP.Y0iD_HMeA.0uxvji5nzbbTR5w-1733967111302-0.0.1.1-604800000; + path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-expose-headers: + - X-Request-ID + alt-svc: + - h3=":443"; ma=86400 + openai-organization: + - datadog-staging + openai-processing-ms: + - '564' + openai-version: + - '2020-10-01' + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + x-ratelimit-limit-requests: + - '30000' + x-ratelimit-limit-tokens: + - '150000000' + x-ratelimit-remaining-requests: + - '29999' + x-ratelimit-remaining-tokens: + - '149998898' + x-ratelimit-reset-requests: + - 2ms + x-ratelimit-reset-tokens: + - 0s + x-request-id: + - req_db048be5fbcb3bc4136d9c89ace1249a + status: + code: 200 + message: OK +version: 1 \ No newline at end of file diff --git a/tests/llmobs/test_llmobs_ragas_evaluators.py b/tests/llmobs/test_llmobs_ragas_evaluators.py index 0e901fe93b4..7fbfad974bd 100644 --- a/tests/llmobs/test_llmobs_ragas_evaluators.py +++ b/tests/llmobs/test_llmobs_ragas_evaluators.py @@ -3,16 +3,36 @@ import mock import pytest +from ddtrace.llmobs._evaluators.ragas.context_precision import RagasContextPrecisionEvaluator from ddtrace.llmobs._evaluators.ragas.faithfulness import RagasFaithfulnessEvaluator from ddtrace.span import Span from tests.llmobs._utils import _expected_llmobs_llm_span_event +from tests.llmobs._utils import 
_expected_ragas_context_precision_spans from tests.llmobs._utils import _expected_ragas_faithfulness_spans from tests.llmobs._utils import _llm_span_with_expected_ragas_inputs_in_messages from tests.llmobs._utils import _llm_span_with_expected_ragas_inputs_in_prompt +from tests.llmobs._utils import default_ragas_inputs +from tests.llmobs._utils import logs_vcr pytest.importorskip("ragas", reason="Tests require ragas to be available on user env") +ragas_context_precision_single_context_cassette = logs_vcr.use_cassette( + "tests.llmobs.test_llmobs_ragas_evaluators.test_ragas_context_precision_single_context.yaml" +) +ragas_context_precision_multiple_context_cassette = logs_vcr.use_cassette( + "tests.llmobs.test_llmobs_ragas_evaluators.test_ragas_context_precision_multiple_context.yaml" +) + + +@pytest.fixture +def reset_ragas_context_precision_llm(): + import ragas + + previous_llm = ragas.metrics.context_precision.llm + yield + ragas.metrics.context_precision.llm = previous_llm + def _llm_span_without_io(): return _expected_llmobs_llm_span_event(Span("dummy")) @@ -238,3 +258,176 @@ def test_llmobs_with_faithfulness_emits_traces_and_evals_on_exit(mock_writer_log assert status == 0, err assert out == b"" assert err == b"" + + +def test_ragas_context_precision_init(ragas, llmobs): + rcp_evaluator = RagasContextPrecisionEvaluator(llmobs) + assert rcp_evaluator.llmobs_service == llmobs + assert rcp_evaluator.ragas_context_precision_instance == ragas.metrics.context_precision + assert rcp_evaluator.ragas_context_precision_instance.llm == ragas.llms.llm_factory() + + +def test_ragas_context_precision_throws_if_dependencies_not_present(llmobs, mock_ragas_dependencies_not_present, ragas): + with pytest.raises( + NotImplementedError, match="Failed to load dependencies for `ragas_context_precision` evaluator" + ): + RagasContextPrecisionEvaluator(llmobs) + + +def test_ragas_context_precision_returns_none_if_inputs_extraction_fails(ragas, mock_llmobs_submit_evaluation, llmobs): 
+ rcp_evaluator = RagasContextPrecisionEvaluator(llmobs) + failure_msg, _ = rcp_evaluator.evaluate(_llm_span_without_io()) + assert failure_msg == "fail_extract_context_precision_inputs" + assert rcp_evaluator.llmobs_service.submit_evaluation.call_count == 0 + + +def test_ragas_context_precision_has_modified_context_precision_instance( + ragas, mock_llmobs_submit_evaluation, reset_ragas_context_precision_llm, llmobs +): + """Context precision instance used in ragas evaluator should match the global ragas context precision instance""" + from ragas.llms import BaseRagasLLM + from ragas.metrics import context_precision + + class FirstDummyLLM(BaseRagasLLM): + def __init__(self): + super().__init__() + + def generate_text(self) -> str: + return "dummy llm" + + def agenerate_text(self) -> str: + return "dummy llm" + + context_precision.llm = FirstDummyLLM() + + rcp_evaluator = RagasContextPrecisionEvaluator(llmobs) + + assert rcp_evaluator.ragas_context_precision_instance.llm.generate_text() == "dummy llm" + + class SecondDummyLLM(BaseRagasLLM): + def __init__(self): + super().__init__() + + def generate_text(self) -> str: + return "second dummy llm" + + def agenerate_text(self) -> str: + return "second dummy llm" + + context_precision.llm = SecondDummyLLM() + + rcp_evaluator = RagasContextPrecisionEvaluator(llmobs) + + assert rcp_evaluator.ragas_context_precision_instance.llm.generate_text() == "second dummy llm" + + +def test_ragas_context_precision_submits_evaluation(ragas, llmobs, mock_llmobs_submit_evaluation): + """Test that evaluation is submitted for a valid llm span where question is in the prompt variables""" + rcp_evaluator = RagasContextPrecisionEvaluator(llmobs) + llm_span = _llm_span_with_expected_ragas_inputs_in_prompt() + with ragas_context_precision_single_context_cassette: + rcp_evaluator.run_and_submit_evaluation(llm_span) + rcp_evaluator.llmobs_service.submit_evaluation.assert_has_calls( + [ + mock.call( + span_context={ + "span_id": 
llm_span.get("span_id"), + "trace_id": llm_span.get("trace_id"), + }, + label=RagasContextPrecisionEvaluator.LABEL, + metric_type=RagasContextPrecisionEvaluator.METRIC_TYPE, + value=1.0, + metadata={ + "_dd.evaluation_kind": "context_precision", + "_dd.evaluation_span": {"span_id": mock.ANY, "trace_id": mock.ANY}, + }, + ) + ] + ) + + +def test_ragas_context_precision_submits_evaluation_on_span_with_question_in_messages( + ragas, llmobs, mock_llmobs_submit_evaluation +): + """Test that evaluation is submitted for a valid llm span where the last message content is the question""" + rcp_evaluator = RagasContextPrecisionEvaluator(llmobs) + llm_span = _llm_span_with_expected_ragas_inputs_in_messages() + with ragas_context_precision_single_context_cassette: + rcp_evaluator.run_and_submit_evaluation(llm_span) + rcp_evaluator.llmobs_service.submit_evaluation.assert_has_calls( + [ + mock.call( + span_context={ + "span_id": llm_span.get("span_id"), + "trace_id": llm_span.get("trace_id"), + }, + label=RagasContextPrecisionEvaluator.LABEL, + metric_type=RagasContextPrecisionEvaluator.METRIC_TYPE, + value=1.0, + metadata={ + "_dd.evaluation_kind": "context_precision", + "_dd.evaluation_span": {"span_id": mock.ANY, "trace_id": mock.ANY}, + }, + ) + ] + ) + + +def test_ragas_context_precision_submits_evaluation_on_span_with_custom_keys( + ragas, llmobs, mock_llmobs_submit_evaluation +): + """Test that evaluation is submitted for a valid llm span where the last message content is the question""" + rcp_evaluator = RagasContextPrecisionEvaluator(llmobs) + llm_span = _expected_llmobs_llm_span_event( + Span("dummy"), + prompt={ + "variables": { + "user_input": default_ragas_inputs["question"], + "context_2": default_ragas_inputs["context"], + "context_3": default_ragas_inputs["context"], + }, + "_dd_context_variable_keys": ["context_2", "context_3"], + "_dd_query_variable_keys": ["user_input"], + }, + output_messages=[{"content": default_ragas_inputs["answer"]}], + ) + with 
ragas_context_precision_multiple_context_cassette: + rcp_evaluator.run_and_submit_evaluation(llm_span) + rcp_evaluator.llmobs_service.submit_evaluation.assert_has_calls( + [ + mock.call( + span_context={ + "span_id": llm_span.get("span_id"), + "trace_id": llm_span.get("trace_id"), + }, + label=RagasContextPrecisionEvaluator.LABEL, + metric_type=RagasContextPrecisionEvaluator.METRIC_TYPE, + value=0.5, + metadata={ + "_dd.evaluation_kind": "context_precision", + "_dd.evaluation_span": {"span_id": mock.ANY, "trace_id": mock.ANY}, + }, + ) + ] + ) + + +def test_ragas_context_precision_emits_traces(ragas, llmobs, llmobs_events): + rcp_evaluator = RagasContextPrecisionEvaluator(llmobs) + with ragas_context_precision_single_context_cassette: + rcp_evaluator.evaluate(_llm_span_with_expected_ragas_inputs_in_prompt()) + ragas_spans = [event for event in llmobs_events if event["name"].startswith("dd-ragas.")] + ragas_spans = sorted(ragas_spans, key=lambda d: d["start_ns"]) + assert len(ragas_spans) == 2 + assert ragas_spans == _expected_ragas_context_precision_spans() + + # verify the trace structure + root_span = ragas_spans[0] + root_span_id = root_span["span_id"] + assert root_span["parent_id"] == "undefined" + assert root_span["meta"] is not None + + root_span_trace_id = root_span["trace_id"] + for child_span in ragas_spans[1:]: + assert child_span["trace_id"] == root_span_trace_id + assert child_span["parent_id"] == root_span_id From e8d68ae545ff83b316acbe362a3df96d9990d79b Mon Sep 17 00:00:00 2001 From: Yun Kim <35776586+Yun-Kim@users.noreply.github.com> Date: Wed, 15 Jan 2025 14:49:46 -0500 Subject: [PATCH 08/16] fix(botocore): accept AWS ARN bedrock model IDs (#11944) This PR fixes the model ID parsing in the bedrock integration to additionally accept model IDs in AWS ARN format. Previously bedrock only worked with base/foundation models which were of the format `{model_provider}.{model_name}`. 
However now [bedrock](https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_InvokeModel.html#API_runtime_InvokeModel_RequestSyntax) has expanded to allow custom/provisioned/imported/prompt/inference/sagemaker models, following the typical AWS ARN format: ``` Base model: "{model_provider}.{model_name}" Cross-region model: "{region}.{model_provider}.{model_name}" AWS ARNs: Prefixed by "arn:aws{+region?}:bedrock:{region}:{account-id}:" a. Foundation model: ARN prefix + "foundation-model/{region?}.{model_provider}.{model_name}" b. Custom model: ARN prefix + "custom-model/{model_provider}.{model_name}" c. Provisioned model: ARN prefix + "provisioned-model/{model-id}" d. Imported model: ARN prefix + "imported-model/{model-id}" e. Prompt management: ARN prefix + "prompt/{prompt-id}" f. Sagemaker: ARN prefix + "endpoint/{model-id}" g. Inference profile: ARN prefix + "{application-?}inference-profile/{model-id}" h. Default prompt router: ARN prefix + "default-prompt-router/{prompt-id}" ``` Currently if an AWS ARN gets submitted as the model ID (for example, prompt management or a provisioned model) then because we attempt to split the string into a 2/3-part period-separated list, our code breaks when there is no period delimiter in the string. This PR makes a best effort attempt to check each case and extract a model provider and name from the model ID. However, due to the nature of some formats, we default model provider to "custom" and just return the model/prompt/resource ID from the rest of the AWS ARN. 
## Checklist - [x] PR author has checked that all the criteria below are met - The PR description includes an overview of the change - The PR description articulates the motivation for the change - The change includes tests OR the PR description describes a testing strategy - The PR description notes risks associated with the change, if any - Newly-added code is easy to change - The change follows the [library release note guidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html) - The change includes or references documentation updates if necessary - Backport labels are set (if [applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)) ## Reviewer Checklist - [x] Reviewer has checked that all the criteria below are met - Title is accurate - All changes are related to the pull request's stated goal - Avoids breaking [API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces) changes - Testing strategy adequately addresses listed risks - Newly-added code is easy to change - Release note makes sense to a user of the library - If necessary, author has acknowledged and discussed the performance implications of this PR as reported in the benchmarks PR comment - Backport labels are set in a manner that is consistent with the [release branch maintenance policy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting) --- .../internal/botocore/services/bedrock.py | 52 +++++++++++++++++-- docs/spelling_wordlist.txt | 2 + ...ock-model-id-parsing-611aea2ca2e00656.yaml | 4 ++ .../amazon_invoke_model_arn.yaml | 52 +++++++++++++++++++ tests/contrib/botocore/test_bedrock.py | 9 ++++ ...t_invoke_model_using_aws_arn_model_id.json | 39 ++++++++++++++ 6 files changed, 153 insertions(+), 5 deletions(-) create mode 100644 releasenotes/notes/fix-bedrock-model-id-parsing-611aea2ca2e00656.yaml create mode 100644 tests/contrib/botocore/bedrock_cassettes/amazon_invoke_model_arn.yaml create mode 100644 
tests/snapshots/tests.contrib.botocore.test_bedrock.test_invoke_model_using_aws_arn_model_id.json diff --git a/ddtrace/contrib/internal/botocore/services/bedrock.py b/ddtrace/contrib/internal/botocore/services/bedrock.py index 7c5f26b07a5..00e9aa5756f 100644 --- a/ddtrace/contrib/internal/botocore/services/bedrock.py +++ b/ddtrace/contrib/internal/botocore/services/bedrock.py @@ -24,6 +24,17 @@ _META = "meta" _STABILITY = "stability" +_MODEL_TYPE_IDENTIFIERS = ( + "foundation-model/", + "custom-model/", + "provisioned-model/", + "imported-model/", + "prompt/", + "endpoint/", + "inference-profile/", + "default-prompt-router/", +) + class TracedBotocoreStreamingBody(wrapt.ObjectProxy): """ @@ -320,14 +331,45 @@ def handle_bedrock_response( return result +def _parse_model_id(model_id: str): + """Best effort to extract the model provider and model name from the bedrock model ID. + model_id can be a 1/2 period-separated string or a full AWS ARN, based on the following formats: + 1. Base model: "{model_provider}.{model_name}" + 2. Cross-region model: "{region}.{model_provider}.{model_name}" + 3. Other: Prefixed by AWS ARN "arn:aws{+region?}:bedrock:{region}:{account-id}:" + a. Foundation model: ARN prefix + "foundation-model/{region?}.{model_provider}.{model_name}" + b. Custom model: ARN prefix + "custom-model/{model_provider}.{model_name}" + c. Provisioned model: ARN prefix + "provisioned-model/{model-id}" + d. Imported model: ARN prefix + "imported-module/{model-id}" + e. Prompt management: ARN prefix + "prompt/{prompt-id}" + f. Sagemaker: ARN prefix + "endpoint/{model-id}" + g. Inference profile: ARN prefix + "{application-?}inference-profile/{model-id}" + h. 
Default prompt router: ARN prefix + "default-prompt-router/{prompt-id}" + If model provider cannot be inferred from the model_id formatting, then default to "custom" + """ + if not model_id.startswith("arn:aws"): + model_meta = model_id.split(".") + if len(model_meta) < 2: + return "custom", model_meta[0] + return model_meta[-2], model_meta[-1] + for identifier in _MODEL_TYPE_IDENTIFIERS: + if identifier not in model_id: + continue + model_id = model_id.rsplit(identifier, 1)[-1] + if identifier in ("foundation-model/", "custom-model/"): + model_meta = model_id.split(".") + if len(model_meta) < 2: + return "custom", model_id + return model_meta[-2], model_meta[-1] + return "custom", model_id + return "custom", "custom" + + def patched_bedrock_api_call(original_func, instance, args, kwargs, function_vars): params = function_vars.get("params") pin = function_vars.get("pin") - model_meta = params.get("modelId").split(".") - if len(model_meta) == 2: - model_provider, model_name = model_meta - else: - _, model_provider, model_name = model_meta # cross-region inference + model_id = params.get("modelId") + model_provider, model_name = _parse_model_id(model_id) integration = function_vars.get("integration") submit_to_llmobs = integration.llmobs_enabled and "embed" not in model_name with core.context_with_data( diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index c9cc13a5a9e..ff2cfc09c6d 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -31,6 +31,8 @@ autopatching autoreload autoreloading aws +AWS +ARN backend backends backport diff --git a/releasenotes/notes/fix-bedrock-model-id-parsing-611aea2ca2e00656.yaml b/releasenotes/notes/fix-bedrock-model-id-parsing-611aea2ca2e00656.yaml new file mode 100644 index 00000000000..c3e13ea3d38 --- /dev/null +++ b/releasenotes/notes/fix-bedrock-model-id-parsing-611aea2ca2e00656.yaml @@ -0,0 +1,4 @@ +--- +fixes: + - | + botocore: Resolves formatting errors in the bedrock integration when 
parsing request model IDs, which can now accept AWS ARNs. diff --git a/tests/contrib/botocore/bedrock_cassettes/amazon_invoke_model_arn.yaml b/tests/contrib/botocore/bedrock_cassettes/amazon_invoke_model_arn.yaml new file mode 100644 index 00000000000..cd2283c0ce7 --- /dev/null +++ b/tests/contrib/botocore/bedrock_cassettes/amazon_invoke_model_arn.yaml @@ -0,0 +1,52 @@ +interactions: +- request: + body: '{"inputText": "Command: can you explain what Datadog is to someone not + in the tech industry?", "textGenerationConfig": {"maxTokenCount": 50, "stopSequences": + [], "temperature": 0, "topP": 0.9}}' + headers: + Content-Length: + - '193' + User-Agent: + - !!binary | + Qm90bzMvMS4zNC40OSBtZC9Cb3RvY29yZSMxLjM0LjQ5IHVhLzIuMCBvcy9tYWNvcyMyNC4yLjAg + bWQvYXJjaCNhcm02NCBsYW5nL3B5dGhvbiMzLjEwLjUgbWQvcHlpbXBsI0NQeXRob24gY2ZnL3Jl + dHJ5LW1vZGUjbGVnYWN5IEJvdG9jb3JlLzEuMzQuNDk= + X-Amz-Date: + - !!binary | + MjAyNTAxMTRUMjIwNDAyWg== + amz-sdk-invocation-id: + - !!binary | + YjY5NGZlNDgtNDBmNy00YTJlLWI1YTgtYjRiZGVhZTU5MjQ0 + amz-sdk-request: + - !!binary | + YXR0ZW1wdD0x + method: POST + uri: https://bedrock-runtime.us-east-1.amazonaws.com/model/arn%3Aaws%3Abedrock%3Aus-east-1%3A%3Afoundation-model%2Famazon.titan-tg1-large/invoke + response: + body: + string: '{"inputTextTokenCount":18,"results":[{"tokenCount":50,"outputText":"\n\nDatadog + is a monitoring and analytics platform for IT operations, DevOps, and software + development teams. It provides real-time monitoring of infrastructure, applications, + and services, allowing users to identify and resolve issues quickly. 
Datadog + collects","completionReason":"LENGTH"}]}' + headers: + Connection: + - keep-alive + Content-Length: + - '361' + Content-Type: + - application/json + Date: + - Tue, 14 Jan 2025 22:04:05 GMT + X-Amzn-Bedrock-Input-Token-Count: + - '18' + X-Amzn-Bedrock-Invocation-Latency: + - '2646' + X-Amzn-Bedrock-Output-Token-Count: + - '50' + x-amzn-RequestId: + - b2d0fd44-c29a-4cd4-a97a-6901a48f6264 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/contrib/botocore/test_bedrock.py b/tests/contrib/botocore/test_bedrock.py index 99a1729ff1a..1cf5618bd0e 100644 --- a/tests/contrib/botocore/test_bedrock.py +++ b/tests/contrib/botocore/test_bedrock.py @@ -222,6 +222,15 @@ def test_meta_invoke(bedrock_client, request_vcr): json.loads(response.get("body").read()) +@pytest.mark.snapshot +def test_invoke_model_using_aws_arn_model_id(bedrock_client, request_vcr): + body = json.dumps(_REQUEST_BODIES["amazon"]) + model = "arn:aws:bedrock:us-east-1::foundation-model/amazon.titan-tg1-large" + with request_vcr.use_cassette("amazon_invoke_model_arn.yaml"): + response = bedrock_client.invoke_model(body=body, modelId=model) + json.loads(response.get("body").read()) + + @pytest.mark.snapshot def test_amazon_invoke_stream(bedrock_client, request_vcr): body, model = json.dumps(_REQUEST_BODIES["amazon"]), _MODELS["amazon"] diff --git a/tests/snapshots/tests.contrib.botocore.test_bedrock.test_invoke_model_using_aws_arn_model_id.json b/tests/snapshots/tests.contrib.botocore.test_bedrock.test_invoke_model_using_aws_arn_model_id.json new file mode 100644 index 00000000000..0da1e335083 --- /dev/null +++ b/tests/snapshots/tests.contrib.botocore.test_bedrock.test_invoke_model_using_aws_arn_model_id.json @@ -0,0 +1,39 @@ +[[ + { + "name": "bedrock-runtime.command", + "service": "aws.bedrock-runtime", + "resource": "InvokeModel", + "trace_id": 0, + "span_id": 1, + "parent_id": 0, + "type": "", + "error": 0, + "meta": { + "_dd.base_service": "tests.contrib.botocore", + "_dd.p.dm": 
"-0", + "_dd.p.tid": "6786dfda00000000", + "bedrock.request.max_tokens": "50", + "bedrock.request.model": "titan-tg1-large", + "bedrock.request.model_provider": "amazon", + "bedrock.request.prompt": "Command: can you explain what Datadog is to someone not in the tech industry?", + "bedrock.request.stop_sequences": "[]", + "bedrock.request.temperature": "0", + "bedrock.request.top_p": "0.9", + "bedrock.response.choices.0.finish_reason": "LENGTH", + "bedrock.response.choices.0.text": "\\n\\nDatadog is a monitoring and analytics platform for IT operations, DevOps, and software development teams. It provides real-t...", + "bedrock.response.duration": "2646", + "bedrock.response.id": "b2d0fd44-c29a-4cd4-a97a-6901a48f6264", + "bedrock.usage.completion_tokens": "50", + "bedrock.usage.prompt_tokens": "18", + "language": "python", + "runtime-id": "cf8ef38d3504475ba71634071f15d00f" + }, + "metrics": { + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 96028 + }, + "duration": 2318000, + "start": 1736892378210317000 + }]] From 13b1457af7b43b6a926da6f165f095f0d43448f7 Mon Sep 17 00:00:00 2001 From: lievan <42917263+lievan@users.noreply.github.com> Date: Wed, 15 Jan 2025 15:26:05 -0500 Subject: [PATCH 09/16] chore(llmobs): implement answer relevancy ragas metric (#11915) Implements answer relevancy metric for ragas integration. ### About Answer Relevancy Answer relevancy metric focuses on assessing how pertinent the generated answer is to the given prompt. A lower score is assigned to answers that are incomplete or contain redundant information and higher scores indicate better relevancy. This metric is computed using the question, the retrieved contexts and the answer. The Answer Relevancy is defined as the mean cosine similarity of the original question to a number of artificial questions, which where generated (reverse engineered) based on the response. 
## Example trace image ## Checklist - [x] PR author has checked that all the criteria below are met - The PR description includes an overview of the change - The PR description articulates the motivation for the change - The change includes tests OR the PR description describes a testing strategy - The PR description notes risks associated with the change, if any - Newly-added code is easy to change - The change follows the [library release note guidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html) - The change includes or references documentation updates if necessary - Backport labels are set (if [applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)) ## Reviewer Checklist - [x] Reviewer has checked that all the criteria below are met - Title is accurate - All changes are related to the pull request's stated goal - Avoids breaking [API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces) changes - Testing strategy adequately addresses listed risks - Newly-added code is easy to change - Release note makes sense to a user of the library - If necessary, author has acknowledged and discussed the performance implications of this PR as reported in the benchmarks PR comment - Backport labels are set in a manner that is consistent with the [release branch maintenance policy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting) --------- Co-authored-by: lievan Co-authored-by: Yun Kim <35776586+Yun-Kim@users.noreply.github.com> --- .../_evaluators/ragas/answer_relevancy.py | 146 +++++ ddtrace/llmobs/_evaluators/ragas/base.py | 12 + ddtrace/llmobs/_evaluators/ragas/models.py | 5 + ddtrace/llmobs/_evaluators/runner.py | 2 + tests/llmobs/_utils.py | 58 ++ tests/llmobs/conftest.py | 20 + ...evaluators.answer_relevancy_inference.yaml | 557 ++++++++++++++++++ tests/llmobs/test_llmobs_ragas_evaluators.py | 189 ++++++ 8 files changed, 989 insertions(+) create mode 100644 
ddtrace/llmobs/_evaluators/ragas/answer_relevancy.py create mode 100644 tests/llmobs/llmobs_cassettes/tests.llmobs.test_llmobs_ragas_evaluators.answer_relevancy_inference.yaml diff --git a/ddtrace/llmobs/_evaluators/ragas/answer_relevancy.py b/ddtrace/llmobs/_evaluators/ragas/answer_relevancy.py new file mode 100644 index 00000000000..9a640e08454 --- /dev/null +++ b/ddtrace/llmobs/_evaluators/ragas/answer_relevancy.py @@ -0,0 +1,146 @@ +import math +from typing import Optional +from typing import Tuple +from typing import Union + +from ddtrace.internal.logger import get_logger +from ddtrace.llmobs._constants import EVALUATION_SPAN_METADATA +from ddtrace.llmobs._evaluators.ragas.base import BaseRagasEvaluator +from ddtrace.llmobs._evaluators.ragas.base import _get_ml_app_for_ragas_trace + + +logger = get_logger(__name__) + + +class RagasAnswerRelevancyEvaluator(BaseRagasEvaluator): + """A class used by EvaluatorRunner to conduct ragas answer relevancy evaluations + on LLM Observability span events. The job of an Evaluator is to take a span and + submit evaluation metrics based on the span's attributes. + """ + + LABEL = "ragas_answer_relevancy" + METRIC_TYPE = "score" + + def __init__(self, llmobs_service): + """ + Initialize an evaluator that uses the ragas library to generate a context precision score on finished LLM spans. + + answer relevancy focuses on assessing how pertinent the generated answer is to a given question. + A lower score is assigned to answers that are incomplete or contain redundant information and higher scores + indicate better relevancy. This metric is computed using the question, contexts, and answer. + + For more information, see https://docs.ragas.io/en/latest/concepts/metrics/available_metrics/answer_relevance/ + + The `ragas.metrics.answer_relevancy` instance is used for answer relevancy scores. + If there is no llm attribute set on this instance, it will be set to the + default `llm_factory()` from ragas which uses openai. 
+ If there is no embedding attribute set on this instance, it will be to to the + default `embedding_factory()` from ragas which uses openai + + :param llmobs_service: An instance of the LLM Observability service used for tracing the evaluation and + submitting evaluation metrics. + + Raises: NotImplementedError if the ragas library is not found or if ragas version is not supported. + """ + super().__init__(llmobs_service) + self.ragas_answer_relevancy_instance = self._get_answer_relevancy_instance() + self.answer_relevancy_output_parser = self.ragas_dependencies.RagasoutputParser( + pydantic_object=self.ragas_dependencies.AnswerRelevanceClassification + ) + + def _get_answer_relevancy_instance(self): + """ + This helper function ensures the answer relevancy instance used in + ragas evaluator is updated with the latest ragas answer relevancy instance + instance AND has an non-null llm + """ + if self.ragas_dependencies.answer_relevancy is None: + return None + ragas_answer_relevancy_instance = self.ragas_dependencies.answer_relevancy + if not ragas_answer_relevancy_instance.llm: + ragas_answer_relevancy_instance.llm = self.ragas_dependencies.llm_factory() + if not ragas_answer_relevancy_instance.embeddings: + ragas_answer_relevancy_instance.embeddings = self.ragas_dependencies.embedding_factory() + return ragas_answer_relevancy_instance + + def evaluate(self, span_event: dict) -> Tuple[Union[float, str], Optional[dict]]: + """ + Performs a answer relevancy evaluation on an llm span event, returning either + - answer relevancy score (float) OR failure reason (str) + - evaluation metadata (dict) + If the ragas answer relevancy instance does not have `llm` set, we set `llm` using the `llm_factory()` + method from ragas which currently defaults to openai's gpt-4o-turbo. 
+ """ + self.ragas_answer_relevancy_instance = self._get_answer_relevancy_instance() + if not self.ragas_answer_relevancy_instance: + return "fail_answer_relevancy_is_none", {} + + evaluation_metadata = {} # type: dict[str, Union[str, dict, list]] + trace_metadata = {} # type: dict[str, Union[str, dict, list]] + + # initialize data we annotate for tracing ragas + score, answer_classifications = (math.nan, None) + + with self.llmobs_service.workflow( + "dd-ragas.answer_relevancy", ml_app=_get_ml_app_for_ragas_trace(span_event) + ) as ragas_ar_workflow: + try: + evaluation_metadata[EVALUATION_SPAN_METADATA] = self.llmobs_service.export_span(span=ragas_ar_workflow) + + answer_relevancy_inputs = self._extract_evaluation_inputs_from_span(span_event) + if answer_relevancy_inputs is None: + logger.debug( + "Failed to extract question and contexts from " + "span sampled for `ragas_answer_relevancy` evaluation" + ) + return "fail_extract_answer_relevancy_inputs", evaluation_metadata + + prompt = self.ragas_answer_relevancy_instance.question_generation.format( + answer=answer_relevancy_inputs["answer"], + context="\n".join(answer_relevancy_inputs["contexts"]), + ) + + trace_metadata["strictness"] = self.ragas_answer_relevancy_instance.strictness + result = self.ragas_answer_relevancy_instance.llm.generate_text( + prompt, n=self.ragas_answer_relevancy_instance.strictness + ) + + try: + answers = [self.answer_relevancy_output_parser.parse(res.text) for res in result.generations[0]] + answers = [answer for answer in answers if answer is not None] + except Exception as e: + logger.debug("Failed to parse answer relevancy output: %s", e) + return "fail_parse_answer_relevancy_output", evaluation_metadata + + gen_questions = [answer.question for answer in answers] + answer_classifications = [ + {"question": answer.question, "noncommittal": answer.noncommittal} for answer in answers + ] + trace_metadata["answer_classifications"] = answer_classifications + if all(q == "" for q in 
gen_questions): + logger.warning("Invalid JSON response. Expected dictionary with key 'question'") + return "fail_parse_answer_relevancy_output", evaluation_metadata + + # calculate cosine similarity between the question and generated questions + with self.llmobs_service.workflow("dd-ragas.calculate_similarity") as ragas_cs_workflow: + cosine_sim = self.ragas_answer_relevancy_instance.calculate_similarity( + answer_relevancy_inputs["question"], gen_questions + ) + self.llmobs_service.annotate( + span=ragas_cs_workflow, + input_data={ + "question": answer_relevancy_inputs["question"], + "generated_questions": gen_questions, + }, + output_data=cosine_sim.mean(), + ) + + score = cosine_sim.mean() * int(not any(answer.noncommittal for answer in answers)) + return score, evaluation_metadata + finally: + self.llmobs_service.annotate( + span=ragas_ar_workflow, + input_data=span_event, + output_data=score, + metadata=trace_metadata, + ) diff --git a/ddtrace/llmobs/_evaluators/ragas/base.py b/ddtrace/llmobs/_evaluators/ragas/base.py index 1c4840e9485..17cf5807af0 100644 --- a/ddtrace/llmobs/_evaluators/ragas/base.py +++ b/ddtrace/llmobs/_evaluators/ragas/base.py @@ -56,6 +56,14 @@ def __init__(self): self.get_segmenter = get_segmenter + from ragas.metrics import answer_relevancy + + self.answer_relevancy = answer_relevancy + + from ragas.embeddings import embedding_factory + + self.embedding_factory = embedding_factory + from ddtrace.llmobs._evaluators.ragas.models import ContextPrecisionVerification self.ContextPrecisionVerification = ContextPrecisionVerification @@ -68,6 +76,10 @@ def __init__(self): self.StatementsAnswers = StatementsAnswers + from ddtrace.llmobs._evaluators.ragas.models import AnswerRelevanceClassification + + self.AnswerRelevanceClassification = AnswerRelevanceClassification + def _get_ml_app_for_ragas_trace(span_event: dict) -> str: """ diff --git a/ddtrace/llmobs/_evaluators/ragas/models.py b/ddtrace/llmobs/_evaluators/ragas/models.py index 
9886c7cf1d3..c5b37ee2b7f 100644 --- a/ddtrace/llmobs/_evaluators/ragas/models.py +++ b/ddtrace/llmobs/_evaluators/ragas/models.py @@ -11,6 +11,11 @@ """ +class AnswerRelevanceClassification(BaseModel): + question: str + noncommittal: int + + class ContextPrecisionVerification(BaseModel): """Answer for the verification task whether the context was useful.""" diff --git a/ddtrace/llmobs/_evaluators/runner.py b/ddtrace/llmobs/_evaluators/runner.py index 099887bea8c..6d23af647ea 100644 --- a/ddtrace/llmobs/_evaluators/runner.py +++ b/ddtrace/llmobs/_evaluators/runner.py @@ -8,6 +8,7 @@ from ddtrace.internal.periodic import PeriodicService from ddtrace.internal.telemetry import telemetry_writer from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE +from ddtrace.llmobs._evaluators.ragas.answer_relevancy import RagasAnswerRelevancyEvaluator from ddtrace.llmobs._evaluators.ragas.context_precision import RagasContextPrecisionEvaluator from ddtrace.llmobs._evaluators.ragas.faithfulness import RagasFaithfulnessEvaluator from ddtrace.llmobs._evaluators.sampler import EvaluatorRunnerSampler @@ -18,6 +19,7 @@ SUPPORTED_EVALUATORS = { RagasFaithfulnessEvaluator.LABEL: RagasFaithfulnessEvaluator, + RagasAnswerRelevancyEvaluator.LABEL: RagasAnswerRelevancyEvaluator, RagasContextPrecisionEvaluator.LABEL: RagasContextPrecisionEvaluator, } diff --git a/tests/llmobs/_utils.py b/tests/llmobs/_utils.py index f35097f7abc..8049245dbc1 100644 --- a/tests/llmobs/_utils.py +++ b/tests/llmobs/_utils.py @@ -714,3 +714,61 @@ def _expected_ragas_faithfulness_spans(ragas_inputs=None): "tags": expected_ragas_trace_tags(), }, ] + + +def _expected_ragas_answer_relevancy_spans(ragas_inputs=None): + if not ragas_inputs: + ragas_inputs = default_ragas_inputs + return [ + { + "trace_id": mock.ANY, + "span_id": mock.ANY, + "parent_id": "undefined", + "name": "dd-ragas.answer_relevancy", + "start_ns": mock.ANY, + "duration": mock.ANY, + "status": "ok", + "meta": { + "span.kind": "workflow", 
+ "input": {"value": mock.ANY}, + "output": {"value": mock.ANY}, + "metadata": {"answer_classifications": mock.ANY, "strictness": mock.ANY}, + }, + "metrics": {}, + "tags": expected_ragas_trace_tags(), + }, + { + "trace_id": mock.ANY, + "span_id": mock.ANY, + "parent_id": mock.ANY, + "name": "dd-ragas.extract_evaluation_inputs_from_span", + "start_ns": mock.ANY, + "duration": mock.ANY, + "status": "ok", + "meta": { + "span.kind": "workflow", + "input": {"value": mock.ANY}, + "output": {"value": mock.ANY}, + "metadata": {}, + }, + "metrics": {}, + "tags": expected_ragas_trace_tags(), + }, + { + "trace_id": mock.ANY, + "span_id": mock.ANY, + "parent_id": mock.ANY, + "name": "dd-ragas.calculate_similarity", + "start_ns": mock.ANY, + "duration": mock.ANY, + "status": "ok", + "meta": { + "span.kind": "workflow", + "input": {"value": mock.ANY}, + "output": {"value": mock.ANY}, + "metadata": {}, + }, + "metrics": {}, + "tags": expected_ragas_trace_tags(), + }, + ] diff --git a/tests/llmobs/conftest.py b/tests/llmobs/conftest.py index 108f7dfe55d..61a028e5caf 100644 --- a/tests/llmobs/conftest.py +++ b/tests/llmobs/conftest.py @@ -150,6 +150,15 @@ def reset_ragas_faithfulness_llm(): ragas.metrics.faithfulness.llm = previous_llm +@pytest.fixture +def reset_ragas_answer_relevancy_llm(): + import ragas + + previous_llm = ragas.metrics.answer_relevancy.llm + yield + ragas.metrics.answer_relevancy.llm = previous_llm + + @pytest.fixture def mock_ragas_evaluator(mock_llmobs_eval_metric_writer, ragas): patcher = mock.patch("ddtrace.llmobs._evaluators.ragas.faithfulness.RagasFaithfulnessEvaluator.evaluate") @@ -159,6 +168,17 @@ def mock_ragas_evaluator(mock_llmobs_eval_metric_writer, ragas): patcher.stop() +@pytest.fixture +def mock_ragas_answer_relevancy_calculate_similarity(): + import numpy + + patcher = mock.patch("ragas.metrics.answer_relevancy.calculate_similarity") + MockRagasCalcSim = patcher.start() + MockRagasCalcSim.return_value = numpy.array([1.0, 1.0, 1.0]) + yield 
MockRagasCalcSim + patcher.stop() + + @pytest.fixture def tracer(): return DummyTracer() diff --git a/tests/llmobs/llmobs_cassettes/tests.llmobs.test_llmobs_ragas_evaluators.answer_relevancy_inference.yaml b/tests/llmobs/llmobs_cassettes/tests.llmobs.test_llmobs_ragas_evaluators.answer_relevancy_inference.yaml new file mode 100644 index 00000000000..1f537a977b8 --- /dev/null +++ b/tests/llmobs/llmobs_cassettes/tests.llmobs.test_llmobs_ragas_evaluators.answer_relevancy_inference.yaml @@ -0,0 +1,557 @@ +interactions: +- request: + body: '{"messages": [{"content": "Generate a question for the given answer and + Identify if answer is noncommittal. Give noncommittal as 1 if the answer is + noncommittal and 0 if the answer is committal. A noncommittal answer is one + that is evasive, vague, or ambiguous. For example, \"I don''t know\" or \"I''m + not sure\" are noncommittal answers\n\nThe output should be a well-formatted + JSON instance that conforms to the JSON schema below.\n\nAs an example, for + the schema {\"properties\": {\"foo\": {\"title\": \"Foo\", \"description\": + \"a list of strings\", \"type\": \"array\", \"items\": {\"type\": \"string\"}}}, + \"required\": [\"foo\"]}\nthe object {\"foo\": [\"bar\", \"baz\"]} is a well-formatted + instance of the schema. 
The object {\"properties\": {\"foo\": [\"bar\", \"baz\"]}} + is not well-formatted.\n\nHere is the output JSON schema:\n```\n{\"type\": \"object\", + \"properties\": {\"question\": {\"title\": \"Question\", \"type\": \"string\"}, + \"noncommittal\": {\"title\": \"Noncommittal\", \"type\": \"integer\"}}, \"required\": + [\"question\", \"noncommittal\"]}\n```\n\nDo not return any preamble or explanations, + return only a pure JSON string surrounded by triple backticks (```).\n\nExamples:\n\nanswer: + \"Albert Einstein was born in Germany.\"\ncontext: \"Albert Einstein was a German-born + theoretical physicist who is widely held to be one of the greatest and most + influential scientists of all time\"\noutput: ```{\"question\": \"Where was + Albert Einstein born?\", \"noncommittal\": 0}```\n\nanswer: \"It can change + its skin color based on the temperature of its environment.\"\ncontext: \"A + recent scientific study has discovered a new species of frog in the Amazon rainforest + that has the unique ability to change its skin color based on the temperature + of its environment.\"\noutput: ```{\"question\": \"What unique ability does + the newly discovered species of frog have?\", \"noncommittal\": 0}```\n\nanswer: + \"Everest\"\ncontext: \"The tallest mountain on Earth, measured from sea level, + is a renowned peak located in the Himalayas.\"\noutput: ```{\"question\": \"What + is the tallest mountain on Earth?\", \"noncommittal\": 0}```\n\nanswer: \"I + don''t know about the groundbreaking feature of the smartphone invented in + 2023 as am unaware of information beyond 2022. 
\"\ncontext: \"In 2023, a groundbreaking + invention was announced: a smartphone with a battery life of one month, revolutionizing + the way people use mobile technology.\"\noutput: ```{\"question\": \"What was + the groundbreaking feature of the smartphone invented in 2023?\", \"noncommittal\": + 1}```\n\nYour actual task:\n\nanswer: \"The capital of France is Paris\"\ncontext: + \"The capital of France is Paris.\"\noutput: \n", "role": "user"}], "model": + "gpt-4o-mini", "n": 3, "stream": false, "temperature": 0.3}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '2795' + content-type: + - application/json + host: + - api.openai.com + user-agent: + - OpenAI/Python 1.52.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 1.52.0 + x-stainless-retry-count: + - '0' + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.openai.com/v1/chat/completions + response: + body: + string: !!binary | + H4sIAAAAAAAAAwAAAP//7FTBjpswEL3zFdacQ0VoNmS5VJWyvfTWSttDqcAxA7hrbK89rLaN8u+V + CRuItpV6by8c5s17vHn2+BgxBrKGnIHoOIneqvi9/bjf3iVPSI/74ZP87H/e7++e7x8ytc+2sAoM + c/iOgl5Yb4TprUKSRp9h4ZATBtV19nab3d5ku2QEelOjCrTWUrwxcS+1jNMk3cRJFq93E7szUqCH + nH2NGGPsOH6DT13jM+Rs1BorPXrPW4T80sQYOKNCBbj30hPXBKsZFEYT6tF6VVXHAh4H9MF5ATkr + 4EvHiUnPqEMmuJXEFTMN++C4FviugBUrQBstTN9LIq4CKzlVVbX8h8Nm8DzMqQelpvrpYlqZ1jpz + 8BN+qTdSS9+VDrk3Ohj0ZCxEC/KrJNb/k5iSSP+9JCLGvo0LM1zNC9aZ3lJJ5gF1ENym08LAvKcL + 9HYCyRBXi/ruBbjSK2skLpVfxAuCiw7rmTrvJx9qaRbA8ghfu/md9nlyqdu/kZ8BIdAS1qV1WEtx + PfHc5jA8Y39qu6Q8GgaP7kkKLEmiCydRY8MHdb5I4H94wr5spG7RWSfPt6mxZZZinfHDzUZAdIp+ + AQAA//8DAHIOLnJvBQAA + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 9017b85e9bb772c2-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Mon, 13 Jan 
2025 19:16:21 GMT + Server: + - cloudflare + Set-Cookie: + - __cf_bm=jNCWyg.Vq7.UysipP_0ZTDahpo.QhHWQvZ5Biaue6Bs-1736795781-1.0.1.1-C.GEfp7jlmfkY9qIXtsRjf9L9W5MzQ2OSXUpBOB0jIjYNSrJBlVnNwuHbaPYKT9.DjEjgPPIK69hkYhC0UtQZA; + path=/; expires=Mon, 13-Jan-25 19:46:21 GMT; domain=.api.openai.com; HttpOnly; + Secure; SameSite=None + - _cfuvid=g4nQujMCF8pE0TfAWPiY5rkdnbhtDE1kP7w0dFQq.v4-1736795781566-0.0.1.1-604800000; + path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-expose-headers: + - X-Request-ID + alt-svc: + - h3=":443"; ma=86400 + openai-organization: + - datadog-staging + openai-processing-ms: + - '541' + openai-version: + - '2020-10-01' + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + x-ratelimit-limit-requests: + - '30000' + x-ratelimit-limit-tokens: + - '150000000' + x-ratelimit-remaining-requests: + - '29999' + x-ratelimit-remaining-tokens: + - '149999320' + x-ratelimit-reset-requests: + - 2ms + x-ratelimit-reset-tokens: + - 0s + x-request-id: + - req_b5c84c23586be35a5fc602178e84a812 + status: + code: 200 + message: OK +- request: + body: '{"input": [[3923, 374, 279, 6864, 315, 9822, 30]], "model": "text-embedding-ada-002", + "encoding_format": "base64"}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '114' + content-type: + - application/json + host: + - api.openai.com + user-agent: + - OpenAI/Python 1.52.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 1.52.0 + x-stainless-retry-count: + - '0' + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + 
H4sIAAAAAAAAA1R62a6zurbm/X6KpXWb2gpd8GDdESCQ0NhJIF2pVAKSECCE0NiAj867l8h/dKrq + ZkqT6QkGD3/d8H/866+//q6T4pH2f//z19/vvOv//h/ztXvcx3//89f//Ndff/3113/8fv5/Ix9V + 8rjf80/2G/77Y/65P8a///lL+O8r/3fQfO/usyW6cQiS8fqNqTqZDmFB9CzQZNaOAvxD18wig+yx + MtBK1X4rAQvr8BNO1X4QQBKbG1uX1xwNXnmJIVDSKxUgH/kkaS4FIigf4ir9J+GqqVbI3199XDcX + uajvtNRBWz01LMjftzeRozeg9egZzOXXtJjug5tDdO1dshW/q7BfSw2F69jrDBfHVzIF8SNCmVc/ + mL4LcsR72gpoO778eT4GF+8HyYLbsz6zLc6WbX8/SDZoi5YQm5Kl16XjykCR1Lzo8hI5RVfULAM1 + 2G7IzY8HNKqrAqB1rSs7ewprp+Pxe4TmuxswYGInw+pRZmgp1BzLrn3xuHURclAP+ZbgbKd4NIjP + Z7ADd0s2q3MecnzWDY0a4gMvF/d12Itn24fxmafEum1eaMhbIQMosxgP6XFop29oK7DgboPDe/nm + LTkiHRbZ4stcFB9bmpzUCC3Ec4ulnVeGVBTiWs1PTko2b/Hc8kXxluCwLUeC3fZk8hUyUgir20hc + +WO34+OUVdDot454R68xR6hfEaiK7WHqLb5FfT/YErqXR5eZPdTeoCaXXG1k9UIb59qF3f2NFogU + yyWehvclpO/ltVkthYYTV09lkwdsopqrZjqV8WntyYq/2cL0bY8UoQdGbDc1GDmP+kAOe10vpD3L + AGx1T5hX+0Eo58s8RocycpjFYuRNMOmNhjaWShfcKcKBrr0jSj+uybzrpQ07qoRbKMs6IoHSbc2h + iFcKxGKeM3N5jdCQfLcA/jhpZLMtP6hL754CR/oA4q2igvPL7WCjGj22xKlXZTH4bklhNEeHrkb3 + lPAKJkXL9T4mdo9q3pP7oQTZEnfMuMY7k+/umgVoVUVUk6tPMoivSIfs3QKe+pNXDBtf6lB/YhdC + FvdX+JsPOpRnh0o33U46NfiqoGp9hYXjrkvGg/wawNAvE7HwSi/kkzMA7CSjYuZRLBO2/FwjuCnX + hJGV+jK77NBF4B0si0QK26BJ2NwjEI6NQzbd2HlNRNI92iySFyO7rV0Mb9G1kJVkH7IzxTgsvFOe + L/Hm6VCl1HQkLfREQOI2Vsn6lQYhI0dkqJFd7EjwRkPLa6GOwPPpmhmXaJuMKdqfNdnHKV4mDm+n + ek0n+IaHN7HXHW0nenMewLRNSzxjtQ0nFLUpNF9voJNweJn9a31dgHBPBeK2730xkGVg/96fLpxk + F7JPoWNNJl+J6Dgswm67vrjgasuY6HJVmyOEgKERuzM5FMfcZLa9UqCJRkqHtpzMjjRPUKVjMeBX + ugq8QTkJDzgEg0n2R6kouHG5GMjZSyFxLwENK8/3MBDZrtl6FEjBPxdvCxcteJMgepqc2/aoqoHy + uBLjs2vCaeKiC+XKOOM6887tdHZ2HcpUc4EXix6F7QoZD8iEa8G274Ns1h9yPGt1qWyIrTiNyR26 + pKvSUwgzdlsnZGm9pkAVd0vco9Nzattuoy4e04f9vudwkKYc+lfas7un9uFUSlkFl4PusHsqDt7o + R76urks3JM7tY6DJ3Ly30F0/e+Ipa5IMd9oZiDlnhVlRvi6kgcY24CEJ6FJaeO34ukg1Mn3/Sfzd + 7o24WcsuCrreINaFdHzisftAy4fNiHO6Knys9WUGzW2pM33zunnTXntUEOXHJ1vbyo7TzAkG9BGm + ngVy5YSC2PiWGu6HM0mXyiX5LIq38MMnKqtbLfxIX0sCpjktIWRzLwZwIxWg39nM80jUStWjSuGj + 
3W5kw921KT3WOUWy76fMJ9LB5O1XH7R5PkR3P6t2mBYhVd/de8vcgpTJsGkSFZ6ED5h3Nk+4/zEa + 4EZyYetndU2+mEYYRAoaO+jPiPfyxiyRmkYi2ZzuUTLgqyxAYvVHit5c8frLRa1QZwWYal8KRXXK + IIagtM94WHyffDSavoEvrL+41eTSm97La41mvCdOMvacviTTBvuQe+yHRxRUhYK+8gO2Nk67gq+m + +Lq6hZuR2A/b5h13TAmdxf3IrHD58oa5vlGhah+2U/enGe90QMNbHIgrPO4JP6yHK0o/W5NE+Jwl + o/42qj//T8SthNg3lFxYVaXBnO+AvL4kpQWumuvETOie88zRDJj5izj7oxLyASSANIl7YvSntpi2 + WSyo5OYdGF59YjQ9h9oADpZJ9u7nVvSP4uErvPUSKq7rYzI879/rSr4FHhZeCTVHSLs9qOrRp+Vr + gUP2TJstXCXlSMILsvjI+UYFGk42nfdTMpk1UVD6WoyY73yzEOOX7v7Gs3BPUDJMi4SixhmWbK5/ + 3rqWe0XZIVTx9OncduyH+KzW93zEKrfsZGrwOtLm/Ukcq8vC0ZJ1DDSNJ7J5fsWEBUztQKQLjZjT + s04YOXIdSJa9mad6cTJdrl8BUhgfdNVZEZomrrmoR/p6Xt+A860fATwmbhD9bJOQS+S4Rdp9/2B4 + eKlhP6UZRjN/Mt0WgnaQK9lCuegLWFQDBY3hYTK0UhJOzCovZjH9+LIS0zddbt9VMp605gy9q36Z + +VYnj7pqq4OxciT6JQ54Y/Aut3/W/6C6iPft2XWhX9wTtl6Fm5BXm8KAnKQ6Od6zb9uJixVW90zq + mDPlm2S0gh5UiJcbFsRpzKdpqWL1JSodHoZm4j1b9A91o4LDHm9ybSksF0d43k4bcp+yindusKLI + P1YB0+fx7My9CcS4RMRrvz7njtkpqPArh+DmdDOnS/VM0WN4r5m/bgaTf3BoaX2zuWExY2dzEnI1 + h9Pic2KbjJ29SX+vBM0n94I4eV8kfb7wFHgvxh378X2PvTH94dWMTyuTyW9ZAIo+OV6N6redmqG0 + EVvVB0KWkcwHS7oC0m1FwEhCuJUFDA0CiSZ4afsHs/vhJyVXE8tYWYfSimxj+ICrE3LEjin/7u+9 + HzYe01Xv8X1S2LAynzcKKebtRB66BMaFbSg/b/Ymm/EfJe3hROwqDU2mRBpe7fBLxSvl6/3RUyAN + T+cPf0vJST2jmY+Zva0tc3hnESC1oDuq7OKl2Vd3GUMXJgu2uYQ7ky7yRYnE0+1LgtM5Tj5ttqew + xnLNtt6eI66xRoWL31jM0kQ7mVQ1xDCowURlwbMT+dMqe7hR2aYvWhZoSCjO1PvR3hEcFbonGYK1 + AH3d58SxpTZkD+O2gMXWL3/43fapfloAkx/7mS/dhF2bEcOsD9nNXSft6LRZo23Kq0XuZrk2pdvD + V9BBoTvmXxTHHGtdzkB9Ky6GT/81OdN9A/pTf2Hrm702aSllJUh41xMrMFvEhzjFgBFRyWaRGmhi + 0VqFX/2706sy+ft57xA+3iViuqbqDT89+7X3EsGz/h12DziDcqdvWijLJ6f1upqAkWWJl69QQH2K + 9pGGfSkmjmikiAfJpgLho+9YPL1sc/KyIdLyQ2OwgAp3czzkUKtG1Ats565R24W8z1RZ3uZk1qec + B0o+we5+fLFgF+TelCh3+/e+FLldFA7iYsTa092esOQngzeeDAZoutwPDIM+eNOq5ramaqxivn8b + i6nolQztPDuj8OPDqK8pDHl8xiLv14nEllEHStNHbE3Lgg/S/nCF5aexKA+vVjFCWu7RXB9ke2Tf + oqduXmpO/WVU/qYW4hWoCjoOzYKOVR6g6XJ9CQiWF4f4EdP4GFNt+NUXrt8H2aMyzlXYmSwg7kUz + 
2p8+VvPv88Hs82f0Jns8PWAtRRGz3Obcsvchs9G8n5i3P50Stu6sLeDjU6K587TD7iR4OqoXKCd+ + EwvtRCpFgR/fbe+P9re+FKTOsSjM+oUJ3UFSYSx2xNIfYjuZhxyD4mUl01/XgznzcQNqvBWIJ2Az + /P2O/FCsCDb958yXbqnScLCZs312Cdc3kor4bmiIP66akF/ytatlOc6JX66X7bgLaf7DF3pByQGN + 1++xg+z89HE58iGcDliaYPmpLaa/0SHpnx+9g3Pbp4RIi7ad9aWi5aFk4iF6lu0An8nW0p3fsV/9 + jmc7fiB6KDfMlN81H6wrH+BZv0SGT9oZsXCqjrA/H65Y+N4tUzCdsIHn7bJhO2NRoK8V9Atgfu4y + 67jzw17bfzDQ98KhXf6pi9eNZIb2yauOEOGgJ4JZKhJgsXoS91AG7XSO5AjG+zn55QV8WLt1rbru + tcalLTctj1dWBJ6TMSxeb18+WfiEwYdPw5zyXCajvPFKNNcb8RLB5aOVhzY4/mNPJ8XqPTbq3xwg + fE/ErEMn6X56zZkCi+2WRuZxS70coe1bl1lhqvP+KUgR2DRf4aftH7wRy/4RDZ50JMGhwOH0coc9 + zH6GFju/KLhE4i1Y0qOf13+c/cKo/PASa8aha0dxey0BPRqM5bJR227jLzrYbZUnlhTHNen3AwYa + 7FYnJA2icKybwwBFD8qvPkIuZumgktTNmTNOB69/dcUEr8vyRHT/TovvrL9U5/ZYEt/JLCQhD10h + PQ5rEv/8eVTEW4D74sx8zeu8qXWFMwjJkeOBXxbetBHyB4xHTWbWofK9sRRXZ/TzM/aHOcUAH9UG + vU9lrJRaxsdfHtJMbMMs+bvxaPrIr0g/OyWz62Pt0R9/zX533m9iwrVA32pz/kOHXWAgoTHVCah6 + 3lH5FVM0deOuRHJWPpn57R/mcCn0q5ZZoUd2yrf1OiPVJ3hx2pKAq1oxblrTheB0YmT7PlzMge3Q + +ee/sZxGOOGRhR8w5ynM7Rdv3j8f+wY+qVETXbg/+FTzlY0OBCyW2DvBG/GN2Wjej3TqJi+UZr0A + wijJbBf073CclgbA/VQU9JentHHCtuhWGjdmH15Dy81iVan7D9aZkQYnr5StNILiHflszg+8Yb9/ + H5FUrSZGivsTDUa4p4hegoz98pUhkvc6xLfRZ2czN5PJ30o+0rZq+ke/98jzXTjxqSbePF/uf9wG + wuOa0ekS1UnTlMxYbXTDIORxWYa/74+qlGbEQw/MqU5fqha6/pGYh5yh4apdK6QwpWLnh6aF03lK + M6jP3oPtiJN6Ux5m5z/7ybfMTTjtxgl+zyMVfideb6SDASTvKnLYRxLqBWm4auBNp59eCtnX1nNg + maWx3WPjtHxdRJaq+KPGdhZKTN56baWW2N4zz1jVyR+/O+drBOtPwRyuF++MZv1HZj1m0tn/o2ZT + LoivTmc++5cY0cA32HaRmIV494cMpOJxZYZxe/OfPljtz+H1D58OperUq1Y0HsxaVWk4xHgY4FxZ + a5LwbV5M+xhJSHraJ7azR8bH45PVaBnuDdrOfDHeJndCUfhSmGGAbcoZJxKU9YYS57JYc+k+GJmG + VLEnbpQVHg+SoIR0dbaIJ25uqBduCYa1kp3YmiqqOeXpNYZQ5Zx4UJJicq/6XnuScWDnZhmGTTeu + S0itdUyp3JhFPShD/cefWupq4N9N67molKQTXdCyMCdjq25V6fgayCZyl3xa32BYzXkDFY9B3bJT + k3QIbWwVPx+XZzImfqhDHgomHo5S0fLFafDR+01EvErJEU276DTB7D+ZmTdVSNPPMUUnPwQqaGIV + jqOnWwi84TTzd4lm/Smgnx7DylAmza2CBVx2Nad8etZhc0F+CXL/CahqvvVkADdVoWNQMGOlvlBf + 
OYdcrSq2IIbLJo8txQDQzrMyOqmjEtLIfGJI2vCERfrJ0LD6pNUPb7Hs3Lxi/OU9j6I8stCyTXPc + V+sGvp/28wePR398+QAfTcCLN7kWw2VV2aB+TyXZpshFP/6FwrmKs/6XTP6r35N/AGZadmG23ahI + 6EZFm2bu59ZOPtL/5Lu4TsTGaz4WmmBYJtasf0wkf9phDwdVcdn+sUz4hDwUr7YSfbA5X/Ck+XvC + 1puuZNNHXTI2QWyjk6cX5GaKccLybZwjQz9NeJrzMPUcLSOY8YbY+fdj9tV+V8KKYUq2p+Up5PZk + UQiX2ob5QyXysU96/4cnP7+IeG/edHXDS+EPvrYNHiqYXkZFtt4+5EN7ihXIK8MiplkYrSxfXR+G + q7gm61N8SfhGPFA4TtKSCh+hMMflssuRtHmtGaZl4fHi4FY/viVWtl0V0/K+MtBRazs8rGnvtWTY + Y4DwMzHTX5Ji6GMNYPdqSmJ1Pfnjb8F14/qnr/hYN7cBZv1HF20dFNOqRjaSDvsLuSW7ozdMmZSp + b8+/MEcTPuYvf4doeQjwoOzqYnpJngWOn+6ZIRxeHteC7RZumFFi3tbv4nub3AHN+Qlztb3TDsZX + itHmbgxso+1wyGvtGqveuX8yf7nOeDPsQQVciQrbGKji32NbUnDT6svcm2kmLe7eHcz4y3amqIaD + GrxU6M43guUqI8m4fbYlisJCweN4YOFYlAcM9eroUJ4exoTepsnQNEkviLUpJY+yhpWwxmJN3Ju6 + Ql14UwVIjOLAtv5ZaEdOt5GS7nBHtehpIumGGgvNeS+JZ74b/GG1QMGKjdh6Glf+yw8BvfGRLmc/ + 2q9cWoN0YQaxb3qVTPPz0CXzZYLnPGQoiy+AHJkrgs+9Z4rCLfShkZULVYWHFvbOQDHo80yFC+mQ + vPxczwD22ySmbNTFuKdjB/V6TEkadLY5ZvYjA7jDmZinU8L7U7bZ/vIcdrucbXNizadEkZBmZK0M + G3MULGqhGxJF9vNf7+s37kCNtJwu5jyIo1Pcodk/sY0aXPn3l9ekq8hiIdnF7ZDeTQUkWYiZ03X7 + lmvHDdWW02PCe8vJk3Hsc+EPHox8obacRqMLiy0uCTmvuTeJwrEBub359PHwdS4WtmBov/6CQcRj + 8TX36kL96enF5h0V/Lz/GjCa3JnzrzMabSiOEL7d66z34mIKqiRGw1seqDAVZdHoB5+iZNle6DTz + CSefevvLn+f+k4EmJfN02CePZq4XklDk8RguGnnP/atb8sdfzHiCFcIjr3Py9RmqdrVn6VCJiN9L + oOjZM4f88pgpXyBXLfuDzjaNpbd8rlckk1aiy5kPvkPqdnBxHtPcz6nasezXexQ6ZM2M5nY2R3FZ + pyD5SvTL39rx6ok5XFaDRx7fh1FIeWMb6Gwsr1gLdGp2IX/nYGcdop2hla34UMwIrHtXM2PW4xNM + 2xp839ixTWg9zcmfrim6RPcX2T02n6KLL5n0618Qa+5fdGryzOB8OcVse726RacqQgfvYpESN1j3 + 4RCn1RWSLK2ZJ11p0pFlYMFRLyqc5Lrvydz2S2B7NaDyzcqLcd2rzQ9PmbVcjiFF32sFM96yzeps + hPJmsZ5++TxV3J63060CgDE+uWTjPD7mSN2mgvjy8dgvvxdyoazgbp/5f/k9nb4Udc1PJubye8vF + 5Dv4YFm7iHnLg8p/eh42zfrJgt2QJ5NcXA1IUbZh21EKwqkLyxjNeRPVrGbNpa0h1nDB0oaQw+PI + x/ah+pAK7sTW4T3zJi9Ce7Ukp+/8fQNPVjL1CMWKYqwutSMfjHQ7gN48KzLXF+8Eab1F2+XJZT9/ + SX/8fKZdzwz/Gno8HcZJ0ySjIKYqRgW/weEMefpgzMdF2g5Xzz5qanoWiXmuh5DZk9+huT+J5Tnf + 
43mj7ZErRpTM+iHpnNvtiAJd63E989VEe6EC1EsL5qYSS4bzpsVgq0fCdmfzUPCkJ3skX9YuO+z1 + rOgzpRbgHd8cZm3KsylfmxVG83qTzRR3nG++aQMvUe3+9HO50N0k+IbhG6ufqSyGrLqliOS0IltN + KcJuBesOZn1I37vFo+0sab/Qon7jk91bCYuRtA4GQj4w8xPj3f2ABfRbz+/MP115KRW4JIJATGW7 + S3hvHnTNiQ495pZ2K9jpVJXoW8UYo7XJC77TLwqc3ajFE611T0qP7zO00zqY+ZdzbhxvKcz3p6K2 + MlrR80KAm3eMmOPasjnOeAOHYDLJr98z+7UUAQkYRXx5M9kznGwt/94fdIi4yofzITsi4f4QsJYe + CZpefWige9yJdPlsOBpzMZHAIajBYuQ+0bgLq3xFacWZY4StSdNjH8ExsL0/7zPi6JsjQ5bucx50 + aFkVX7cw62UMs974ujhbwN+/UwH/+a+//vpfvxMGVX1/vOeDAf1j7P/930cF/h3f438LgvTnGALt + 4uzx9z//dQLh729bV9/+f/d1+fh0f//zF/pz1ODvvu7j9/9z+V/zg/7zX/8HAAD//wMAiTuukd4g + AAA= + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 9017b864b936de9b-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Mon, 13 Jan 2025 19:16:22 GMT + Server: + - cloudflare + Set-Cookie: + - __cf_bm=s1ISh4iUNLQ2Ya1kD2H7qoQ8Iu5E35cNV7LByFeP6S4-1736795782-1.0.1.1-Yiw0sA3z8E8DIIhiXr_yjzMDz_ePLu1gwWVxoqFOBJDcDhE91jZRgy2Kp6FYdcBcS950vyAqL1y9LPmOMIGJxA; + path=/; expires=Mon, 13-Jan-25 19:46:22 GMT; domain=.api.openai.com; HttpOnly; + Secure; SameSite=None + - _cfuvid=beMBNvOtq78fFCvDki.8wgKumD_EQhiZMEDa05JaT7s-1736795782543-0.0.1.1-604800000; + path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + access-control-expose-headers: + - X-Request-ID + alt-svc: + - h3=":443"; ma=86400 + openai-model: + - text-embedding-ada-002 + openai-organization: + - datadog-staging + openai-processing-ms: + - '80' + openai-version: + - '2020-10-01' + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + via: + - envoy-router-678875496d-6fjmx + x-envoy-upstream-service-time: + - '59' + x-ratelimit-limit-requests: + - '10000' + x-ratelimit-limit-tokens: + - '10000000' + x-ratelimit-remaining-requests: + - '9999' + x-ratelimit-remaining-tokens: + - '9999993' 
+ x-ratelimit-reset-requests: + - 6ms + x-ratelimit-reset-tokens: + - 0s + x-request-id: + - req_e589af7ff963668ef0b2722e99fcde70 + status: + code: 200 + message: OK +- request: + body: '{"input": [[3923, 374, 279, 6864, 315, 9822, 30], [3923, 374, 279, 6864, + 315, 9822, 30], [3923, 374, 279, 6864, 315, 9822, 30]], "model": "text-embedding-ada-002", + "encoding_format": "base64"}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '192' + content-type: + - application/json + cookie: + - __cf_bm=s1ISh4iUNLQ2Ya1kD2H7qoQ8Iu5E35cNV7LByFeP6S4-1736795782-1.0.1.1-Yiw0sA3z8E8DIIhiXr_yjzMDz_ePLu1gwWVxoqFOBJDcDhE91jZRgy2Kp6FYdcBcS950vyAqL1y9LPmOMIGJxA; + _cfuvid=beMBNvOtq78fFCvDki.8wgKumD_EQhiZMEDa05JaT7s-1736795782543-0.0.1.1-604800000 + host: + - api.openai.com + user-agent: + - OpenAI/Python 1.52.0 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 1.52.0 + x-stainless-retry-count: + - '0' + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.10.13 + method: POST + uri: https://api.openai.com/v1/embeddings + response: + body: + string: !!binary | + H4sIAAAAAAAAA+x6SxOyOpfuvH/Frj21q+QmWXwzBATlkqggao8EFQERuSRAuvq/d+HbdU6d6Rn1 + YE+sUiMGsvLcVv7z3/766+86KR5p//e//vr7nXf93/8+f3a/9be///XXf/zbX3/99dd//l7/n5GP + Knnc7/kn+w3/fZl/7o/x73/9JfyfT/7voPna3WdLdOMQJOPle6PqZDqEBdGzQJNZOwrwD10ziwyy + x8pAK1X7rQQsrMNPOFX7QQBJbK5sXV5yNHjl+QaBkl6oAPnIJ0lzKRBB+RBX6T8JV021Qv7+4uO6 + OctFfaelDtrqqWFB/r69iRy9Aa1Hz2Auv6TFdB/cHKJL75Kt+F2F/VpqKFzGXme4OL6SKbg9IpR5 + 9YPpuyBHvKetgLbjy5/nY3DxfpAsuD7rmG1xtmz7+0GyQVu0hNiULL0uHVcGiqTmRZfnyCm6omYZ + qMF2Q67+bUCjuioAWte6sNhTWDsdj98jNN/dgAETOxlWjzJDS6HmWHbts8ets5CDesi3BGc7xaPB + LY7BDtwt2aziPOQ41g2NGuIDLxf3ddiLse3D+MxTYl03LzTkrZABlNkND+lxaKdvaCuw4G6Dw3v5 + 5i05Ih0W2eLLXHQ7tjQ5qRFaiHGLpZ1XhlQUbrWan5yUbN5i3PJF8ZbgsC1Hgt32ZPIVMlIIq+tI + 
XPljt+PjlFXQ6NeOeEevMUeoXxGoiu1h6i2+RX0/2BK6l0eXmT3U3qAm51xtZPVMG+fShd39jRaI + FMslnob3OaTv5aVZLYWGE1dPZZMHbKKaq2Y6lfFp7cmKv9nC9G2PFKEHRmw3NRg5j/pADntdL6Q9 + ywBsdU+YV/tBKOfL/IYOZeQwi92QN8GkNxraWCpdcKcIB7r2jij9uCbzLuc27KgSbqEs64gESrc1 + h+K2UuAm5jkzl5cIDcl3C+CPk0Y22/KDuvTuKXCkDyDeKio4P18PNqrRY0ucelUWg++WFEZzdOhq + dE8Jr2BStFzvb8TuUc17cj+UIFvijhmX287ku7tmAVpVEdXk6pMM4ivSIXu3gKf+5BXDxpc61J/Y + mZDF/RX+5oMOZexQ6arbSacGXxVUra+wcNx1yXiQXwMY+nkiFl7phXxyBoCdZFTMPIplwpafSwRX + 5ZIwslJfZpcdugi8g2WRSGEbNAmbewTCsXHIphs7r4lIukebRfJiZLe1i+EtuhaykuxDdqZ4Cwvv + lOdLvHk6VCk1HUkLPRGQuL2pZP1Kg5CRIzLUyC52JHijoeW1UEfg+XTNjHO0TcYU7WNN9nGKl4nD + 26le0wm+4eFN7HVH24lenQcwbdMSz1htwwlFbQrN1xvoJBxeZv9aXxYg3FOBuO17XwxkGdi/+6cL + J9mF7FPoWJPJVyI6Douw267PLrja8kZ0uarNEULA0IhdTA7FMTeZba8UaKKR0qEtJ7MjzRNU6VgM + +JWuAm9QTsIDDsFgkv1RKgpunM8GcvZSSNxzQMPK8z0MRLZrth4FUvDP2dvCWQveJIieJue2Papq + oDwuxPjsmnCauOhCuTJiXGde3E6xs+tQppoLvFj0KGxXyHhAJlwKtn0fZLP+kGOs1aWyIbbiNCZ3 + 6JKuSk8hzNhtnZCl9ZoCVdwtcY9Oz6ltu426eEwf9nuew0Gacuhfac/untqHUyllFZwPusPuqTh4 + ox/5urou3ZA414+BJnPz3kJ3+eyJp6xJMtxpZyDmxAqzonxdSAO92YCHJKBLaeG14+ss1cj0/Sfx + d7s34mYtuyjoeoNYZ9Lxid/cB1o+bEac00XhY60vM2iuS53pm9fVm/bao4IoPz7Z2lZ2nGZOMKCP + MPUskCsnFMTGt9RwP8QkXSrn5LMo3sIPn6isbrXwI30tCZjmtISQzb0YwI1UgH5nM88jUStVjyqF + j3a9kg1316b0WOcUyb6fMp9IB5O3X33Q5vkQ3f2s2mFahFR9d+8tcwtSJsOmSVR4Ej5g3tk84f7H + aIAbyZmtn9Ul+WIaYRApaOygPyPeyxuzRGoaiWRzukfJgC+yAInVHyl6c8Xrz2e1Qp0VYKp9KRTV + KYMbBKUd42HxffLRaPoGvrD+4laTS296Ly81mvGeOMnYc/qSTBvsQ+6xHx5RUBUK+soP2No47Qq+ + mm6X1TXcjMR+2DbvuGNKKBb3I7PC5csb5vpGhap92E7dn2a80wENb3EgrvC4J/ywHi4o/WxNEuE4 + S0b9bVR/fk/ErYTYN5RcWFWlwZzvgLy+JKUFrprrxEzonvPM0QyY+Ys4+6MS8gEkgDS59cToT20x + bbOboJKrd2B49bmh6TnUBnCwTLJ3P9eifxQPX+Gtl1BxXR+T4Xn/XlbyNfCw8EqoOULa7UFVjz4t + XwscsmfabOEiKUcSnpHFR843KtBwsum8n5LJrImC0tdixHznm4V4e+nubzwL9wQlw7RIKGqcYcnm + +ueta7kXlB1CFU+fzm3HfrjFan3PR6xyy06mBq8jbd6fxLG6LBwtWcdA09tENs+vmLCAqR2IdKER + c3rWCSNHrgPJsjfzVO+WTOfLV4AUxgdddVaEpolrLuqRvp7XN+B860cAj4kbRI9tEnKJHLdIu+8f + 
DA8vNeynNMNo5k+m20LQDnIlWygXfQGLaqCgMTxMhlZKwolZ5dksph9fVmL6psvtu0rGk9bE0Lvq + l5lvdfKoq7Y6GCtHol/igDcG73L7Z/0Pqot438auC/3inrD1KtyEvNoUBuQk1cnxnn3bTlyssLpn + UsecKd8koxX0oMJtuWHBLb3xaVqqWH2JSoeHoZl4zxb9Q92o4LDHm1xaCsvFEZ7X04bcp6zinRus + KPKPVcD0eTyLuTeBeCsR8dqvz7ljdgoq/MohuDldzelcPVP0GN5r5q+bweQfHFpa32yuWMxYbE5C + ruZwWnxObJOx2Jv090rQfHIviJP3RdLnC0+B92LcsR/f99gb0x9ezfi0Mpn8lgWg6JPj1ah+26kZ + ShuxVX0gZBnJfLCkCyDdVgSMJIRbWcDQIJBogpe2fzC7H35ScjGxjJV1KK3I9gYfcHVCjtgx5d/1 + vffDxmO66j2+TwobVubzSiHFvJ3IQ5fAOLMN5fFmb7IZ/1HSHk7ErtLQZEqk4dUOv1S8Ur7eHz0F + 0vB0/vC3lJzUGM18zOxtbZnDO4sAqQXdUWV3W5p9dZcxdGGyYJtzuDPpIl+USDxdvyQ4xbfk02Z7 + Cmss12zr7TniGmtUOPuNxSxNtJNJVUMMgxpMVBY8O5E/rbKHK5Vt+qJlgYaE4ky9H+0dwVGhe5Ih + WAvQ131OHFtqQ/YwrgtYbP3yh99tn+qnBTD5sZ/50k3YpRkxzPqQXd110o5OmzXaprxY5G6Wa1O6 + PnwFHRS6Y/5Zccyx1uUM1LfiYvj0X5Mz3TegP/Vntr7aa5OWUlaChHc9sQKzRXy4pRgwIirZLFID + TSxaq/Crf3d6VSZ/P+8dwse7REzXVL3hp2e/9l4ieNa/w+4BMSh3+qaFsnxyWq+rCRhZlnj5CgXU + p2gfadiXbsQRjRTxINlUIHz0HbtNL9ucvGyItPzQGCygwt0cDznUqhH1Atu5a9R2Ie8zVZa3OZn1 + KeeBkk+wux9fLNgFuTclyt3+3S9FbheFg7gYsfZ0tycs+cngjSeDAZrO9wPDoA/etKq5rakaq5jv + X8diKnolQzvPzij8+DDqawpDfouxyPt1IrFl1IHS9BFb07Lgg7Q/XGD5aSzKw4tVjJCWezTXB9ke + 2bfoqZuXmlN/GZW/qYV4BaqCjkOzoGOVB2g6X14CguXZIX7END7eqDb86gvX74PsURnnKuxMFhD3 + rBntTx+r+ff5YHb8Gb3JHk8PWEtRxCy3iVv2PmQ2mvcT8/anU8LWnbUFfHxKNHeedtidBE9H9QLl + xG9uQjuRSlHgx3fb+6P9rS8FqXMsCrN+YUJ3kFQYix2x9IfYTuYhx6B4Wcn01+VgznzcgHrbCsQT + sBn+3iM/FCuCTf8586VbqjQcbOZsn13C9Y2kIr4bGuKPqybk53ztalmOc+KX62U77kKa//CFnlFy + QOPle+wgi58+Lkc+hNMBSxMsP7XF9Dc6JP3zo3cQt31KiLRo21lfKloeSiYeomfZDvCZbC3d+R37 + 1e8Y27cHoodyw0z5XfPBuvABnvVLZPikxYiFU3WEfXy4YOF7t0zBdMIGntfzhu2MRYG+VtAvgPm5 + y6zjzg97bf/BQN8Lh3b5py5eV5IZ2ievOkKEg54IZqlIgMXqSdxDGbRTHMkRjPc4+eUFfFi7da26 + 7qXGpS03Lb+trAg8J2NYvFy/fLLwCYMPn4Y5ZVwmo7zxSjTXG/ESweWjlYc2OP5jTyfF6j026t8c + IHxPxKxDJ+l+es2ZAovtlkbmcUs9H6HtW5dZYarz/ilIEdg0X+Gn7R+8Ecv+EQ2edCTBocDh9HKH + Pcx+hhY7vyi4RG5bsKRHP6//OPuFUfnhJdaMQ9eO4vZSAno0GMtlo7bdxl90sNsqTywpjmvS7wcM + 
NNitTkgaROFYN4cBih6UX32EXMzSQSWpmzNnnA5e/+qKCV7n5Yno/p0W31l/qc71sSS+k1lIQh66 + QHoc1uT28+dRcdsC3Bcx8zWv86bWFWIQkiPHAz8vvGkj5A8Yj5rMrEPle2MprmL08zP2hznFAB/V + Br1PZayUWsbHXx7STGzDLPm78Wj6yC9Ij52S2fWx9uiPv2a/O+83MeFaoG+1Of+hwy4wkNCY6gRU + jXdUft0omrpxVyI5K5/M/PYPczgX+kXLrNAjO+Xbep2R6hO8OG1JwFWtGDet6UJwOjGyfR/O5sB2 + KP75byynEU54ZOEHzHkKc/vFm/fPx76BT2rURBfuDz7VfGWjAwGLJfZO8EZ8ZTaa9yOduskLpVkv + gDBKMtsF/Tscp6UBcD8VBf3lKe0tYVt0LY0rsw+voeVmsarU/QfrzEiDk1fKVhpB8Y58NucH3rDf + v49IqlYTI8X9iQYj3FNEz0HGfvnKEMl7HW7X0WexmZvJ5G8lH2lbNf2j33vk+S6c+FQTb54v9z9u + A+Fxzeh0juqkaUpmrDa6YRDyOC/D3/NHVUoz4qEH5lSnL1ULXf9IzEPO0HDRLhVSmFKx+KFp4RRP + aQZ17D3YjjipN+VhFv/ZT75lbsJpN07w+z9S4Xfi9UY6GEDyriKHfSShXpCGiwbedPrppZB9bT0H + llka2z02TsvXRWSpij9qbGehxOSt11Zqie0984xVnfzxu3O+RrD+FMzhcvZiNOs/Musxk87+HzWb + ckF8dYr57F9uiAa+wbaLxCzEuz9kIBWPCzOM65v/9MFqH4eXP3w6lKpTr1rReDBrVaXhcMPDAHFl + rUnCt3kx7W9IQtLTPrGdPTI+Hp+sRstwb9B25ovxOrkTisKXwgwDbFPOOJGgrDeUOOfFmkv3wcg0 + pIo9caOs8HiQBCWkq9ginri5ol64JhjWSnZia6qo5pSnlxuEKufEg5IUk3vR99qTjAOLm2UYNt24 + LiG11jdK5cYs6kEZ6j/+1FJXA/9uWs9FpSSd6IKWhTkZW3WrSsfXQDaRu+TT+grDas4bqHgM6pad + mqRDaGOr+Pk4P5Mx8UMd8lAw8XCUipYvToOP3m8i4lVKjmjaRacJZv/JzLypQpp+jik6+SFQQROr + cBw93ULgDaeZv0s0608B/fQYVoYyaa4VLOC8qznl07MOmzPyS5D7T0BV860nA7ipCh2Dghkr9YX6 + yjnkalWxBTFcNnlsKQaAdp6V0UkdlZBG5hND0oYnLNJPhobVJ61+eItl5+oV4y/veRTlkYWWbZrj + vlo38P20nz94PPrjywf4aAJevMmlGM6rygb1eyrJNkUu+vEvFM5FnPW/ZPJf/Z78AzDTsguz7UZF + Qlcq2jRzP9d28pH+J9/FdSI2XvOx0ATDMrFm/WMi+dMOezioisv2j2XCJ+Sh22or0Qeb8wVPmp8n + bL3pQjZ91CVjE9xsdPL0glxN8ZawfHvLkaGfJjzNeZgaR8sIZrwhdv79mH2135WwYpiS7Wl5Crk9 + WRTCpbZh/lCJfOyT3v/hyc8vIt6bV13d8FL4g69tg4cKppdRka23D/nQnm4K5JVhEdMsjFaWL64P + w0Vck/Xpdk74RjxQOE7SkgofoTDH5bLLkbR5rRmmZeHx4uBWP74lVrZdFdPyvjLQUWs7PKxp77Vk + 2GOA8DMx01+SYuhvGsDu1ZTE6nryx9+C697qn77iY91cB5j1H120dVBMqxrZSDrsz+Sa7I7eMGVS + pr49/8wcTfiYv/wdouUhwIOyq4vpJXkWOH66Z4ZweHlcC7ZbuGJGiXldv4vvdXIHNOcnzNX2TjsY + X+mGNndjYBtth0Nea5eb6sX9k/nLdcabYQ8q4EpU2MZAFf8e25KCm1Zf5l5NM2lx9+5gxl+2M0U1 + 
HNTgpUIXXwmWq4wk4/bZligKCwWP44GFY1EeMNSro0N5ehgTep0mQ9MkvSDWppQ8yhpWwhqLNXGv + 6gp14VUVIDGKA9v6sdCOnG4jJd3hjmrR00TSFTUWmvNecpv5bvCH1QIFKzZi62lc+C8/BPTGR7qc + /Wi/cmkN0pkZxL7qVTLN/4fOmS8TPOchQ1l8AeTIXBEc954pCtfQh0ZWzlQVHlrYOwPFoM8zFc6k + Q/Lyc4kB7LdJTNmoi3FPxw7q9ZiSNOhsc8zsRwZwh5iYp1PC+1O22f7yHHY9x7Y5seZTokhIM7JW + ho05Cha10BWJIvv5r/fle+tAjbScLuY8iKPTrUOzf2IbNbjw7y+vSVeRxUKyu7VDejcVkGThxpyu + 27dcO26otpweE95bTp6MY58Lf/Bg5Au15TQaXVhscUlIvObeJArHBuT26tPHw9e5WNiCof36CwYR + j8XX3KsL9aenF5t3VPB4/zVgNLkz518xGm0ojhC+3cus927FFFTJDQ1veaDCVJRFox98ipJle6bT + zCecfOrtL3+e+08GmpTM02GfPJq5XkhCkcdvcNbIe+5fXZM//mLGE6wQHnmdk69jqNrVnqVDJSJ+ + L4GiZ88c8stjpnyBXLXsDzrbNJbe8rlekUxaiS5nPvgOqdvB2XlMcz+naseyX+9R6JA1M5prbI7i + sk5B8pXol7+148UTczivBo88vg+jkPLGNlBsLC9YC3RqdiF/52BnHaKdoZWt+FDMCKx7VzNj1uMT + TNsafN/YsU1oPc3Jny4pOkf3F9k9Np+iu50z6de/INbcv+jU5JlBfD7d2PZycYtOVYQO3sUiJW6w + 7sPhllYXSLK0Zp50oUlHloEFR72ocJLrvidz2y+B7dWAylcrL8Z1rzY/PGXWcjmGFH0vFcx4yzar + 2AjlzWI9/fJ5qrg9b6drBQDj7eSSjfP4mCN1mwpu54/Hfvm9kAtlBXc75v/j93T6UtQ1P5mYy+8t + F5Pv4INl7SLmLQ8q/+l52DTrJwt2Q55McnExIEXZhm1HKQinLixvaM6bqGY1ay5tDbGGM5Y2hBwe + Rz62D9WHVHAntg7vmTd5EdqrJTl95+cbeLKSqUcoVhRjdakd+WCk2wH05lmRub54J0jrLdouTy77 + +Uv64+eYdj0z/Evo8XQYJ02TjIKYqhgV/AqHGPL0wZiPi7QdLp591NQ0FokZ10PI7Mnv0NyfxPKc + 7/G80fbIFSNKZv2QdM71ekSBrvW4nvlqor1QAeqlBXNTiSVDvGkx2OqRsF1sHgqe9GSP5PPaZYe9 + nhV9ptQCvG9Xh1mbMjblS7PCaF5vspluHeebb9rAS1S7P/1cLnRXCb5h+MbqZyqLIauuKSI5rchW + U4qwW8G6g1kf0vdu8Wg7S9ovtKjf+GT3VsJiJK2DgZAPzPzEeHc/YAH91vM7809XnksFzokgEFPZ + 7hLemwddc6JDj7mlXQt2OlUl+lY3jNHa5AXf6WcFYjdq8URr3ZPS4zuGdloHM/9yzo3jNYX5+lTU + VkYrel4IcPWOEXNcWzbHGW/gEEwm+fV7Zr+WIiABo4gvryZ7hpOt5d/7gw4RV/kQH7IjEu4PAWvp + kaDp1YcGut86kS6fDUdjLiYSOAQ1WIzcJxp3YZWvKK04c4ywNWl67CM4Brb3535GHH1zZMjSfc6D + Di2rbpctzHoZw6w3vi7OFvD371TAf/37/8eJAvGfEwX/nCj450TBPycK/jlR8L/mRMF/AwAA///s + 3TEKwkAABMA+rwjXB0JK/yJykMPCxAvmBBv/LqdBfEKQabfZDwy7RAFRQBQQBUQBUUAUEAVEAVFA + FBAFRAFRQBQQBUQBUUAUEAVEAVFAFBAFRAFRsB9RMBAFRAFRQBQQBUQBUUAUEAVEAVFAFBAFRAFR + 
QBQQBUQBUUAUEAVEAVFAFBAFRAFRQBQQBUQBUfDnoqBp2+P7BWHOY5oqDCjpUbovFejiGLu+Hz5X + Cfc1nlM4bAIhLLc8L+VU8iVd10oNtvWCUHKJ02/e1Kpn8wIAAP//AwArUdmvhGEAAA== + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 9017b86929d9de9b-EWR + Connection: + - keep-alive + Content-Encoding: + - gzip + Content-Type: + - application/json + Date: + - Mon, 13 Jan 2025 19:16:22 GMT + Server: + - cloudflare + Transfer-Encoding: + - chunked + X-Content-Type-Options: + - nosniff + access-control-allow-origin: + - '*' + access-control-expose-headers: + - X-Request-ID + alt-svc: + - h3=":443"; ma=86400 + openai-model: + - text-embedding-ada-002 + openai-organization: + - datadog-staging + openai-processing-ms: + - '56' + openai-version: + - '2020-10-01' + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + via: + - envoy-router-867d7fff98-jp2xz + x-envoy-upstream-service-time: + - '34' + x-ratelimit-limit-requests: + - '10000' + x-ratelimit-limit-tokens: + - '10000000' + x-ratelimit-remaining-requests: + - '9999' + x-ratelimit-remaining-tokens: + - '9999979' + x-ratelimit-reset-requests: + - 6ms + x-ratelimit-reset-tokens: + - 0s + x-request-id: + - req_0664830d8f95882874d90777108542e2 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/llmobs/test_llmobs_ragas_evaluators.py b/tests/llmobs/test_llmobs_ragas_evaluators.py index 7fbfad974bd..cc02709baff 100644 --- a/tests/llmobs/test_llmobs_ragas_evaluators.py +++ b/tests/llmobs/test_llmobs_ragas_evaluators.py @@ -3,10 +3,12 @@ import mock import pytest +from ddtrace.llmobs._evaluators.ragas.answer_relevancy import RagasAnswerRelevancyEvaluator from ddtrace.llmobs._evaluators.ragas.context_precision import RagasContextPrecisionEvaluator from ddtrace.llmobs._evaluators.ragas.faithfulness import RagasFaithfulnessEvaluator from ddtrace.span import Span from tests.llmobs._utils import _expected_llmobs_llm_span_event +from tests.llmobs._utils import _expected_ragas_answer_relevancy_spans from 
tests.llmobs._utils import _expected_ragas_context_precision_spans from tests.llmobs._utils import _expected_ragas_faithfulness_spans from tests.llmobs._utils import _llm_span_with_expected_ragas_inputs_in_messages @@ -17,6 +19,10 @@ pytest.importorskip("ragas", reason="Tests require ragas to be available on user env") +ragas_answer_relevancy_cassette = logs_vcr.use_cassette( + "tests.llmobs.test_llmobs_ragas_evaluators.answer_relevancy_inference.yaml" +) + ragas_context_precision_single_context_cassette = logs_vcr.use_cassette( "tests.llmobs.test_llmobs_ragas_evaluators.test_ragas_context_precision_single_context.yaml" ) @@ -431,3 +437,186 @@ def test_ragas_context_precision_emits_traces(ragas, llmobs, llmobs_events): for child_span in ragas_spans[1:]: assert child_span["trace_id"] == root_span_trace_id assert child_span["parent_id"] == root_span_id + + +def test_ragas_answer_relevancy_init(ragas, llmobs): + rar_evaluator = RagasAnswerRelevancyEvaluator(llmobs) + assert rar_evaluator.llmobs_service == llmobs + assert rar_evaluator.ragas_answer_relevancy_instance == ragas.metrics.answer_relevancy + assert rar_evaluator.ragas_answer_relevancy_instance.llm == ragas.llms.llm_factory() + assert ( + rar_evaluator.ragas_answer_relevancy_instance.embeddings.embeddings + == ragas.embeddings.embedding_factory().embeddings + ) + assert ( + rar_evaluator.ragas_answer_relevancy_instance.embeddings.run_config + == ragas.embeddings.embedding_factory().run_config + ) + + +def test_ragas_answer_relevancy_throws_if_dependencies_not_present(llmobs, mock_ragas_dependencies_not_present, ragas): + with pytest.raises(NotImplementedError, match="Failed to load dependencies for `ragas_answer_relevancy` evaluator"): + RagasAnswerRelevancyEvaluator(llmobs) + + +def test_ragas_answer_relevancy_returns_none_if_inputs_extraction_fails(ragas, mock_llmobs_submit_evaluation, llmobs): + rar_evaluator = RagasAnswerRelevancyEvaluator(llmobs) + failure_msg, _ = 
rar_evaluator.evaluate(_llm_span_without_io()) + assert failure_msg == "fail_extract_answer_relevancy_inputs" + assert rar_evaluator.llmobs_service.submit_evaluation.call_count == 0 + + +def test_ragas_answer_relevancy_has_modified_answer_relevancy_instance( + ragas, mock_llmobs_submit_evaluation, reset_ragas_answer_relevancy_llm, llmobs +): + """Answer relevancy instance used in ragas evaluator should match the global ragas context precision instance""" + from ragas.llms import BaseRagasLLM + from ragas.metrics import answer_relevancy + + class FirstDummyLLM(BaseRagasLLM): + def __init__(self): + super().__init__() + + def generate_text(self) -> str: + return "dummy llm" + + def agenerate_text(self) -> str: + return "dummy llm" + + answer_relevancy.llm = FirstDummyLLM() + + rar_evaluator = RagasAnswerRelevancyEvaluator(llmobs) + + assert rar_evaluator.ragas_answer_relevancy_instance.llm.generate_text() == "dummy llm" + + class SecondDummyLLM(BaseRagasLLM): + def __init__(self): + super().__init__() + + def generate_text(self) -> str: + return "second dummy llm" + + def agenerate_text(self) -> str: + return "second dummy llm" + + answer_relevancy.llm = SecondDummyLLM() + + rar_evaluator = RagasAnswerRelevancyEvaluator(llmobs) + + assert rar_evaluator.ragas_answer_relevancy_instance.llm.generate_text() == "second dummy llm" + + +def test_ragas_answer_relevancy_submits_evaluation( + ragas, llmobs, mock_llmobs_submit_evaluation, mock_ragas_answer_relevancy_calculate_similarity +): + """Test that evaluation is submitted for a valid llm span where question is in the prompt variables""" + rar_evaluator = RagasAnswerRelevancyEvaluator(llmobs) + llm_span = _llm_span_with_expected_ragas_inputs_in_prompt() + with ragas_answer_relevancy_cassette: + rar_evaluator.run_and_submit_evaluation(llm_span) + rar_evaluator.llmobs_service.submit_evaluation.assert_has_calls( + [ + mock.call( + span_context={ + "span_id": llm_span.get("span_id"), + "trace_id": llm_span.get("trace_id"), + 
}, + label=RagasAnswerRelevancyEvaluator.LABEL, + metric_type=RagasAnswerRelevancyEvaluator.METRIC_TYPE, + value=mock.ANY, + metadata={ + "_dd.evaluation_span": {"span_id": mock.ANY, "trace_id": mock.ANY}, + }, + ) + ] + ) + + +def test_ragas_answer_relevancy_submits_evaluation_on_span_with_question_in_messages( + ragas, llmobs, mock_llmobs_submit_evaluation, mock_ragas_answer_relevancy_calculate_similarity +): + """Test that evaluation is submitted for a valid llm span where the last message content is the question""" + rar_evaluator = RagasAnswerRelevancyEvaluator(llmobs) + llm_span = _llm_span_with_expected_ragas_inputs_in_messages() + with ragas_answer_relevancy_cassette: + rar_evaluator.run_and_submit_evaluation(llm_span) + rar_evaluator.llmobs_service.submit_evaluation.assert_has_calls( + [ + mock.call( + span_context={ + "span_id": llm_span.get("span_id"), + "trace_id": llm_span.get("trace_id"), + }, + label=RagasAnswerRelevancyEvaluator.LABEL, + metric_type=RagasAnswerRelevancyEvaluator.METRIC_TYPE, + value=mock.ANY, + metadata={ + "_dd.evaluation_span": {"span_id": mock.ANY, "trace_id": mock.ANY}, + }, + ) + ] + ) + + +def test_ragas_answer_relevancy_submits_evaluation_on_span_with_custom_keys( + ragas, llmobs, mock_llmobs_submit_evaluation, mock_ragas_answer_relevancy_calculate_similarity +): + """Test that evaluation is submitted for a valid llm span where the last message content is the question""" + rar_evaluator = RagasAnswerRelevancyEvaluator(llmobs) + llm_span = _expected_llmobs_llm_span_event( + Span("dummy"), + prompt={ + "variables": { + "user_input": "Is france part of europe?", + "context_2": "irrelevant", + "context_3": "France is part of europe", + }, + "_dd_context_variable_keys": ["context_2", "context_3"], + "_dd_query_variable_keys": ["user_input"], + }, + output_messages=[{"content": "France is indeed part of europe"}], + ) + with ragas_answer_relevancy_cassette: + rar_evaluator.run_and_submit_evaluation(llm_span) + 
rar_evaluator.llmobs_service.submit_evaluation.assert_has_calls( + [ + mock.call( + span_context={ + "span_id": llm_span.get("span_id"), + "trace_id": llm_span.get("trace_id"), + }, + label=RagasAnswerRelevancyEvaluator.LABEL, + metric_type=RagasAnswerRelevancyEvaluator.METRIC_TYPE, + value=mock.ANY, + metadata={ + "_dd.evaluation_span": {"span_id": mock.ANY, "trace_id": mock.ANY}, + }, + ) + ] + ) + + +def test_ragas_answer_relevancy_emits_traces( + ragas, llmobs, llmobs_events, mock_ragas_answer_relevancy_calculate_similarity +): + rar_evaluator = RagasAnswerRelevancyEvaluator(llmobs) + with ragas_answer_relevancy_cassette: + rar_evaluator.evaluate(_llm_span_with_expected_ragas_inputs_in_prompt()) + + ragas_spans = [event for event in llmobs_events if event["name"].startswith("dd-ragas.")] + ragas_spans = sorted(ragas_spans, key=lambda d: d["start_ns"]) + + assert len(ragas_spans) == 3 + # check name, io, span kinds match + assert ragas_spans == _expected_ragas_answer_relevancy_spans() + + # verify the trace structure + root_span = ragas_spans[0] + root_span_id = root_span["span_id"] + assert root_span["parent_id"] == "undefined" + assert root_span["meta"] is not None + + root_span_trace_id = root_span["trace_id"] + for child_span in ragas_spans[1:]: + assert child_span["trace_id"] == root_span_trace_id + assert child_span["parent_id"] == root_span_id From 87a74ae67047d3dd89b5d5ffa5b151ce621bdc48 Mon Sep 17 00:00:00 2001 From: Munir Abdinur Date: Wed, 15 Jan 2025 18:00:49 -0500 Subject: [PATCH 10/16] chore(tracer): deprecate multiple initializations of ddtrace.Tracer [3.0] (#11823) - Logs a deprecation warning if the Tracer is initialized more than once. The global tracer. - Ensures that multiple instances of the DummyTracer and CIVisibility Tracer can be created (maintain the current behavior). This will help reset tracer configurations between tests. - Update some tests that require multiple tracer instances. This will make the 3.0 upgrade easier to do. 
## Checklist - [x] PR author has checked that all the criteria below are met - The PR description includes an overview of the change - The PR description articulates the motivation for the change - The change includes tests OR the PR description describes a testing strategy - The PR description notes risks associated with the change, if any - Newly-added code is easy to change - The change follows the [library release note guidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html) - The change includes or references documentation updates if necessary - Backport labels are set (if [applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)) ## Reviewer Checklist - [x] Reviewer has checked that all the criteria below are met - Title is accurate - All changes are related to the pull request's stated goal - Avoids breaking [API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces) changes - Testing strategy adequately addresses listed risks - Newly-added code is easy to change - Release note makes sense to a user of the library - If necessary, author has acknowledged and discussed the performance implications of this PR as reported in the benchmarks PR comment - Backport labels are set in a manner that is consistent with the [release branch maintenance policy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting) --- ddtrace/_trace/tracer.py | 19 ++++- ddtrace/contrib/grpc/__init__.py | 8 +-- ddtrace/contrib/vertica/__init__.py | 3 +- ddtrace/internal/ci_visibility/recorder.py | 8 ++- ddtrace/opentracer/tracer.py | 14 +++- ...ple-tracer-instances-078b920081ba4a36.yaml | 4 ++ tests/integration/test_context_snapshots.py | 3 +- tests/integration/test_debug.py | 34 ++++++--- tests/integration/test_encoding.py | 8 +-- tests/integration/test_integration.py | 37 +++++++--- .../test_integration_civisibility.py | 15 ++-- .../integration/test_integration_snapshots.py | 72 +++++++++++-------- 
tests/integration/test_priority_sampling.py | 3 +- tests/integration/test_propagation.py | 46 ++---------- tests/integration/test_sampling.py | 23 ++++-- tests/integration/test_settings.py | 2 +- tests/integration/test_trace_stats.py | 19 ++--- tests/integration/test_tracemethods.py | 2 +- tests/integration/utils.py | 2 +- tests/profiling/test_profiler.py | 43 +++++++++-- ...ace_with_wrong_metrics_types_not_sent.json | 25 ------- ...tracetagsprocessor_only_adds_new_tags.json | 2 +- ...propagation.test_trace_tags_multispan.json | 70 ++++++++++++++++++ tests/tracer/test_tracer.py | 24 +++++++ tests/utils.py | 9 +-- 25 files changed, 323 insertions(+), 172 deletions(-) create mode 100644 releasenotes/notes/deprecate-multiple-tracer-instances-078b920081ba4a36.yaml delete mode 100644 tests/snapshots/tests.integration.test_integration_snapshots.test_trace_with_wrong_metrics_types_not_sent.json create mode 100644 tests/snapshots/tests.integration.test_propagation.test_trace_tags_multispan.json diff --git a/ddtrace/_trace/tracer.py b/ddtrace/_trace/tracer.py index 46c00b0c515..83d2d580c2d 100644 --- a/ddtrace/_trace/tracer.py +++ b/ddtrace/_trace/tracer.py @@ -195,6 +195,7 @@ class Tracer(object): """ SHUTDOWN_TIMEOUT = 5 + _instance = None def __init__( self, @@ -209,7 +210,23 @@ def __init__( :param url: The Datadog agent URL. :param dogstatsd_url: The DogStatsD URL. """ - + # Do not set self._instance if this is a subclass of Tracer. Here we only want + # to reference the global instance. + if type(self) is Tracer: + if Tracer._instance is None: + Tracer._instance = self + else: + # ddtrace library does not support context propagation for multiple tracers. + # All instances of ddtrace ContextProviders share the same ContextVars. This means that + # if you create multiple instances of Tracer, spans will be shared between them creating a + # broken experience. 
+ # TODO(mabdinur): Convert this warning to an ValueError in 3.0.0 + deprecate( + "Support for multiple Tracer instances is deprecated", + ". Use ddtrace.tracer instead.", + category=DDTraceDeprecationWarning, + removal_version="3.0.0", + ) self._filters: List[TraceFilter] = [] # globally set tags diff --git a/ddtrace/contrib/grpc/__init__.py b/ddtrace/contrib/grpc/__init__.py index c746edb17cc..c95633b4024 100644 --- a/ddtrace/contrib/grpc/__init__.py +++ b/ddtrace/contrib/grpc/__init__.py @@ -46,13 +46,12 @@ import grpc from ddtrace import patch - from ddtrace.trace import Pin, Tracer + from ddtrace.trace import Pin patch(grpc=True) - custom_tracer = Tracer() # override the pin on the client - Pin.override(grpc.Channel, service='mygrpc', tracer=custom_tracer) + Pin.override(grpc.Channel, service='mygrpc') with grpc.insecure_channel('localhost:50051') as channel: # create stubs and send requests pass @@ -66,10 +65,9 @@ from ddtrace.trace import Pin, Tracer patch(grpc=True) - custom_tracer = Tracer() # override the pin on the server - Pin.override(grpc.Server, service='mygrpc', tracer=custom_tracer) + Pin.override(grpc.Server, service='mygrpc') server = grpc.server(logging_pool.pool(2)) server.add_insecure_port('localhost:50051') add_MyServicer_to_server(MyServicer(), server) diff --git a/ddtrace/contrib/vertica/__init__.py b/ddtrace/contrib/vertica/__init__.py index e4fc457ac13..4da8a844e83 100644 --- a/ddtrace/contrib/vertica/__init__.py +++ b/ddtrace/contrib/vertica/__init__.py @@ -33,11 +33,10 @@ import vertica_python - custom_tracer = Tracer() conn = vertica_python.connect(**YOUR_VERTICA_CONFIG) # override the service and tracer to be used - Pin.override(conn, service='myverticaservice', tracer=custom_tracer) + Pin.override(conn, service='myverticaservice') """ diff --git a/ddtrace/internal/ci_visibility/recorder.py b/ddtrace/internal/ci_visibility/recorder.py index 609475506d3..a69d1d72c6b 100644 --- a/ddtrace/internal/ci_visibility/recorder.py +++ 
b/ddtrace/internal/ci_visibility/recorder.py @@ -145,6 +145,12 @@ def _do_request(method, url, payload, headers, timeout=DEFAULT_TIMEOUT): return result +class CIVisibilityTracer(Tracer): + def __init__(self, *args, **kwargs): + # Allows for multiple instances of the civis tracer to be created without logging a warning + super(CIVisibilityTracer, self).__init__(*args, **kwargs) + + class CIVisibility(Service): _instance = None # type: Optional[CIVisibility] enabled = False @@ -166,7 +172,7 @@ def __init__(self, tracer=None, config=None, service=None): log.debug("Using _CI_DD_AGENT_URL for CI Visibility tracer: %s", env_agent_url) url = env_agent_url - self.tracer = Tracer(context_provider=CIContextProvider(), url=url) + self.tracer = CIVisibilityTracer(context_provider=CIContextProvider(), url=url) else: self.tracer = ddtrace.tracer diff --git a/ddtrace/opentracer/tracer.py b/ddtrace/opentracer/tracer.py index 489c025037a..110fad2401c 100644 --- a/ddtrace/opentracer/tracer.py +++ b/ddtrace/opentracer/tracer.py @@ -18,6 +18,7 @@ from ddtrace.internal.constants import SPAN_API_OPENTRACING from ddtrace.internal.utils.config import get_application_name from ddtrace.settings import ConfigException +from ddtrace.vendor.debtcollector import deprecate from ..internal.logger import get_logger from .propagation import HTTPPropagator @@ -70,8 +71,8 @@ def __init__( If ``None`` is provided, defaults to :class:`opentracing.scope_managers.ThreadLocalScopeManager`. :param dd_tracer: (optional) the Datadog tracer for this tracer to use. This - should only be passed if a custom Datadog tracer is being used. Defaults - to the global ``ddtrace.tracer`` tracer. + parameter is deprecated and will be removed in v3.0.0. The + to the global tracer (``ddtrace.tracer``) should always be used. 
""" # Merge the given config with the default into a new dict self._config = DEFAULT_CONFIG.copy() @@ -99,7 +100,14 @@ def __init__( self._scope_manager = scope_manager or ThreadLocalScopeManager() dd_context_provider = get_context_provider_for_scope_manager(self._scope_manager) - self._dd_tracer = dd_tracer or ddtrace.tracer or DatadogTracer() + if dd_tracer is not None: + deprecate( + "The ``dd_tracer`` parameter is deprecated", + message="The global tracer (``ddtrace.tracer``) will be used instead.", + removal_version="3.0.0", + ) + + self._dd_tracer = dd_tracer or ddtrace.tracer self._dd_tracer.set_tags(self._config.get(keys.GLOBAL_TAGS)) # type: ignore[arg-type] self._dd_tracer.configure( enabled=self._config.get(keys.ENABLED), diff --git a/releasenotes/notes/deprecate-multiple-tracer-instances-078b920081ba4a36.yaml b/releasenotes/notes/deprecate-multiple-tracer-instances-078b920081ba4a36.yaml new file mode 100644 index 00000000000..7b96d366269 --- /dev/null +++ b/releasenotes/notes/deprecate-multiple-tracer-instances-078b920081ba4a36.yaml @@ -0,0 +1,4 @@ +--- +deprecations: + - | + tracing: Deprecates the use of multiple tracer instances in the same process. The global tracer (``ddtrace.tracer``) `should be used instead. 
diff --git a/tests/integration/test_context_snapshots.py b/tests/integration/test_context_snapshots.py index 4ed44bd558e..612422064a0 100644 --- a/tests/integration/test_context_snapshots.py +++ b/tests/integration/test_context_snapshots.py @@ -1,9 +1,8 @@ import pytest +from tests.integration.utils import AGENT_VERSION from tests.utils import snapshot -from .test_integration import AGENT_VERSION - pytestmark = pytest.mark.skipif(AGENT_VERSION != "testagent", reason="Tests only compatible with a testagent") diff --git a/tests/integration/test_debug.py b/tests/integration/test_debug.py index f01ff1edfb3..8c51db4bf7c 100644 --- a/tests/integration/test_debug.py +++ b/tests/integration/test_debug.py @@ -1,10 +1,8 @@ -from datetime import datetime import json import logging import os import re import subprocess -import sys from typing import List from typing import Optional @@ -17,11 +15,10 @@ from ddtrace.internal import debug from ddtrace.internal.writer import AgentWriter from ddtrace.internal.writer import TraceWriter +from tests.integration.utils import AGENT_VERSION from tests.subprocesstest import SubprocessTestCase from tests.subprocesstest import run_in_subprocess -from .test_integration import AGENT_VERSION - pytestmark = pytest.mark.skipif(AGENT_VERSION == "testagent", reason="The test agent doesn't support startup logs.") @@ -36,7 +33,14 @@ def __eq__(self, other): return Match() +@pytest.mark.subprocess() def test_standard_tags(): + from datetime import datetime + import sys + + import ddtrace + from ddtrace.internal import debug + f = debug.collect(ddtrace.tracer) date = f.get("date") @@ -94,7 +98,7 @@ def test_standard_tags(): assert f.get("tracer_enabled") is True assert f.get("sampler_type") == "DatadogSampler" assert f.get("priority_sampler_type") == "N/A" - assert f.get("service") == "tests.integration" + assert f.get("service") == "ddtrace_subprocess_dir" assert f.get("dd_version") == "" assert f.get("debug") is False assert f.get("enabled_cli") is 
False @@ -110,8 +114,13 @@ def test_standard_tags(): assert icfg["flask"] == "N/A" +@pytest.mark.subprocess() def test_debug_post_configure(): - tracer = ddtrace.Tracer() + import re + + from ddtrace import tracer + from ddtrace.internal import debug + tracer.configure( hostname="0.0.0.0", port=1234, @@ -122,16 +131,21 @@ def test_debug_post_configure(): agent_url = f.get("agent_url") assert agent_url == "http://0.0.0.0:1234" - assert f.get("is_global_tracer") is False + assert f.get("is_global_tracer") is True assert f.get("tracer_enabled") is True agent_error = f.get("agent_error") # Error code can differ between Python version assert re.match("^Agent not reachable.*Connection refused", agent_error) - # Tracer doesn't support re-configure()-ing with a UDS after an initial - # configure with normal http settings. So we need a new tracer instance. - tracer = ddtrace.Tracer() + +@pytest.mark.subprocess() +def test_debug_post_configure_uds(): + import re + + from ddtrace import tracer + from ddtrace.internal import debug + tracer.configure(uds_path="/file.sock") f = debug.collect(tracer) diff --git a/tests/integration/test_encoding.py b/tests/integration/test_encoding.py index 43c47ac4840..7138ff94e00 100644 --- a/tests/integration/test_encoding.py +++ b/tests/integration/test_encoding.py @@ -18,7 +18,7 @@ def test_simple_trace_accepted_by_agent(self): for _ in range(999): with tracer.trace("child"): pass - tracer.shutdown() + tracer.flush() log.warning.assert_not_called() log.error.assert_not_called() @@ -39,7 +39,7 @@ def test_trace_with_meta_accepted_by_agent(self, tags): for _ in range(999): with tracer.trace("child") as child: child.set_tags(tags) - tracer.shutdown() + tracer.flush() log.warning.assert_not_called() log.error.assert_not_called() @@ -60,7 +60,7 @@ def test_trace_with_metrics_accepted_by_agent(self, metrics): for _ in range(999): with tracer.trace("child") as child: child.set_metrics(metrics) - tracer.shutdown() + tracer.flush() 
log.warning.assert_not_called() log.error.assert_not_called() @@ -79,6 +79,6 @@ def test_trace_with_links_accepted_by_agent(self, span_links_kwargs): for _ in range(10): with tracer.trace("child") as child: child.set_link(**span_links_kwargs) - tracer.shutdown() + tracer.flush() log.warning.assert_not_called() log.error.assert_not_called() diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py index 78606bbde14..529bdbcd40b 100644 --- a/tests/integration/test_integration.py +++ b/tests/integration/test_integration.py @@ -10,12 +10,8 @@ from ddtrace import Tracer from ddtrace.internal.atexit import register_on_exit_signal from ddtrace.internal.runtime import container -from ddtrace.internal.writer import AgentWriter -from tests.integration.utils import AGENT_VERSION -from tests.integration.utils import BadEncoder from tests.integration.utils import import_ddtrace_in_subprocess from tests.integration.utils import parametrize_with_all_encodings -from tests.integration.utils import send_invalid_payload_and_get_logs from tests.integration.utils import skip_if_testagent from tests.utils import call_program @@ -23,8 +19,11 @@ FOUR_KB = 1 << 12 +@pytest.mark.subprocess() def test_configure_keeps_api_hostname_and_port(): - tracer = Tracer() + from ddtrace import tracer + from tests.integration.utils import AGENT_VERSION + assert tracer._writer.agent_url == "http://localhost:{}".format("9126" if AGENT_VERSION == "testagent" else "8126") tracer.configure(hostname="127.0.0.1", port=8127) assert tracer._writer.agent_url == "http://127.0.0.1:8127" @@ -506,8 +505,12 @@ def test_validate_headers_in_payload_to_intake_with_nested_spans(): assert headers.get("X-Datadog-Trace-Count") == "10" +@parametrize_with_all_encodings def test_trace_with_invalid_client_endpoint_generates_error_log(): - t = Tracer() + import mock + + from ddtrace import tracer as t + for client in t._writer._clients: client.ENDPOINT = "/bad" with 
mock.patch("ddtrace.internal.writer.writer.log") as log: @@ -526,7 +529,12 @@ def test_trace_with_invalid_client_endpoint_generates_error_log(): @skip_if_testagent +@pytest.mark.subprocess(err=None) def test_trace_with_invalid_payload_generates_error_log(): + import mock + + from tests.integration.utils import send_invalid_payload_and_get_logs + log = send_invalid_payload_and_get_logs() log.error.assert_has_calls( [ @@ -541,11 +549,11 @@ def test_trace_with_invalid_payload_generates_error_log(): @skip_if_testagent -@pytest.mark.subprocess(env={"_DD_TRACE_WRITER_LOG_ERROR_PAYLOADS": "true", "DD_TRACE_API_VERSION": "v0.5"}) +@pytest.mark.subprocess(env={"_DD_TRACE_WRITER_LOG_ERROR_PAYLOADS": "true", "DD_TRACE_API_VERSION": "v0.5"}, err=None) def test_trace_with_invalid_payload_logs_payload_when_LOG_ERROR_PAYLOADS(): import mock - from tests.integration.test_integration import send_invalid_payload_and_get_logs + from tests.integration.utils import send_invalid_payload_and_get_logs log = send_invalid_payload_and_get_logs() log.error.assert_has_calls( @@ -562,12 +570,12 @@ def test_trace_with_invalid_payload_logs_payload_when_LOG_ERROR_PAYLOADS(): @skip_if_testagent -@pytest.mark.subprocess(env={"_DD_TRACE_WRITER_LOG_ERROR_PAYLOADS": "true", "DD_TRACE_API_VERSION": "v0.5"}) +@pytest.mark.subprocess(env={"_DD_TRACE_WRITER_LOG_ERROR_PAYLOADS": "true", "DD_TRACE_API_VERSION": "v0.5"}, err=None) def test_trace_with_non_bytes_payload_logs_payload_when_LOG_ERROR_PAYLOADS(): import mock - from tests.integration.test_integration import send_invalid_payload_and_get_logs from tests.integration.utils import BadEncoder + from tests.integration.utils import send_invalid_payload_and_get_logs class NonBytesBadEncoder(BadEncoder): def encode(self): @@ -590,7 +598,11 @@ def encode_traces(self, traces): ) +@pytest.mark.subprocess(err=None) def test_trace_with_failing_encoder_generates_error_log(): + from tests.integration.utils import BadEncoder + from tests.integration.utils import 
send_invalid_payload_and_get_logs + class ExceptionBadEncoder(BadEncoder): def encode(self): raise Exception() @@ -620,8 +632,11 @@ def test_api_version_downgrade_generates_no_warning_logs(): log.error.assert_not_called() +@pytest.mark.subprocess() def test_synchronous_writer_shutdown_raises_no_exception(): - tracer = Tracer() + from ddtrace import tracer + from ddtrace.internal.writer import AgentWriter + tracer.configure(writer=AgentWriter(tracer._writer.agent_url, sync_mode=True)) tracer.shutdown() diff --git a/tests/integration/test_integration_civisibility.py b/tests/integration/test_integration_civisibility.py index 8a504f6a220..6cb284457f8 100644 --- a/tests/integration/test_integration_civisibility.py +++ b/tests/integration/test_integration_civisibility.py @@ -3,17 +3,14 @@ import mock import pytest -from ddtrace._trace.tracer import Tracer from ddtrace.internal import agent from ddtrace.internal.ci_visibility import CIVisibility from ddtrace.internal.ci_visibility._api_client import TestVisibilityAPISettings from ddtrace.internal.ci_visibility.constants import AGENTLESS_ENDPOINT -from ddtrace.internal.ci_visibility.constants import COVERAGE_TAG_NAME from ddtrace.internal.ci_visibility.constants import EVP_PROXY_AGENT_ENDPOINT from ddtrace.internal.ci_visibility.constants import EVP_SUBDOMAIN_HEADER_EVENT_VALUE from ddtrace.internal.ci_visibility.constants import EVP_SUBDOMAIN_HEADER_NAME -from ddtrace.internal.ci_visibility.writer import CIVisibilityWriter -from ddtrace.internal.utils.http import Response +from ddtrace.internal.ci_visibility.recorder import CIVisibilityTracer as Tracer from tests.ci_visibility.util import _get_default_civisibility_ddconfig from tests.utils import override_env @@ -74,9 +71,17 @@ def test_civisibility_intake_with_apikey(): CIVisibility.disable() +@pytest.mark.subprocess() def test_civisibility_intake_payloads(): + import mock + + from ddtrace import tracer as t + from ddtrace.internal.ci_visibility.constants import 
COVERAGE_TAG_NAME + from ddtrace.internal.ci_visibility.recorder import CIVisibilityWriter + from ddtrace.internal.utils.http import Response + from tests.utils import override_env + with override_env(dict(DD_API_KEY="foobar.baz")): - t = Tracer() t.configure(writer=CIVisibilityWriter(reuse_connections=True, coverage_enabled=True)) t._writer._conn = mock.MagicMock() with mock.patch("ddtrace.internal.writer.Response.from_http_response") as from_http_response: diff --git a/tests/integration/test_integration_snapshots.py b/tests/integration/test_integration_snapshots.py index bd48faa34a6..6e656081eb1 100644 --- a/tests/integration/test_integration_snapshots.py +++ b/tests/integration/test_integration_snapshots.py @@ -7,23 +7,21 @@ from ddtrace import Tracer from ddtrace import tracer -from ddtrace.constants import AUTO_KEEP -from ddtrace.constants import SAMPLING_PRIORITY_KEY -from ddtrace.constants import USER_KEEP -from ddtrace.internal.writer import AgentWriter +from tests.integration.utils import AGENT_VERSION from tests.integration.utils import mark_snapshot from tests.integration.utils import parametrize_with_all_encodings from tests.utils import override_global_config from tests.utils import snapshot -from .test_integration import AGENT_VERSION - pytestmark = pytest.mark.skipif(AGENT_VERSION != "testagent", reason="Tests only compatible with a testagent") @snapshot(include_tracer=True) +@pytest.mark.subprocess() def test_single_trace_single_span(tracer): + from ddtrace import tracer + s = tracer.trace("operation", service="my-svc") s.set_tag("k", "v") # numeric tag @@ -31,11 +29,14 @@ def test_single_trace_single_span(tracer): s.set_metric("float_metric", 12.34) s.set_metric("int_metric", 4321) s.finish() - tracer.shutdown() + tracer.flush() @snapshot(include_tracer=True) +@pytest.mark.subprocess() def test_multiple_traces(tracer): + from ddtrace import tracer + with tracer.trace("operation1", service="my-svc") as s: s.set_tag("k", "v") s.set_tag("num", 1234) 
@@ -49,15 +50,22 @@ def test_multiple_traces(tracer): s.set_metric("float_metric", 12.34) s.set_metric("int_metric", 4321) tracer.trace("child").finish() - tracer.shutdown() + tracer.flush() -@pytest.mark.parametrize( - "writer", - ("default", "sync"), -) @snapshot(include_tracer=True) -def test_filters(writer, tracer): +@pytest.mark.subprocess( + parametrize={"DD_WRITER_MODE": ["default", "sync"]}, + token="tests.integration.test_integration_snapshots.test_filters", +) +def test_filters(): + import os + + from ddtrace import tracer + from ddtrace.internal.writer import AgentWriter + + writer = os.environ.get("DD_WRITER_MODE", "default") + if writer == "sync": writer = AgentWriter( tracer.agent_trace_url, @@ -89,15 +97,18 @@ def process_trace(self, trace): with tracer.trace("root"): with tracer.trace("child"): pass - tracer.shutdown() + tracer.flush() # Have to use sync mode snapshot so that the traces are associated to this # test case since we use a custom writer (that doesn't have the trace headers # injected). +@pytest.mark.subprocess() @snapshot(async_mode=False) def test_synchronous_writer(): - tracer = Tracer() + from ddtrace import tracer + from ddtrace.internal.writer import AgentWriter + writer = AgentWriter(tracer._writer.agent_url, sync_mode=True) tracer.configure(writer=writer) with tracer.trace("operation1", service="my-svc"): @@ -117,19 +128,18 @@ def test_tracer_trace_across_popen(): the child span has does not have '_dd.p.dm' shows that sampling was run before fork automatically. """ - tracer = Tracer() def task(tracer): with tracer.trace("child"): pass - tracer.shutdown() + tracer.flush() with tracer.trace("parent"): p = multiprocessing.Process(target=task, args=(tracer,)) p.start() p.join() - tracer.shutdown() + tracer.flush() @snapshot(async_mode=False) @@ -140,31 +150,34 @@ def test_tracer_trace_across_multiple_popens(): the child span has does not have '_dd.p.dm' shows that sampling was run before fork automatically. 
""" - tracer = Tracer() def task(tracer): def task2(tracer): with tracer.trace("child2"): pass - tracer.shutdown() + tracer.flush() with tracer.trace("child1"): p = multiprocessing.Process(target=task2, args=(tracer,)) p.start() p.join() - tracer.shutdown() + tracer.flush() with tracer.trace("parent"): p = multiprocessing.Process(target=task, args=(tracer,)) p.start() p.join() - tracer.shutdown() + tracer.flush() @snapshot() +@pytest.mark.subprocess() def test_wrong_span_name_type_not_sent(): """Span names should be a text type.""" - tracer = Tracer() + import mock + + from ddtrace import tracer + with mock.patch("ddtrace._trace.span.log") as log: with tracer.trace(123): pass @@ -180,11 +193,9 @@ def test_wrong_span_name_type_not_sent(): ], ) @pytest.mark.parametrize("encoding", ["v0.4", "v0.5"]) -@snapshot() def test_trace_with_wrong_meta_types_not_sent(encoding, meta, monkeypatch): """Wrong meta types should raise TypeErrors during encoding and fail to send to the agent.""" with override_global_config(dict(_trace_api=encoding)): - tracer = Tracer() with mock.patch("ddtrace._trace.span.log") as log: with tracer.trace("root") as root: root._meta = meta @@ -218,14 +229,19 @@ def test_trace_with_wrong_metrics_types_not_sent(encoding, metrics, monkeypatch) log.exception.assert_called_once_with("error closing trace") -@snapshot() +@pytest.mark.subprocess() +@pytest.mark.snapshot() def test_tracetagsprocessor_only_adds_new_tags(): - tracer = Tracer() + from ddtrace import tracer + from ddtrace.constants import AUTO_KEEP + from ddtrace.constants import SAMPLING_PRIORITY_KEY + from ddtrace.constants import USER_KEEP + with tracer.trace(name="web.request") as span: span.context.sampling_priority = AUTO_KEEP span.set_metric(SAMPLING_PRIORITY_KEY, USER_KEEP) - tracer.shutdown() + tracer.flush() # Override the token so that both parameterizations of the test use the same snapshot diff --git a/tests/integration/test_priority_sampling.py 
b/tests/integration/test_priority_sampling.py index 59177be57cb..653ef96d49e 100644 --- a/tests/integration/test_priority_sampling.py +++ b/tests/integration/test_priority_sampling.py @@ -9,12 +9,11 @@ from ddtrace.internal.encoding import MsgpackEncoderV04 as Encoder from ddtrace.internal.writer import AgentWriter from ddtrace.tracer import Tracer +from tests.integration.utils import AGENT_VERSION from tests.integration.utils import parametrize_with_all_encodings from tests.integration.utils import skip_if_testagent from tests.utils import override_global_config -from .test_integration import AGENT_VERSION - def _turn_tracer_into_dummy(tracer): """Override tracer's writer's write() method to keep traces instead of sending them away""" diff --git a/tests/integration/test_propagation.py b/tests/integration/test_propagation.py index 5bd0a122a8c..bcad0ed4432 100644 --- a/tests/integration/test_propagation.py +++ b/tests/integration/test_propagation.py @@ -1,44 +1,15 @@ import pytest -from ddtrace import Tracer +from ddtrace import tracer from ddtrace.constants import MANUAL_DROP_KEY from ddtrace.propagation.http import HTTPPropagator -from tests.utils import override_global_config - -from .test_integration import AGENT_VERSION +from tests.integration.utils import AGENT_VERSION pytestmark = pytest.mark.skipif(AGENT_VERSION != "testagent", reason="Tests only compatible with a testagent") -@pytest.fixture( - params=[ - dict(global_config=dict()), - dict( - global_config=dict(_x_datadog_tags_max_length="0", _x_datadog_tags_enabled=False), - ), - dict(global_config=dict(), partial_flush_enabled=True, partial_flush_min_spans=2), - ] -) -def tracer(request): - global_config = request.param.get("global_config", dict()) - partial_flush_enabled = request.param.get("partial_flush_enabled") - partial_flush_min_spans = request.param.get("partial_flush_min_spans") - with override_global_config(global_config): - tracer = Tracer() - kwargs = dict() - if partial_flush_enabled: - 
kwargs["partial_flush_enabled"] = partial_flush_enabled - if partial_flush_min_spans: - kwargs["partial_flush_min_spans"] = partial_flush_min_spans - tracer.configure(**kwargs) - yield tracer - tracer.shutdown() - - -@pytest.mark.snapshot() def test_trace_tags_multispan(): - tracer = Tracer() headers = { "x-datadog-trace-id": "1234", "x-datadog-parent-id": "5678", @@ -61,15 +32,8 @@ def test_trace_tags_multispan(): gc.finish() -@pytest.fixture -def downstream_tracer(): - tracer = Tracer() - yield tracer - tracer.shutdown() - - @pytest.mark.snapshot() -def test_sampling_decision_downstream(downstream_tracer): +def test_sampling_decision_downstream(): """ Ensures that set_tag(MANUAL_DROP_KEY) on a span causes the sampling decision meta and sampling priority metric to be set appropriately indicating rejection @@ -81,7 +45,7 @@ def test_sampling_decision_downstream(downstream_tracer): "x-datadog-tags": "_dd.p.dm=-1", } kept_trace_context = HTTPPropagator.extract(headers_indicating_kept_trace) - downstream_tracer.context_provider.activate(kept_trace_context) + tracer.context_provider.activate(kept_trace_context) - with downstream_tracer.trace("p", service="downstream") as span_to_reject: + with tracer.trace("p", service="downstream") as span_to_reject: span_to_reject.set_tag(MANUAL_DROP_KEY) diff --git a/tests/integration/test_sampling.py b/tests/integration/test_sampling.py index 234d12d283c..66496342dee 100644 --- a/tests/integration/test_sampling.py +++ b/tests/integration/test_sampling.py @@ -1,4 +1,3 @@ -import mock import pytest from ddtrace._trace.sampler import DatadogSampler @@ -7,10 +6,9 @@ from ddtrace.constants import MANUAL_DROP_KEY from ddtrace.constants import MANUAL_KEEP_KEY from ddtrace.internal.writer import AgentWriter +from tests.integration.utils import AGENT_VERSION from tests.utils import snapshot -from .test_integration import AGENT_VERSION - pytestmark = pytest.mark.skipif(AGENT_VERSION != "testagent", reason="Tests only compatible with a 
testagent") RESOURCE = "mycoolre$ource" # codespell:ignore @@ -19,6 +17,9 @@ def snapshot_parametrized_with_writers(f): def _patch(writer, tracer): + old_sampler = tracer._sampler + old_writer = tracer._writer + old_tags = tracer._tags if writer == "sync": writer = AgentWriter( tracer.agent_trace_url, @@ -29,11 +30,13 @@ def _patch(writer, tracer): writer._headers = tracer._writer._headers else: writer = tracer._writer - tracer.configure(writer=writer) try: return f(writer, tracer) finally: - tracer.shutdown() + tracer.flush() + # Reset tracer configurations to avoid leaking state between tests + tracer.configure(sampler=old_sampler, writer=old_writer) + tracer._tags = old_tags wrapped = snapshot(include_tracer=True, token_override=f.__name__)(_patch) return pytest.mark.parametrize( @@ -298,10 +301,14 @@ def test_extended_sampling_float_special_case_match_star(writer, tracer): span.set_tag("tag", 20.1) +@pytest.mark.subprocess() def test_rate_limiter_on_spans(tracer): """ Ensure that the rate limiter is applied to spans """ + from ddtrace import tracer + from ddtrace.sampler import DatadogSampler + # Rate limit is only applied if a sample rate or trace sample rule is set tracer.configure(sampler=DatadogSampler(default_sample_rate=1, rate_limit=10)) spans = [] @@ -325,10 +332,16 @@ def test_rate_limiter_on_spans(tracer): assert dropped_span.context.sampling_priority < 0 +@pytest.mark.subprocess() def test_rate_limiter_on_long_running_spans(tracer): """ Ensure that the rate limiter is applied on increasing time intervals """ + import mock + + from ddtrace import tracer + from ddtrace.sampler import DatadogSampler + tracer.configure(sampler=DatadogSampler(rate_limit=5)) with mock.patch("ddtrace.internal.rate_limiter.time.monotonic_ns", return_value=1617333414): diff --git a/tests/integration/test_settings.py b/tests/integration/test_settings.py index 55e8d1e76d8..249b0211bb4 100644 --- a/tests/integration/test_settings.py +++ b/tests/integration/test_settings.py @@ 
-2,7 +2,7 @@ import pytest -from .test_integration import AGENT_VERSION +from tests.integration.utils import AGENT_VERSION def _get_telemetry_config_items(events, item_name): diff --git a/tests/integration/test_trace_stats.py b/tests/integration/test_trace_stats.py index 0e19a44f1dd..f1eefcea709 100644 --- a/tests/integration/test_trace_stats.py +++ b/tests/integration/test_trace_stats.py @@ -5,25 +5,23 @@ import mock import pytest -from ddtrace import Tracer from ddtrace._trace.sampler import DatadogSampler from ddtrace._trace.sampler import SamplingRule from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.ext import http from ddtrace.internal.processor.stats import SpanStatsProcessorV06 +from tests.integration.utils import AGENT_VERSION +from tests.utils import DummyTracer from tests.utils import override_global_config -from .test_integration import AGENT_VERSION - pytestmark = pytest.mark.skipif(AGENT_VERSION != "testagent", reason="Tests only compatible with a testagent") @pytest.fixture def stats_tracer(): - # type: (float) -> Generator[Tracer, None, None] with override_global_config(dict(_trace_compute_stats=True)): - tracer = Tracer() + tracer = DummyTracer() yield tracer tracer.shutdown() @@ -70,7 +68,7 @@ def test_compute_stats_default_and_configure(run_python_code_in_subprocess, envv """Ensure stats computation can be enabled.""" # Test enabling via `configure` - t = Tracer() + t = DummyTracer() assert not t._compute_stats assert not any(isinstance(p, SpanStatsProcessorV06) for p in t._span_processors) t.configure(compute_stats_enabled=True) @@ -100,14 +98,16 @@ def test_compute_stats_default_and_configure(run_python_code_in_subprocess, envv assert status == 0, out + err -def test_apm_opt_out_compute_stats_and_configure(run_python_code_in_subprocess): +@pytest.mark.subprocess(err=None) +def test_apm_opt_out_compute_stats_and_configure(): """ Ensure stats computation is disabled, but reported as enabled, if APM is opt-out. 
""" + from ddtrace import tracer as t + from ddtrace.internal.processor.stats import SpanStatsProcessorV06 # Test via `configure` - t = Tracer() assert not t._compute_stats assert not any(isinstance(p, SpanStatsProcessorV06) for p in t._span_processors) t.configure(appsec_enabled=True, appsec_standalone_enabled=True) @@ -116,8 +116,9 @@ def test_apm_opt_out_compute_stats_and_configure(run_python_code_in_subprocess): assert not t._compute_stats # but it's reported as enabled assert t._writer._headers.get("Datadog-Client-Computed-Stats") == "yes" - t.configure(appsec_enabled=False, appsec_standalone_enabled=False) + +def test_apm_opt_out_compute_stats_and_configure_env(run_python_code_in_subprocess): # Test via environment variable env = os.environ.copy() env.update({"DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED": "true", "DD_APPSEC_ENABLED": "true"}) diff --git a/tests/integration/test_tracemethods.py b/tests/integration/test_tracemethods.py index 8568cbc3737..15129c56161 100644 --- a/tests/integration/test_tracemethods.py +++ b/tests/integration/test_tracemethods.py @@ -5,7 +5,7 @@ import pytest -from .test_integration import AGENT_VERSION +from tests.integration.utils import AGENT_VERSION pytestmark = pytest.mark.skipif(AGENT_VERSION != "testagent", reason="Tests only compatible with a testagent") diff --git a/tests/integration/utils.py b/tests/integration/utils.py index dea4a091ed4..21822ea6e59 100644 --- a/tests/integration/utils.py +++ b/tests/integration/utils.py @@ -33,7 +33,7 @@ def send_invalid_payload_and_get_logs(encoder_cls=BadEncoder): client.encoder = encoder_cls() with mock.patch("ddtrace.internal.writer.writer.log") as log: t.trace("asdf").finish() - t.shutdown() + t.flush() return log diff --git a/tests/profiling/test_profiler.py b/tests/profiling/test_profiler.py index 7f98bbf6aa8..879a50afd54 100644 --- a/tests/profiling/test_profiler.py +++ b/tests/profiling/test_profiler.py @@ -232,36 +232,66 @@ def _check_url(prof, url, api_key, 
endpoint_path="profiling/v1/input"): pytest.fail("Unable to find HTTP exporter") +@pytest.mark.subprocess() def test_tracer_url(): - t = ddtrace.Tracer() + import os + + from ddtrace import tracer as t + from ddtrace.profiling import profiler + from tests.profiling.test_profiler import _check_url + t.configure(hostname="foobar") prof = profiler.Profiler(tracer=t) _check_url(prof, "http://foobar:8126", os.environ.get("DD_API_KEY")) +@pytest.mark.subprocess() def test_tracer_url_https(): - t = ddtrace.Tracer() + import os + + from ddtrace import tracer as t + from ddtrace.profiling import profiler + from tests.profiling.test_profiler import _check_url + t.configure(hostname="foobar", https=True) prof = profiler.Profiler(tracer=t) _check_url(prof, "https://foobar:8126", os.environ.get("DD_API_KEY")) +@pytest.mark.subprocess() def test_tracer_url_uds_hostname(): - t = ddtrace.Tracer() + import os + + from ddtrace import tracer as t + from ddtrace.profiling import profiler + from tests.profiling.test_profiler import _check_url + t.configure(hostname="foobar", uds_path="/foobar") prof = profiler.Profiler(tracer=t) _check_url(prof, "unix://foobar/foobar", os.environ.get("DD_API_KEY")) +@pytest.mark.subprocess() def test_tracer_url_uds(): - t = ddtrace.Tracer() + import os + + from ddtrace import tracer as t + from ddtrace.profiling import profiler + from tests.profiling.test_profiler import _check_url + t.configure(uds_path="/foobar") prof = profiler.Profiler(tracer=t) _check_url(prof, "unix:///foobar", os.environ.get("DD_API_KEY")) +@pytest.mark.subprocess() def test_tracer_url_configure_after(): - t = ddtrace.Tracer() + import os + + from ddtrace import tracer as t + from ddtrace.profiling import profiler + from tests.profiling.test_profiler import _check_url + prof = profiler.Profiler(tracer=t) t.configure(hostname="foobar") _check_url(prof, "http://foobar:8126", os.environ.get("DD_API_KEY")) @@ -276,11 +306,10 @@ def test_env_no_api_key(): def test_env_endpoint_url(): 
import os - import ddtrace + from ddtrace import tracer as t from ddtrace.profiling import profiler from tests.profiling.test_profiler import _check_url - t = ddtrace.Tracer() prof = profiler.Profiler(tracer=t) _check_url(prof, "http://foobar:123", os.environ.get("DD_API_KEY")) diff --git a/tests/snapshots/tests.integration.test_integration_snapshots.test_trace_with_wrong_metrics_types_not_sent.json b/tests/snapshots/tests.integration.test_integration_snapshots.test_trace_with_wrong_metrics_types_not_sent.json deleted file mode 100644 index a1a67aeefc8..00000000000 --- a/tests/snapshots/tests.integration.test_integration_snapshots.test_trace_with_wrong_metrics_types_not_sent.json +++ /dev/null @@ -1,25 +0,0 @@ -[[ - { - "name": "parent", - "service": "tests.integration", - "resource": "parent", - "trace_id": 0, - "span_id": 1, - "parent_id": 0, - "type": "", - "error": 0, - "meta": { - "_dd.p.dm": "-0", - "_dd.p.tid": "65f8a77100000000", - "language": "python", - "runtime-id": "005360373bf04c7fb732555994db4f78" - }, - "metrics": { - "_dd.top_level": 1, - "_dd.tracer_kr": 1.0, - "_sampling_priority_v1": 1, - "process_id": 5837 - }, - "duration": 1004386709, - "start": 1710794609240060721 - }]] diff --git a/tests/snapshots/tests.integration.test_integration_snapshots.test_tracetagsprocessor_only_adds_new_tags.json b/tests/snapshots/tests.integration.test_integration_snapshots.test_tracetagsprocessor_only_adds_new_tags.json index 9298b2342cd..08108bdeff9 100644 --- a/tests/snapshots/tests.integration.test_integration_snapshots.test_tracetagsprocessor_only_adds_new_tags.json +++ b/tests/snapshots/tests.integration.test_integration_snapshots.test_tracetagsprocessor_only_adds_new_tags.json @@ -1,7 +1,7 @@ [[ { "name": "web.request", - "service": "tests.integration", + "service": "ddtrace_subprocess_dir", "resource": "web.request", "trace_id": 0, "span_id": 1, diff --git a/tests/snapshots/tests.integration.test_propagation.test_trace_tags_multispan.json 
b/tests/snapshots/tests.integration.test_propagation.test_trace_tags_multispan.json new file mode 100644 index 00000000000..000f5f143c7 --- /dev/null +++ b/tests/snapshots/tests.integration.test_propagation.test_trace_tags_multispan.json @@ -0,0 +1,70 @@ +[[ + { + "name": "p", + "service": "tests.integration", + "resource": "p", + "trace_id": 0, + "span_id": 1, + "parent_id": 5678, + "type": "", + "error": 0, + "meta": { + "_dd.p.dm": "-1", + "_dd.p.test": "value", + "language": "python", + "runtime-id": "65e7346cd27a4fcbb1a2ccb98722fed3" + }, + "metrics": { + "_dd.top_level": 1, + "_dd.tracer_kr": 1.0, + "_sampling_priority_v1": 1, + "process_id": 4531 + }, + "duration": 48000, + "start": 1735013581776627000 + }, + { + "name": "c1", + "service": "tests.integration", + "resource": "c1", + "trace_id": 0, + "span_id": 2, + "parent_id": 1, + "type": "", + "error": 0, + "meta": { + "_dd.p.test": "value" + }, + "duration": 5000, + "start": 1735013581776649000 + }, + { + "name": "c2", + "service": "tests.integration", + "resource": "c2", + "trace_id": 0, + "span_id": 3, + "parent_id": 1, + "type": "", + "error": 0, + "meta": { + "_dd.p.test": "value" + }, + "duration": 7000, + "start": 1735013581776662000 + }, + { + "name": "gc", + "service": "tests.integration", + "resource": "gc", + "trace_id": 0, + "span_id": 4, + "parent_id": 3, + "type": "", + "error": 0, + "meta": { + "_dd.p.test": "value" + }, + "duration": 11000, + "start": 1735013581776667000 + }]] diff --git a/tests/tracer/test_tracer.py b/tests/tracer/test_tracer.py index cae00259086..164b04ee5d1 100644 --- a/tests/tracer/test_tracer.py +++ b/tests/tracer/test_tracer.py @@ -2088,3 +2088,27 @@ def test_gc_not_used_on_root_spans(): # print("referrers:", [f"object {objects.index(r)}" for r in gc.get_referrers(obj)[:-2]]) # print("referents:", [f"object {objects.index(r)}" if r in objects else r for r in gc.get_referents(obj)]) # print("--------------------") + + +@pytest.mark.subprocess() +def 
test_multiple_tracer_instances(): + import warnings + + with warnings.catch_warnings(record=True) as warns: + warnings.simplefilter("always") + import ddtrace + + assert ddtrace.tracer is not None + for w in warns: + # Ensure the warning is not about multiple tracer instances is not logged when importing ddtrace + assert "Support for multiple Tracer instances is deprecated" not in str(w.message) + + warns.clear() + t = ddtrace.Tracer() + # TODO: Update this assertion when the deprecation is removed and the tracer becomes a singleton + assert t is not ddtrace.tracer + assert len(warns) == 1 + assert ( + str(warns[0].message) == "Support for multiple Tracer instances is deprecated and will be " + "removed in version '3.0.0'. Use ddtrace.tracer instead." + ) diff --git a/tests/utils.py b/tests/utils.py index 5b98fe42d1b..c2ac19324f7 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1156,11 +1156,6 @@ def wrapper(wrapped, instance, args, kwargs): else: clsname = "" - if include_tracer: - tracer = Tracer() - else: - tracer = ddtrace.tracer - module = inspect.getmodule(wrapped) # Use the fully qualified function name as a unique test token to @@ -1174,14 +1169,14 @@ def wrapper(wrapped, instance, args, kwargs): with snapshot_context( token, ignores=ignores, - tracer=tracer, + tracer=ddtrace.tracer, async_mode=async_mode, variants=variants, wait_for_num_traces=wait_for_num_traces, ): # Run the test. 
if include_tracer: - kwargs["tracer"] = tracer + kwargs["tracer"] = ddtrace.tracer return wrapped(*args, **kwargs) return wrapper From d696c67d9c597d74ed01078c5cf0274f434ecbc5 Mon Sep 17 00:00:00 2001 From: Christophe Papazian <114495376+christophe-papazian@users.noreply.github.com> Date: Thu, 16 Jan 2025 09:49:47 +0100 Subject: [PATCH 11/16] feat(asm): update the ATO support with the new specifications (#11932) - Add support for ATO V3 RFC by adding missing tags - Update unit threat tests accordingly APPSEC-56315 Once merged, this will also be tested with ATO V3 system tests. https://github.com/DataDog/system-tests/pull/3828 ## Checklist - [x] PR author has checked that all the criteria below are met - The PR description includes an overview of the change - The PR description articulates the motivation for the change - The change includes tests OR the PR description describes a testing strategy - The PR description notes risks associated with the change, if any - Newly-added code is easy to change - The change follows the [library release note guidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html) - The change includes or references documentation updates if necessary - Backport labels are set (if [applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)) ## Reviewer Checklist - [x] Reviewer has checked that all the criteria below are met - Title is accurate - All changes are related to the pull request's stated goal - Avoids breaking [API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces) changes - Testing strategy adequately addresses listed risks - Newly-added code is easy to change - Release note makes sense to a user of the library - If necessary, author has acknowledged and discussed the performance implications of this PR as reported in the benchmarks PR comment - Backport labels are set in a manner that is consistent with the [release branch maintenance 
policy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting) --- ddtrace/appsec/_constants.py | 2 + ddtrace/appsec/_trace_utils.py | 59 ++++++++++--------- .../notes/ATO_V3-e7f73ecf00d1474b.yaml | 4 ++ .../appsec/contrib_appsec/fastapi_app/app.py | 12 ++-- tests/appsec/contrib_appsec/flask_app/app.py | 12 ++-- tests/appsec/contrib_appsec/utils.py | 5 ++ 6 files changed, 56 insertions(+), 38 deletions(-) create mode 100644 releasenotes/notes/ATO_V3-e7f73ecf00d1474b.yaml diff --git a/ddtrace/appsec/_constants.py b/ddtrace/appsec/_constants.py index 45a96834cc1..d16de0e1379 100644 --- a/ddtrace/appsec/_constants.py +++ b/ddtrace/appsec/_constants.py @@ -75,6 +75,8 @@ class APPSEC(metaclass=Constant_Class): CUSTOM_EVENT_PREFIX: Literal["appsec.events"] = "appsec.events" USER_LOGIN_EVENT_PREFIX: Literal["_dd.appsec.events.users.login"] = "_dd.appsec.events.users.login" USER_LOGIN_EVENT_PREFIX_PUBLIC: Literal["appsec.events.users.login"] = "appsec.events.users.login" + USER_LOGIN_USERID: Literal["_dd.appsec.usr.id"] = "_dd.appsec.usr.id" + USER_LOGIN_USERNAME: Literal["_dd.appsec.usr.login"] = "_dd.appsec.usr.login" USER_LOGIN_EVENT_SUCCESS_TRACK: Literal[ "appsec.events.users.login.success.track" ] = "appsec.events.users.login.success.track" diff --git a/ddtrace/appsec/_trace_utils.py b/ddtrace/appsec/_trace_utils.py index 3603c18533c..8609344f05a 100644 --- a/ddtrace/appsec/_trace_utils.py +++ b/ddtrace/appsec/_trace_utils.py @@ -71,6 +71,9 @@ def _track_user_login_common( span.set_tag_str("%s.%s" % (tag_metadata_prefix, k), str(v)) if login: + span.set_tag_str(f"{APPSEC.USER_LOGIN_EVENT_PREFIX_PUBLIC}.{success_str}.usr.login", login) + if login_events_mode != LOGIN_EVENTS_MODE.SDK: + span.set_tag_str(APPSEC.USER_LOGIN_USERNAME, login) span.set_tag_str("%s.login" % tag_prefix, login) if email: @@ -130,6 +133,8 @@ def track_user_login_success_event( if in_asm_context(): call_waf_callback(custom_data={"REQUEST_USER_ID": str(user_id), "LOGIN_SUCCESS": 
real_mode}) + if login_events_mode != LOGIN_EVENTS_MODE.SDK: + span.set_tag_str(APPSEC.USER_LOGIN_USERID, str(user_id)) set_user(tracer, user_id, name, email, scope, role, session_id, propagate, span) @@ -154,7 +159,7 @@ def track_user_login_failure_event( real_mode = login_events_mode if login_events_mode != LOGIN_EVENTS_MODE.AUTO else asm_config._user_event_mode if real_mode == LOGIN_EVENTS_MODE.DISABLED: return - span = _track_user_login_common(tracer, False, metadata, login_events_mode) + span = _track_user_login_common(tracer, False, metadata, login_events_mode, login) if not span: return if exists is not None: @@ -163,6 +168,8 @@ def track_user_login_failure_event( if user_id: if real_mode == LOGIN_EVENTS_MODE.ANON and isinstance(user_id, str): user_id = _hash_user_id(user_id) + if login_events_mode != LOGIN_EVENTS_MODE.SDK: + span.set_tag_str(APPSEC.USER_LOGIN_USERID, str(user_id)) span.set_tag_str("%s.failure.%s" % (APPSEC.USER_LOGIN_EVENT_PREFIX_PUBLIC, user.ID), str(user_id)) # if called from the SDK, set the login, email and name if login_events_mode in (LOGIN_EVENTS_MODE.SDK, LOGIN_EVENTS_MODE.AUTO): @@ -183,7 +190,7 @@ def track_user_signup_event( if span: success_str = "true" if success else "false" span.set_tag_str(APPSEC.USER_SIGNUP_EVENT, success_str) - span.set_tag_str(user.ID, user_id) + span.set_tag_str(user.ID, str(user_id)) _asm_manual_keep(span) # This is used to mark if the call was done from the SDK of the automatic login events @@ -295,23 +302,16 @@ def block_request_if_user_blocked(tracer: Tracer, userid: str) -> None: _asm_request_context.block_request() -def _on_django_login( - pin, - request, - user, - mode, - info_retriever, - django_config, -): +def _on_django_login(pin, request, user, mode, info_retriever, django_config): if user: from ddtrace.contrib.internal.django.compat import user_is_authenticated + user_id, user_extra = info_retriever.get_user_info( + login=django_config.include_user_login, + 
email=django_config.include_user_email, + name=django_config.include_user_realname, + ) if user_is_authenticated(user): - user_id, user_extra = info_retriever.get_user_info( - login=django_config.include_user_login, - email=django_config.include_user_email, - name=django_config.include_user_realname, - ) with pin.tracer.trace("django.contrib.auth.login", span_type=SpanTypes.AUTH): session_key = getattr(request, "session_key", None) track_user_login_success_event( @@ -324,8 +324,10 @@ def _on_django_login( ) else: # Login failed and the user is unknown (may exist or not) - user_id = info_retriever.get_userid() - track_user_login_failure_event(pin.tracer, user_id=user_id, login_events_mode=mode) + # DEV: DEAD CODE? + track_user_login_failure_event( + pin.tracer, user_id=user_id, login_events_mode=mode, login=user_extra.get("login", None) + ) def _on_django_auth(result_user, mode, kwargs, pin, info_retriever, django_config): @@ -344,17 +346,18 @@ def _on_django_auth(result_user, mode, kwargs, pin, info_retriever, django_confi if not result_user: with pin.tracer.trace("django.contrib.auth.login", span_type=SpanTypes.AUTH): exists = info_retriever.user_exists() - if exists: - user_id, user_extra = info_retriever.get_user_info( - login=django_config.include_user_login, - email=django_config.include_user_email, - name=django_config.include_user_realname, - ) - track_user_login_failure_event( - pin.tracer, user_id=user_id, login_events_mode=mode, exists=True, **user_extra - ) - else: - track_user_login_failure_event(pin.tracer, user_id=user_id, login_events_mode=mode, exists=False) + user_id_found, user_extra = info_retriever.get_user_info( + login=django_config.include_user_login, + email=django_config.include_user_email, + name=django_config.include_user_realname, + ) + if user_extra.get("login") is None: + user_extra["login"] = user_id + user_id = user_id_found or user_id + + track_user_login_failure_event( + pin.tracer, user_id=user_id, login_events_mode=mode, 
exists=exists, **user_extra + ) return False, None diff --git a/releasenotes/notes/ATO_V3-e7f73ecf00d1474b.yaml b/releasenotes/notes/ATO_V3-e7f73ecf00d1474b.yaml new file mode 100644 index 00000000000..0a2757dba6a --- /dev/null +++ b/releasenotes/notes/ATO_V3-e7f73ecf00d1474b.yaml @@ -0,0 +1,4 @@ +--- +features: + - | + ASM: This introduces full support for Automated user lifecycle tracking for login events (success and failure) diff --git a/tests/appsec/contrib_appsec/fastapi_app/app.py b/tests/appsec/contrib_appsec/fastapi_app/app.py index 2a97a919395..3403df6f844 100644 --- a/tests/appsec/contrib_appsec/fastapi_app/app.py +++ b/tests/appsec/contrib_appsec/fastapi_app/app.py @@ -235,23 +235,25 @@ def authenticate(username: str, password: str) -> Optional[str]: return USERS[username]["id"] else: appsec_trace_utils.track_user_login_failure_event( - tracer, user_id=USERS[username]["id"], exists=True, login_events_mode="auto" + tracer, user_id=USERS[username]["id"], exists=True, login_events_mode="auto", login=username ) return None appsec_trace_utils.track_user_login_failure_event( - tracer, user_id=username, exists=False, login_events_mode="auto" + tracer, user_id=username, exists=False, login_events_mode="auto", login=username ) return None - def login(user_id: str) -> None: + def login(user_id: str, username: str) -> None: """login user""" - appsec_trace_utils.track_user_login_success_event(tracer, user_id=user_id, login_events_mode="auto") + appsec_trace_utils.track_user_login_success_event( + tracer, user_id=user_id, login_events_mode="auto", login=username + ) username = request.query_params.get("username") password = request.query_params.get("password") user_id = authenticate(username=username, password=password) if user_id is not None: - login(user_id) + login(user_id, username) return HTMLResponse("OK") return HTMLResponse("login failure", status_code=401) diff --git a/tests/appsec/contrib_appsec/flask_app/app.py 
b/tests/appsec/contrib_appsec/flask_app/app.py index 5a5776cd098..aee42be7b54 100644 --- a/tests/appsec/contrib_appsec/flask_app/app.py +++ b/tests/appsec/contrib_appsec/flask_app/app.py @@ -188,22 +188,24 @@ def authenticate(username: str, password: str) -> Optional[str]: return USERS[username]["id"] else: appsec_trace_utils.track_user_login_failure_event( - tracer, user_id=USERS[username]["id"], exists=True, login_events_mode="auto" + tracer, user_id=USERS[username]["id"], exists=True, login_events_mode="auto", login=username ) return None appsec_trace_utils.track_user_login_failure_event( - tracer, user_id=username, exists=False, login_events_mode="auto" + tracer, user_id=username, exists=False, login_events_mode="auto", login=username ) return None - def login(user_id: str) -> None: + def login(user_id: str, login: str) -> None: """login user""" - appsec_trace_utils.track_user_login_success_event(tracer, user_id=user_id, login_events_mode="auto") + appsec_trace_utils.track_user_login_success_event( + tracer, user_id=user_id, login_events_mode="auto", login=login + ) username = request.args.get("username") password = request.args.get("password") user_id = authenticate(username=username, password=password) if user_id is not None: - login(user_id) + login(user_id, username) return "OK" return "login failure", 401 diff --git a/tests/appsec/contrib_appsec/utils.py b/tests/appsec/contrib_appsec/utils.py index 312c15b5fed..e614e33a51b 100644 --- a/tests/appsec/contrib_appsec/utils.py +++ b/tests/appsec/contrib_appsec/utils.py @@ -1478,9 +1478,14 @@ def test_auto_user_events( assert get_tag("_dd.appsec.events.users.login.failure.sdk") == "true" else: assert get_tag("_dd.appsec.events.users.login.success.sdk") is None + if mode == "identification": + assert get_tag("_dd.appsec.usr.login") == user else: assert get_tag("appsec.events.users.login.success.track") == "true" assert get_tag("usr.id") == user_id_hash + assert get_tag("_dd.appsec.usr.id") == user_id_hash + if 
mode == "identification": + assert get_tag("_dd.appsec.usr.login") == user # check for manual instrumentation tag in manual instrumented frameworks if interface.name in ["flask", "fastapi"]: assert get_tag("_dd.appsec.events.users.login.success.sdk") == "true" From c46c3028d8b89749b0bc63ea4b2e215d4d75fe8b Mon Sep 17 00:00:00 2001 From: Alberto Vara Date: Thu, 16 Jan 2025 10:20:37 +0100 Subject: [PATCH 12/16] chore(appsec): migrate test integrations to gitlab (#11931) Partial migration of the tests in `appsec/integrations/` ## Checklist - [x] PR author has checked that all the criteria below are met - The PR description includes an overview of the change - The PR description articulates the motivation for the change - The change includes tests OR the PR description describes a testing strategy - The PR description notes risks associated with the change, if any - Newly-added code is easy to change - The change follows the [library release note guidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html) - The change includes or references documentation updates if necessary - Backport labels are set (if [applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)) ## Reviewer Checklist - [x] Reviewer has checked that all the criteria below are met - Title is accurate - All changes are related to the pull request's stated goal - Avoids breaking [API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces) changes - Testing strategy adequately addresses listed risks - Newly-added code is easy to change - Release note makes sense to a user of the library - If necessary, author has acknowledged and discussed the performance implications of this PR as reported in the benchmarks PR comment - Backport labels are set in a manner that is consistent with the [release branch maintenance policy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting) --- .circleci/config.templ.yml | 4 +- 
.riot/requirements/1147cef.txt | 62 ------------------ .riot/requirements/1221a04.txt | 36 ----------- .riot/requirements/131666a.txt | 36 ----------- .riot/requirements/16ed652.txt | 33 ---------- .riot/requirements/173e759.txt | 64 ------------------- .riot/requirements/17b5eda.txt | 32 ---------- .riot/requirements/1a1ddb4.txt | 36 ----------- .riot/requirements/1b0d603.txt | 64 ------------------- .riot/requirements/1e2d655.txt | 60 ----------------- .riot/requirements/1e81527.txt | 36 ----------- .riot/requirements/1fb9968.txt | 59 ----------------- .riot/requirements/2b94418.txt | 34 ---------- .riot/requirements/5e31227.txt | 23 +++++++ .riot/requirements/628e8fe.txt | 23 +++++++ .riot/requirements/8dd53b1.txt | 25 ++++++++ .riot/requirements/968fdc9.txt | 23 +++++++ .riot/requirements/e53ccba.txt | 33 ---------- hatch.toml | 46 ++++++++++++- riotfile.py | 40 +----------- tests/appsec/app.py | 2 +- .../integrations/django_tests/__init__.py | 0 .../integrations/flask_tests/__init__.py | 0 .../module_with_import_errors.py | 0 .../test_flask_remoteconfig.py | 11 ++-- .../test_gunicorn_handlers.py | 0 ...est_iast_flask_entrypoint_iast_patches.py} | 46 ++++++++++--- .../test_iast_flask_patching.py} | 6 +- .../test_iast_flask_telemetry.py} | 0 .../test_iast_langchain.py} | 0 .../test_iast_psycopg2.py} | 0 .../integrations/{ => flask_tests}/utils.py | 6 +- tests/appsec/suitespec.yml | 17 ++++- 33 files changed, 209 insertions(+), 648 deletions(-) delete mode 100644 .riot/requirements/1147cef.txt delete mode 100644 .riot/requirements/1221a04.txt delete mode 100644 .riot/requirements/131666a.txt delete mode 100644 .riot/requirements/16ed652.txt delete mode 100644 .riot/requirements/173e759.txt delete mode 100644 .riot/requirements/17b5eda.txt delete mode 100644 .riot/requirements/1a1ddb4.txt delete mode 100644 .riot/requirements/1b0d603.txt delete mode 100644 .riot/requirements/1e2d655.txt delete mode 100644 .riot/requirements/1e81527.txt delete mode 100644 
.riot/requirements/1fb9968.txt delete mode 100644 .riot/requirements/2b94418.txt create mode 100644 .riot/requirements/5e31227.txt create mode 100644 .riot/requirements/628e8fe.txt create mode 100644 .riot/requirements/8dd53b1.txt create mode 100644 .riot/requirements/968fdc9.txt delete mode 100644 .riot/requirements/e53ccba.txt create mode 100644 tests/appsec/integrations/django_tests/__init__.py create mode 100644 tests/appsec/integrations/flask_tests/__init__.py rename tests/appsec/integrations/{ => flask_tests}/module_with_import_errors.py (100%) rename tests/appsec/integrations/{ => flask_tests}/test_flask_remoteconfig.py (94%) rename tests/appsec/integrations/{ => flask_tests}/test_gunicorn_handlers.py (100%) rename tests/appsec/integrations/{test_flask_entrypoint_iast_patches.py => flask_tests/test_iast_flask_entrypoint_iast_patches.py} (85%) rename tests/appsec/integrations/{test_flask_iast_patching.py => flask_tests/test_iast_flask_patching.py} (93%) rename tests/appsec/integrations/{test_flask_telemetry.py => flask_tests/test_iast_flask_telemetry.py} (100%) rename tests/appsec/integrations/{test_langchain.py => flask_tests/test_iast_langchain.py} (100%) rename tests/appsec/integrations/{test_psycopg2.py => flask_tests/test_iast_psycopg2.py} (100%) rename tests/appsec/integrations/{ => flask_tests}/utils.py (100%) diff --git a/.circleci/config.templ.yml b/.circleci/config.templ.yml index 05da52d643a..29cb1d886db 100644 --- a/.circleci/config.templ.yml +++ b/.circleci/config.templ.yml @@ -404,12 +404,12 @@ jobs: paths: - "." 
- appsec_integrations: + appsec_integrations_pygoat: <<: *machine_executor parallelism: 13 steps: - run_test: - pattern: 'appsec_integrations' + pattern: 'appsec_integrations_pygoat' snapshot: true run_agent_checks: false docker_services: "pygoat" diff --git a/.riot/requirements/1147cef.txt b/.riot/requirements/1147cef.txt deleted file mode 100644 index a760b2a10c4..00000000000 --- a/.riot/requirements/1147cef.txt +++ /dev/null @@ -1,62 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --no-annotate --resolver=backtracking .riot/requirements/1147cef.in -# -aiohttp==3.9.1 -aiosignal==1.3.1 -annotated-types==0.6.0 -anyio==4.2.0 -async-timeout==4.0.3 -attrs==23.2.0 -blinker==1.7.0 -certifi==2023.11.17 -charset-normalizer==3.3.2 -click==8.1.7 -coverage[toml]==7.4.0 -dataclasses-json==0.6.3 -exceptiongroup==1.2.0 -flask==3.0.0 -frozenlist==1.4.1 -greenlet==3.0.3 -gunicorn==21.2.0 -hypothesis==6.45.0 -idna==3.6 -iniconfig==2.0.0 -itsdangerous==2.1.2 -jinja2==3.1.2 -jsonpatch==1.33 -jsonpointer==2.4 -langchain==0.0.354 -langchain-community==0.0.8 -langchain-core==0.1.5 -langchain-experimental==0.0.47 -langsmith==0.0.77 -markupsafe==2.1.3 -marshmallow==3.20.1 -mock==5.1.0 -multidict==6.0.4 -mypy-extensions==1.0.0 -numpy==1.26.3 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -psycopg2-binary==2.9.9 -pydantic==2.5.3 -pydantic-core==2.14.6 -pytest==7.4.4 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pyyaml==6.0.1 -requests==2.31.0 -sniffio==1.3.0 -sortedcontainers==2.4.0 -sqlalchemy==2.0.25 -tenacity==8.2.3 -tomli==2.0.1 -typing-extensions==4.9.0 -typing-inspect==0.9.0 -urllib3==2.1.0 -werkzeug==3.0.1 -yarl==1.9.4 diff --git a/.riot/requirements/1221a04.txt b/.riot/requirements/1221a04.txt deleted file mode 100644 index b07a317c8ce..00000000000 --- a/.riot/requirements/1221a04.txt +++ /dev/null @@ -1,36 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.7 -# by the following command: -# 
-# pip-compile --no-annotate --resolver=backtracking .riot/requirements/1221a04.in -# -attrs==23.1.0 -certifi==2023.7.22 -charset-normalizer==3.3.2 -click==8.1.7 -coverage[toml]==7.2.7 -exceptiongroup==1.1.3 -flask==2.2.5 -gunicorn==21.2.0 -hypothesis==6.45.0 -idna==3.4 -importlib-metadata==6.7.0 -iniconfig==2.0.0 -itsdangerous==2.1.2 -jinja2==3.1.2 -markupsafe==2.1.3 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.2.0 -psycopg2-binary==2.9.9 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.11.1 -requests==2.31.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -typing-extensions==4.7.1 -urllib3==2.0.7 -werkzeug==2.2.3 -zipp==3.15.0 diff --git a/.riot/requirements/131666a.txt b/.riot/requirements/131666a.txt deleted file mode 100644 index e2346f500df..00000000000 --- a/.riot/requirements/131666a.txt +++ /dev/null @@ -1,36 +0,0 @@ -# -# This file is autogenerated by pip-compile with python 3.9 -# To update, run: -# -# pip-compile --no-annotate --resolver=backtracking .riot/requirements/131666a.in -# -attrs==23.1.0 -blinker==1.7.0 -certifi==2023.7.22 -charset-normalizer==3.3.2 -click==8.1.7 -coverage[toml]==7.3.2 -exceptiongroup==1.1.3 -flask==2.3.3 -gunicorn==21.2.0 -hypothesis==6.45.0 -idna==3.4 -importlib-metadata==6.8.0 -iniconfig==2.0.0 -itsdangerous==2.1.2 -jinja2==3.1.2 -markupsafe==2.1.3 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -psycopg2-binary==2.9.9 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -requests==2.31.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==2.1.0 -werkzeug==3.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/16ed652.txt b/.riot/requirements/16ed652.txt deleted file mode 100644 index e4914dd4ab4..00000000000 --- a/.riot/requirements/16ed652.txt +++ /dev/null @@ -1,33 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/16ed652.in -# -attrs==23.1.0 -certifi==2023.7.22 -charset-normalizer==3.3.2 
-click==7.1.2 -coverage[toml]==7.3.2 -exceptiongroup==1.1.3 -flask==1.1.4 -gunicorn==21.2.0 -hypothesis==6.45.0 -idna==3.4 -iniconfig==2.0.0 -itsdangerous==1.1.0 -jinja2==2.11.3 -markupsafe==1.1.1 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -psycopg2-binary==2.9.9 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -requests==2.31.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==2.1.0 -werkzeug==1.0.1 diff --git a/.riot/requirements/173e759.txt b/.riot/requirements/173e759.txt deleted file mode 100644 index 305b7740f25..00000000000 --- a/.riot/requirements/173e759.txt +++ /dev/null @@ -1,64 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/173e759.in -# -aiohttp==3.9.1 -aiosignal==1.3.1 -annotated-types==0.6.0 -anyio==4.2.0 -async-timeout==4.0.3 -attrs==23.2.0 -blinker==1.7.0 -certifi==2023.11.17 -charset-normalizer==3.3.2 -click==8.1.7 -coverage[toml]==7.4.0 -dataclasses-json==0.6.3 -exceptiongroup==1.2.0 -flask==3.0.0 -frozenlist==1.4.1 -greenlet==3.0.3 -gunicorn==21.2.0 -hypothesis==6.45.0 -idna==3.6 -importlib-metadata==7.0.1 -iniconfig==2.0.0 -itsdangerous==2.1.2 -jinja2==3.1.2 -jsonpatch==1.33 -jsonpointer==2.4 -langchain==0.0.354 -langchain-community==0.0.8 -langchain-core==0.1.5 -langchain-experimental==0.0.47 -langsmith==0.0.77 -markupsafe==2.1.3 -marshmallow==3.20.1 -mock==5.1.0 -multidict==6.0.4 -mypy-extensions==1.0.0 -numpy==1.24.4 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -psycopg2-binary==2.9.9 -pydantic==2.5.3 -pydantic-core==2.14.6 -pytest==7.4.4 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pyyaml==6.0.1 -requests==2.31.0 -sniffio==1.3.0 -sortedcontainers==2.4.0 -sqlalchemy==2.0.25 -tenacity==8.2.3 -tomli==2.0.1 -typing-extensions==4.9.0 -typing-inspect==0.9.0 -urllib3==2.1.0 -werkzeug==3.0.1 -yarl==1.9.4 -zipp==3.17.0 diff --git a/.riot/requirements/17b5eda.txt b/.riot/requirements/17b5eda.txt deleted file mode 
100644 index 6ae81a00705..00000000000 --- a/.riot/requirements/17b5eda.txt +++ /dev/null @@ -1,32 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/17b5eda.in -# -attrs==23.1.0 -blinker==1.7.0 -certifi==2023.7.22 -charset-normalizer==3.3.2 -click==8.1.7 -coverage[toml]==7.3.2 -flask==2.3.3 -gunicorn==21.2.0 -hypothesis==6.45.0 -idna==3.4 -iniconfig==2.0.0 -itsdangerous==2.1.2 -jinja2==3.1.2 -markupsafe==2.1.3 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -psycopg2-binary==2.9.9 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -requests==2.31.0 -sortedcontainers==2.4.0 -urllib3==2.1.0 -werkzeug==3.0.1 diff --git a/.riot/requirements/1a1ddb4.txt b/.riot/requirements/1a1ddb4.txt deleted file mode 100644 index 3e64e630c33..00000000000 --- a/.riot/requirements/1a1ddb4.txt +++ /dev/null @@ -1,36 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.7 -# by the following command: -# -# pip-compile --no-annotate --resolver=backtracking .riot/requirements/1a1ddb4.in -# -attrs==23.1.0 -certifi==2023.7.22 -charset-normalizer==3.3.2 -click==7.1.2 -coverage[toml]==7.2.7 -exceptiongroup==1.1.3 -flask==1.1.4 -gunicorn==21.2.0 -hypothesis==6.45.0 -idna==3.4 -importlib-metadata==6.7.0 -iniconfig==2.0.0 -itsdangerous==1.1.0 -jinja2==2.11.3 -markupsafe==1.1.1 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.2.0 -psycopg2-binary==2.9.9 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.11.1 -requests==2.31.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -typing-extensions==4.7.1 -urllib3==2.0.7 -werkzeug==1.0.1 -zipp==3.15.0 diff --git a/.riot/requirements/1b0d603.txt b/.riot/requirements/1b0d603.txt deleted file mode 100644 index c6dfa7f6fae..00000000000 --- a/.riot/requirements/1b0d603.txt +++ /dev/null @@ -1,64 +0,0 @@ -# -# This file is autogenerated by pip-compile with python 3.9 -# To update, run: -# -# pip-compile --no-annotate 
--resolver=backtracking .riot/requirements/1b0d603.in -# -aiohttp==3.9.1 -aiosignal==1.3.1 -annotated-types==0.6.0 -anyio==4.2.0 -async-timeout==4.0.3 -attrs==23.2.0 -blinker==1.7.0 -certifi==2023.11.17 -charset-normalizer==3.3.2 -click==8.1.7 -coverage[toml]==7.4.0 -dataclasses-json==0.6.3 -exceptiongroup==1.2.0 -flask==3.0.0 -frozenlist==1.4.1 -greenlet==3.0.3 -gunicorn==21.2.0 -hypothesis==6.45.0 -idna==3.6 -importlib-metadata==7.0.1 -iniconfig==2.0.0 -itsdangerous==2.1.2 -jinja2==3.1.2 -jsonpatch==1.33 -jsonpointer==2.4 -langchain==0.0.354 -langchain-community==0.0.8 -langchain-core==0.1.5 -langchain-experimental==0.0.47 -langsmith==0.0.77 -markupsafe==2.1.3 -marshmallow==3.20.1 -mock==5.1.0 -multidict==6.0.4 -mypy-extensions==1.0.0 -numpy==1.26.3 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -psycopg2-binary==2.9.9 -pydantic==2.5.3 -pydantic-core==2.14.6 -pytest==7.4.4 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pyyaml==6.0.1 -requests==2.31.0 -sniffio==1.3.0 -sortedcontainers==2.4.0 -sqlalchemy==2.0.25 -tenacity==8.2.3 -tomli==2.0.1 -typing-extensions==4.9.0 -typing-inspect==0.9.0 -urllib3==2.1.0 -werkzeug==3.0.1 -yarl==1.9.4 -zipp==3.17.0 diff --git a/.riot/requirements/1e2d655.txt b/.riot/requirements/1e2d655.txt deleted file mode 100644 index 7f6b56e2776..00000000000 --- a/.riot/requirements/1e2d655.txt +++ /dev/null @@ -1,60 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1e2d655.in -# -aiohappyeyeballs==2.4.0 -aiohttp==3.10.5 -aiosignal==1.3.1 -annotated-types==0.7.0 -anyio==4.4.0 -attrs==24.2.0 -blinker==1.8.2 -certifi==2024.7.4 -charset-normalizer==3.3.2 -click==8.1.7 -coverage[toml]==7.6.1 -dataclasses-json==0.6.7 -flask==3.0.3 -frozenlist==1.4.1 -greenlet==3.0.3 -gunicorn==23.0.0 -hypothesis==6.45.0 -idna==3.8 -iniconfig==2.0.0 -itsdangerous==2.2.0 -jinja2==3.1.4 -jsonpatch==1.33 -jsonpointer==3.0.0 -langchain==0.0.354 
-langchain-community==0.0.20 -langchain-core==0.1.23 -langchain-experimental==0.0.47 -langsmith==0.0.87 -markupsafe==2.1.5 -marshmallow==3.22.0 -mock==5.1.0 -multidict==6.0.5 -mypy-extensions==1.0.0 -numpy==1.26.4 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.5.0 -psycopg2-binary==2.9.9 -pydantic==2.8.2 -pydantic-core==2.20.1 -pytest==8.3.2 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pyyaml==6.0.2 -requests==2.32.3 -sniffio==1.3.1 -sortedcontainers==2.4.0 -sqlalchemy==2.0.32 -tenacity==8.5.0 -typing-extensions==4.12.2 -typing-inspect==0.9.0 -urllib3==2.2.2 -werkzeug==3.0.4 -yarl==1.9.4 diff --git a/.riot/requirements/1e81527.txt b/.riot/requirements/1e81527.txt deleted file mode 100644 index 3eae5fd518c..00000000000 --- a/.riot/requirements/1e81527.txt +++ /dev/null @@ -1,36 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1e81527.in -# -attrs==23.1.0 -blinker==1.7.0 -certifi==2023.7.22 -charset-normalizer==3.3.2 -click==8.1.7 -coverage[toml]==7.3.2 -exceptiongroup==1.1.3 -flask==2.3.3 -gunicorn==21.2.0 -hypothesis==6.45.0 -idna==3.4 -importlib-metadata==6.8.0 -iniconfig==2.0.0 -itsdangerous==2.1.2 -jinja2==3.1.2 -markupsafe==2.1.3 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -psycopg2-binary==2.9.9 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -requests==2.31.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==2.1.0 -werkzeug==3.0.1 -zipp==3.17.0 diff --git a/.riot/requirements/1fb9968.txt b/.riot/requirements/1fb9968.txt deleted file mode 100644 index 57524a248f5..00000000000 --- a/.riot/requirements/1fb9968.txt +++ /dev/null @@ -1,59 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --no-annotate .riot/requirements/1fb9968.in -# -aiohttp==3.9.1 -aiosignal==1.3.1 -annotated-types==0.6.0 -anyio==4.2.0 -attrs==23.2.0 -blinker==1.7.0 -certifi==2023.11.17 
-charset-normalizer==3.3.2 -click==8.1.7 -coverage[toml]==7.4.0 -dataclasses-json==0.6.3 -flask==3.0.0 -frozenlist==1.4.1 -greenlet==3.0.3 -gunicorn==21.2.0 -hypothesis==6.45.0 -idna==3.6 -iniconfig==2.0.0 -itsdangerous==2.1.2 -jinja2==3.1.2 -jsonpatch==1.33 -jsonpointer==2.4 -langchain==0.0.354 -langchain-community==0.0.8 -langchain-core==0.1.5 -langchain-experimental==0.0.47 -langsmith==0.0.77 -markupsafe==2.1.3 -marshmallow==3.20.1 -mock==5.1.0 -multidict==6.0.4 -mypy-extensions==1.0.0 -numpy==1.26.3 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -psycopg2-binary==2.9.9 -pydantic==2.5.3 -pydantic-core==2.14.6 -pytest==7.4.4 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -pyyaml==6.0.1 -requests==2.31.0 -sniffio==1.3.0 -sortedcontainers==2.4.0 -sqlalchemy==2.0.25 -tenacity==8.2.3 -typing-extensions==4.9.0 -typing-inspect==0.9.0 -urllib3==2.1.0 -werkzeug==3.0.1 -yarl==1.9.4 diff --git a/.riot/requirements/2b94418.txt b/.riot/requirements/2b94418.txt deleted file mode 100644 index a64003e98e9..00000000000 --- a/.riot/requirements/2b94418.txt +++ /dev/null @@ -1,34 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --no-annotate --resolver=backtracking .riot/requirements/2b94418.in -# -attrs==23.1.0 -blinker==1.7.0 -certifi==2023.7.22 -charset-normalizer==3.3.2 -click==8.1.7 -coverage[toml]==7.3.2 -exceptiongroup==1.1.3 -flask==2.3.3 -gunicorn==21.2.0 -hypothesis==6.45.0 -idna==3.4 -iniconfig==2.0.0 -itsdangerous==2.1.2 -jinja2==3.1.2 -markupsafe==2.1.3 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -psycopg2-binary==2.9.9 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -requests==2.31.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==2.1.0 -werkzeug==3.0.1 diff --git a/.riot/requirements/5e31227.txt b/.riot/requirements/5e31227.txt new file mode 100644 index 00000000000..a3815ab0d74 --- /dev/null +++ b/.riot/requirements/5e31227.txt @@ -0,0 +1,23 @@ +# +# This file is 
autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/5e31227.in +# +attrs==24.3.0 +certifi==2024.12.14 +charset-normalizer==3.4.1 +coverage[toml]==7.6.10 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +requests==2.32.3 +sortedcontainers==2.4.0 +urllib3==2.3.0 diff --git a/.riot/requirements/628e8fe.txt b/.riot/requirements/628e8fe.txt new file mode 100644 index 00000000000..163d0416c31 --- /dev/null +++ b/.riot/requirements/628e8fe.txt @@ -0,0 +1,23 @@ +# +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/628e8fe.in +# +attrs==24.3.0 +certifi==2024.12.14 +charset-normalizer==3.4.1 +coverage[toml]==7.6.10 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +requests==2.32.3 +sortedcontainers==2.4.0 +urllib3==2.3.0 diff --git a/.riot/requirements/8dd53b1.txt b/.riot/requirements/8dd53b1.txt new file mode 100644 index 00000000000..1dbf9b66b89 --- /dev/null +++ b/.riot/requirements/8dd53b1.txt @@ -0,0 +1,25 @@ +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/8dd53b1.in +# +attrs==24.3.0 +certifi==2024.12.14 +charset-normalizer==3.4.1 +coverage[toml]==7.6.10 +exceptiongroup==1.2.2 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +requests==2.32.3 +sortedcontainers==2.4.0 +tomli==2.2.1 +urllib3==2.3.0 diff --git a/.riot/requirements/968fdc9.txt b/.riot/requirements/968fdc9.txt new file mode 100644 index 
00000000000..6633b871d53 --- /dev/null +++ b/.riot/requirements/968fdc9.txt @@ -0,0 +1,23 @@ +# +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: +# +# pip-compile --allow-unsafe --no-annotate .riot/requirements/968fdc9.in +# +attrs==24.3.0 +certifi==2024.12.14 +charset-normalizer==3.4.1 +coverage[toml]==7.6.10 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +opentracing==2.4.0 +packaging==24.2 +pluggy==1.5.0 +pytest==8.3.4 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +requests==2.32.3 +sortedcontainers==2.4.0 +urllib3==2.3.0 diff --git a/.riot/requirements/e53ccba.txt b/.riot/requirements/e53ccba.txt deleted file mode 100644 index 0da0b56ed64..00000000000 --- a/.riot/requirements/e53ccba.txt +++ /dev/null @@ -1,33 +0,0 @@ -# -# This file is autogenerated by pip-compile with python 3.9 -# To update, run: -# -# pip-compile --no-annotate --resolver=backtracking .riot/requirements/e53ccba.in -# -attrs==23.1.0 -certifi==2023.7.22 -charset-normalizer==3.3.2 -click==7.1.2 -coverage[toml]==7.3.2 -exceptiongroup==1.1.3 -flask==1.1.4 -gunicorn==21.2.0 -hypothesis==6.45.0 -idna==3.4 -iniconfig==2.0.0 -itsdangerous==1.1.0 -jinja2==2.11.3 -markupsafe==1.1.1 -mock==5.1.0 -opentracing==2.4.0 -packaging==23.2 -pluggy==1.3.0 -psycopg2-binary==2.9.9 -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-mock==3.12.0 -requests==2.31.0 -sortedcontainers==2.4.0 -tomli==2.0.1 -urllib3==2.1.0 -werkzeug==1.0.1 diff --git a/hatch.toml b/hatch.toml index 41e5d049e60..4277452723b 100644 --- a/hatch.toml +++ b/hatch.toml @@ -308,7 +308,6 @@ dependencies = [ "pytest-cov", "requests", "hypothesis", - "requests", "astunparse", "flask", "virtualenv-clone" @@ -324,6 +323,51 @@ test = [ [[envs.appsec_iast_packages.matrix]] python = ["3.9", "3.10", "3.11", "3.12"] +## ASM appsec_integrations_flask + +[envs.appsec_integrations_flask] +template = "appsec_integrations_flask" +dependencies = [ + "pytest", + "pytest-cov", + "requests", + "hypothesis", + "gunicorn", 
+ "psycopg2-binary~=2.9.9", + "MarkupSafe{matrix:markupsafe:}", + "itsdangerous{matrix:itsdangerous:}", + "Werkzeug{matrix:werkzeug:}", + "flask{matrix:flask}", +] + +[envs.appsec_integrations_flask.scripts] +test = [ + "uname -a", + "pip freeze", + "DD_TRACE_AGENT_URL=http://localhost:9126 DD_CIVISIBILITY_ITR_ENABLED=0 DD_IAST_REQUEST_SAMPLING=100 _DD_APPSEC_DEDUPLICATION_ENABLED=false python -m pytest -vvv {args:tests/appsec/integrations/flask_tests/}", +] + +[[envs.appsec_integrations_flask.matrix]] +python = ["3.8", "3.9"] +flask = ["~=1.1"] +# https://github.com/pallets/markupsafe/issues/282 +# DEV: Breaking change made in 2.1.0 release +markupsafe = ["~=1.1"] +itsdangerous = ["==2.0.1"] +# DEV: Flask 1.0.x is missing a maximum version for werkzeug dependency +werkzeug = ["==2.0.3"] + +[[envs.appsec_integrations_flask.matrix]] +python = ["3.8", "3.9", "3.10", "3.11"] +flask = ["~=2.2"] + +[[envs.appsec_integrations_flask.matrix]] +python = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +flask = ["~=2.2"] + +[[envs.appsec_integrations_flask.matrix]] +python = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +flask = ["~=3.0"] ## ASM FastAPI diff --git a/riotfile.py b/riotfile.py index 42deb36b813..b172e648e74 100644 --- a/riotfile.py +++ b/riotfile.py @@ -228,50 +228,16 @@ def select_pys(min_version: str = MIN_PYTHON_VERSION, max_version: str = MAX_PYT }, ), Venv( - name="appsec_integrations", - command="pytest {cmdargs} tests/appsec/integrations/", + name="appsec_integrations_pygoat", + pys=select_pys(min_version="3.10"), + command="pytest {cmdargs} tests/appsec/integrations/pygoat_tests/", pkgs={ "requests": latest, - "gunicorn": latest, - "psycopg2-binary": "~=2.9.9", }, env={ "DD_CIVISIBILITY_ITR_ENABLED": "0", "DD_IAST_REQUEST_SAMPLING": "100", # Override default 30% to analyze all IAST requests }, - venvs=[ - # Flask 1.x.x - Venv( - pys=select_pys(min_version="3.7", max_version="3.9"), - pkgs={ - "flask": "~=1.0", - # 
https://github.com/pallets/itsdangerous/issues/290 - # DEV: Breaking change made in 2.1.0 release - "itsdangerous": "<2.1.0", - # https://github.com/pallets/markupsafe/issues/282 - # DEV: Breaking change made in 2.1.0 release - "markupsafe": "<2.0", - # DEV: Flask 1.0.x is missing a maximum version for werkzeug dependency - "werkzeug": "<2.0", - }, - ), - # Flask 2.x.x - Venv( - pys=select_pys(min_version="3.7", max_version="3.11"), - pkgs={ - "flask": "~=2.2", - }, - ), - # Flask 3.x.x - Venv( - pys=select_pys(min_version="3.8", max_version="3.12"), - pkgs={ - "flask": "~=3.0", - "langchain": "==0.0.354", - "langchain_experimental": "==0.0.47", - }, - ), - ], ), Venv( name="profile-diff", diff --git a/tests/appsec/app.py b/tests/appsec/app.py index eb5beb666cf..e7e5dbaf231 100644 --- a/tests/appsec/app.py +++ b/tests/appsec/app.py @@ -87,7 +87,7 @@ from tests.appsec.iast_packages.packages.pkg_wrapt import pkg_wrapt from tests.appsec.iast_packages.packages.pkg_yarl import pkg_yarl from tests.appsec.iast_packages.packages.pkg_zipp import pkg_zipp -import tests.appsec.integrations.module_with_import_errors as module_with_import_errors +import tests.appsec.integrations.flask_tests.module_with_import_errors as module_with_import_errors app = Flask(__name__) diff --git a/tests/appsec/integrations/django_tests/__init__.py b/tests/appsec/integrations/django_tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/appsec/integrations/flask_tests/__init__.py b/tests/appsec/integrations/flask_tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/appsec/integrations/module_with_import_errors.py b/tests/appsec/integrations/flask_tests/module_with_import_errors.py similarity index 100% rename from tests/appsec/integrations/module_with_import_errors.py rename to tests/appsec/integrations/flask_tests/module_with_import_errors.py diff --git a/tests/appsec/integrations/test_flask_remoteconfig.py 
b/tests/appsec/integrations/flask_tests/test_flask_remoteconfig.py similarity index 94% rename from tests/appsec/integrations/test_flask_remoteconfig.py rename to tests/appsec/integrations/flask_tests/test_flask_remoteconfig.py index eaa69e04182..8f64d9ea7c9 100644 --- a/tests/appsec/integrations/test_flask_remoteconfig.py +++ b/tests/appsec/integrations/flask_tests/test_flask_remoteconfig.py @@ -14,9 +14,9 @@ from ddtrace.internal.compat import httplib from ddtrace.internal.compat import parse from tests.appsec.appsec_utils import gunicorn_server -from tests.appsec.integrations.utils import _PORT -from tests.appsec.integrations.utils import _multi_requests -from tests.appsec.integrations.utils import _request_200 +from tests.appsec.integrations.flask_tests.utils import _PORT +from tests.appsec.integrations.flask_tests.utils import _multi_requests +from tests.appsec.integrations.flask_tests.utils import _request_200 from tests.utils import flaky @@ -187,6 +187,7 @@ def _request_403(client, debug_mode=False, max_retries=40, sleep_time=1): raise AssertionError("request_403 failed, max_retries=%d, sleep_time=%f" % (max_retries, sleep_time)) +@flaky(until=1706677200, reason="TODO(avara1986): We need to migrate testagent to gitlab") @pytest.mark.skipif(sys.version_info >= (3, 11), reason="Gunicorn is only supported up to 3.10") def test_load_testing_appsec_ip_blocking_gunicorn_rc_disabled(): token = "test_load_testing_appsec_ip_blocking_gunicorn_rc_disabled_{}".format(str(uuid.uuid4())) @@ -202,6 +203,7 @@ def test_load_testing_appsec_ip_blocking_gunicorn_rc_disabled(): _unblock_ip(token) +@flaky(until=1706677200, reason="TODO(avara1986): We need to migrate testagent to gitlab") @pytest.mark.skipif(sys.version_info >= (3, 11), reason="Gunicorn is only supported up to 3.10") def test_load_testing_appsec_ip_blocking_gunicorn_block(): token = "test_load_testing_appsec_ip_blocking_gunicorn_block_{}".format(str(uuid.uuid4())) @@ -219,6 +221,7 @@ def 
test_load_testing_appsec_ip_blocking_gunicorn_block(): _request_200(gunicorn_client) +@flaky(until=1706677200, reason="TODO(avara1986): We need to migrate testagent to gitlab") @pytest.mark.skipif(list(sys.version_info[:2]) != [3, 10], reason="Run this tests in python 3.10") def test_load_testing_appsec_ip_blocking_gunicorn_block_and_kill_child_worker(): token = "test_load_testing_appsec_ip_blocking_gunicorn_block_and_kill_child_worker_{}".format(str(uuid.uuid4())) @@ -267,7 +270,7 @@ def test_load_testing_appsec_1click_and_ip_blocking_gunicorn_block_and_kill_chil _request_200(gunicorn_client, debug_mode=False) -@pytest.mark.subprocess(ddtrace_run=True, out=b"success") +@pytest.mark.subprocess(ddtrace_run=True, check_logs=False, out=b"success") def test_compatiblity_with_multiprocessing(): import multiprocessing from multiprocessing import Array diff --git a/tests/appsec/integrations/test_gunicorn_handlers.py b/tests/appsec/integrations/flask_tests/test_gunicorn_handlers.py similarity index 100% rename from tests/appsec/integrations/test_gunicorn_handlers.py rename to tests/appsec/integrations/flask_tests/test_gunicorn_handlers.py diff --git a/tests/appsec/integrations/test_flask_entrypoint_iast_patches.py b/tests/appsec/integrations/flask_tests/test_iast_flask_entrypoint_iast_patches.py similarity index 85% rename from tests/appsec/integrations/test_flask_entrypoint_iast_patches.py rename to tests/appsec/integrations/flask_tests/test_iast_flask_entrypoint_iast_patches.py index 4f54bc675c3..f8c512eb261 100644 --- a/tests/appsec/integrations/test_flask_entrypoint_iast_patches.py +++ b/tests/appsec/integrations/flask_tests/test_iast_flask_entrypoint_iast_patches.py @@ -1,9 +1,10 @@ -import pytest +import sys -from tests.utils import flaky +import pytest -@pytest.mark.subprocess() +@pytest.mark.skipif(sys.version_info >= (3, 13, 0), reason="Test not compatible with Python 3.13") +@pytest.mark.subprocess(check_logs=False) def test_ddtrace_iast_flask_patch(): import dis 
import io @@ -35,7 +36,35 @@ def test_ddtrace_iast_flask_patch(): del sys.modules["tests.appsec.iast.fixtures.entrypoint.app_main_patched"] -@pytest.mark.subprocess() +@pytest.mark.skipif(sys.version_info < (3, 13, 0), reason="Test compatible with Python 3.13") +@pytest.mark.subprocess(check_logs=False) +def test_ddtrace_iast_flask_patch_py313(): + import dis + import io + import re + import sys + + from tests.utils import override_env + from tests.utils import override_global_config + + PATTERN = r"""LOAD_GLOBAL 0 \(_ddtrace_aspects\)""" + + with override_global_config(dict(_iast_enabled=True)), override_env( + dict(DD_IAST_ENABLED="true", DD_IAST_REQUEST_SAMPLING="100") + ): + import tests.appsec.iast.fixtures.entrypoint.app_main_patched as flask_entrypoint + + dis_output = io.StringIO() + dis.dis(flask_entrypoint, file=dis_output) + str_output = dis_output.getvalue() + # Should have replaced the binary op with the aspect in add_test: + assert re.search(PATTERN, str_output), str_output + # Should have replaced the app.run() with a pass: + # assert "Disassembly of run" not in str_output, str_output + del sys.modules["tests.appsec.iast.fixtures.entrypoint.app_main_patched"] + + +@pytest.mark.subprocess(check_logs=False) def test_ddtrace_iast_flask_patch_iast_disabled(): import dis import io @@ -62,7 +91,7 @@ def _uninstall_watchdog_and_reload(): del sys.modules["tests.appsec.iast.fixtures.entrypoint.app_main_patched"] -@pytest.mark.subprocess() +@pytest.mark.subprocess(check_logs=False) def test_ddtrace_iast_flask_no_patch(): import dis import io @@ -93,7 +122,7 @@ def _uninstall_watchdog_and_reload(): del sys.modules["tests.appsec.iast.fixtures.entrypoint.app"] -@pytest.mark.subprocess() +@pytest.mark.subprocess(check_logs=False) def test_ddtrace_iast_flask_app_create_app_enable_iast_propagation(): import dis import io @@ -125,7 +154,7 @@ def _uninstall_watchdog_and_reload(): del sys.modules["tests.appsec.iast.fixtures.entrypoint.views"] -@pytest.mark.subprocess() 
+@pytest.mark.subprocess(check_logs=False) def test_ddtrace_iast_flask_app_create_app_patch_all(): import dis import io @@ -155,7 +184,6 @@ def _uninstall_watchdog_and_reload(): del sys.modules["tests.appsec.iast.fixtures.entrypoint.views"] -@flaky(1736035200) @pytest.mark.subprocess(check_logs=False) def test_ddtrace_iast_flask_app_create_app_patch_all_enable_iast_propagation(): import dis @@ -187,7 +215,7 @@ def _uninstall_watchdog_and_reload(): del sys.modules["tests.appsec.iast.fixtures.entrypoint.views"] -@pytest.mark.subprocess() +@pytest.mark.subprocess(check_logs=False) def test_ddtrace_iast_flask_app_create_app_patch_all_enable_iast_propagation_disabled(): import dis import io diff --git a/tests/appsec/integrations/test_flask_iast_patching.py b/tests/appsec/integrations/flask_tests/test_iast_flask_patching.py similarity index 93% rename from tests/appsec/integrations/test_flask_iast_patching.py rename to tests/appsec/integrations/flask_tests/test_iast_flask_patching.py index 3291297ea92..8cb3b2e7730 100644 --- a/tests/appsec/integrations/test_flask_iast_patching.py +++ b/tests/appsec/integrations/flask_tests/test_iast_flask_patching.py @@ -2,9 +2,8 @@ from tests.appsec.appsec_utils import flask_server from tests.appsec.appsec_utils import gunicorn_server -from tests.appsec.integrations.utils import _PORT -from tests.appsec.integrations.utils import _request_200 -from tests.utils import flaky +from tests.appsec.integrations.flask_tests.utils import _PORT +from tests.appsec.integrations.flask_tests.utils import _request_200 def test_flask_iast_ast_patching_import_error(): @@ -28,7 +27,6 @@ def test_flask_iast_ast_patching_import_error(): assert response.content == b"False" -@flaky(until=1706677200, reason="TODO(avara1986): Re.Match contains errors. 
APPSEC-55239") @pytest.mark.parametrize("style", ["re_module", "re_object"]) @pytest.mark.parametrize("endpoint", ["re", "non-re"]) @pytest.mark.parametrize( diff --git a/tests/appsec/integrations/test_flask_telemetry.py b/tests/appsec/integrations/flask_tests/test_iast_flask_telemetry.py similarity index 100% rename from tests/appsec/integrations/test_flask_telemetry.py rename to tests/appsec/integrations/flask_tests/test_iast_flask_telemetry.py diff --git a/tests/appsec/integrations/test_langchain.py b/tests/appsec/integrations/flask_tests/test_iast_langchain.py similarity index 100% rename from tests/appsec/integrations/test_langchain.py rename to tests/appsec/integrations/flask_tests/test_iast_langchain.py diff --git a/tests/appsec/integrations/test_psycopg2.py b/tests/appsec/integrations/flask_tests/test_iast_psycopg2.py similarity index 100% rename from tests/appsec/integrations/test_psycopg2.py rename to tests/appsec/integrations/flask_tests/test_iast_psycopg2.py diff --git a/tests/appsec/integrations/utils.py b/tests/appsec/integrations/flask_tests/utils.py similarity index 100% rename from tests/appsec/integrations/utils.py rename to tests/appsec/integrations/flask_tests/utils.py index 18bf20a1608..3935a9447b4 100644 --- a/tests/appsec/integrations/utils.py +++ b/tests/appsec/integrations/flask_tests/utils.py @@ -2,6 +2,9 @@ import time +_PORT = 8040 + + def _multi_requests(client, url="/", debug_mode=False): if debug_mode: results = [ @@ -47,9 +50,6 @@ def _request_200( raise AssertionError("request_200 failed, max_retries=%d, sleep_time=%f" % (max_retries, sleep_time)) -_PORT = 8040 - - def _request(client, url="/"): response = client.get(url, headers={"X-Forwarded-For": "123.45.67.88"}) return response diff --git a/tests/appsec/suitespec.yml b/tests/appsec/suitespec.yml index 07769271cb9..f304490f515 100644 --- a/tests/appsec/suitespec.yml +++ b/tests/appsec/suitespec.yml @@ -81,7 +81,7 @@ suites: retry: 2 runner: hatch timeout: 50m - 
appsec_integrations: + appsec_integrations_pygoat: parallelism: 7 paths: - '@bootstrap' @@ -90,11 +90,24 @@ suites: - '@appsec' - '@appsec_iast' - '@remoteconfig' - - tests/appsec/* + - tests/appsec/integrations/pygoat_tests/* - tests/snapshots/tests.appsec.* retry: 2 runner: riot snapshot: true + appsec_integrations_flask: + parallelism: 6 + paths: + - '@bootstrap' + - '@core' + - '@tracing' + - '@appsec' + - '@appsec_iast' + - '@remoteconfig' + - tests/appsec/integrations/flask_tests/* + retry: 2 + runner: hatch + timeout: 30m appsec_threats_django: parallelism: 12 paths: From 4183671d62b7d87f97318e9ed786cfa4879bc71f Mon Sep 17 00:00:00 2001 From: Federico Mon Date: Thu, 16 Jan 2025 11:35:10 +0100 Subject: [PATCH 13/16] chore(iast): taint get and post http parameter name in django (#11945) --- ddtrace/appsec/_iast/_handlers.py | 4 +- .../contrib/django/django_app/appsec_urls.py | 81 ++++++++++----- .../contrib/django/test_django_appsec_iast.py | 99 +++++++++++++++++++ 3 files changed, 160 insertions(+), 24 deletions(-) diff --git a/ddtrace/appsec/_iast/_handlers.py b/ddtrace/appsec/_iast/_handlers.py index 33dd7aa348b..e873d8a6a5a 100644 --- a/ddtrace/appsec/_iast/_handlers.py +++ b/ddtrace/appsec/_iast/_handlers.py @@ -171,8 +171,6 @@ def _on_django_func_wrapped(fn_args, fn_kwargs, first_arg_expected_type, *_): http_req = fn_args[0] http_req.COOKIES = taint_structure(http_req.COOKIES, OriginType.COOKIE_NAME, OriginType.COOKIE) - http_req.GET = taint_structure(http_req.GET, OriginType.PARAMETER_NAME, OriginType.PARAMETER) - http_req.POST = taint_structure(http_req.POST, OriginType.BODY, OriginType.BODY) if ( getattr(http_req, "_body", None) is not None and len(getattr(http_req, "_body", None)) > 0 @@ -202,6 +200,8 @@ def _on_django_func_wrapped(fn_args, fn_kwargs, first_arg_expected_type, *_): except AttributeError: log.debug("IAST can't set attribute http_req.body", exc_info=True) + http_req.GET = taint_structure(http_req.GET, OriginType.PARAMETER_NAME, 
OriginType.PARAMETER) + http_req.POST = taint_structure(http_req.POST, OriginType.PARAMETER_NAME, OriginType.BODY) http_req.headers = taint_structure(http_req.headers, OriginType.HEADER_NAME, OriginType.HEADER) http_req.path = taint_pyobject( http_req.path, source_name="path", source_value=http_req.path, source_origin=OriginType.PATH diff --git a/tests/contrib/django/django_app/appsec_urls.py b/tests/contrib/django/django_app/appsec_urls.py index f5b3f359445..0b25b9433d9 100644 --- a/tests/contrib/django/django_app/appsec_urls.py +++ b/tests/contrib/django/django_app/appsec_urls.py @@ -32,6 +32,26 @@ from typing import Any # noqa:F401 +if python_supported_by_iast(): + with override_env({"DD_IAST_ENABLED": "True"}): + from ddtrace.appsec._iast._taint_tracking import OriginType + from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted + from ddtrace.appsec._iast.reporter import IastSpanReporter + + def assert_origin(parameter, origin_type): # type: (Any, Any) -> None + assert is_pyobject_tainted(parameter) + sources, _ = IastSpanReporter.taint_ranges_as_evidence_info(parameter) + assert sources[0].origin == origin_type + +else: + + def assert_origin(pyobject, origin_type): # type: (Any) -> bool + return True + + def is_pyobject_tainted(pyobject): # type: (Any) -> bool + return True + + def include_view(request): return HttpResponse(status=200) @@ -52,6 +72,8 @@ def body_view(request): return HttpResponse(data, status=200) else: data = request.POST + first_post_key = list(request.POST.keys())[0] + assert_origin(first_post_key, OriginType.PARAMETER_NAME) return HttpResponse(str(dict(data)), status=200) @@ -86,6 +108,24 @@ def sqli_http_request_parameter(request): return HttpResponse(request.META["HTTP_USER_AGENT"], status=200) +def sqli_http_request_parameter_name_get(request): + obj = " 1" + with connection.cursor() as cursor: + # label iast_enabled_sqli_http_request_parameter_name_get + 
cursor.execute(add_aspect(list(request.GET.keys())[0], obj)) + + return HttpResponse(request.META["HTTP_USER_AGENT"], status=200) + + +def sqli_http_request_parameter_name_post(request): + obj = " 1" + with connection.cursor() as cursor: + # label iast_enabled_sqli_http_request_parameter_name_post + cursor.execute(add_aspect(list(request.POST.keys())[0], obj)) + + return HttpResponse(request.META["HTTP_USER_AGENT"], status=200) + + def sqli_http_request_header_name(request): key = [x for x in request.META.keys() if x == "master"][0] @@ -119,35 +159,21 @@ def sqli_http_path_parameter(request, q_http_path_parameter): def taint_checking_enabled_view(request): - if python_supported_by_iast(): - with override_env({"DD_IAST_ENABLED": "True"}): - from ddtrace.appsec._iast._taint_tracking import OriginType - from ddtrace.appsec._iast._taint_tracking._taint_objects import is_pyobject_tainted - from ddtrace.appsec._iast.reporter import IastSpanReporter - - def assert_origin_path(path): # type: (Any) -> None - assert is_pyobject_tainted(path) - sources, tainted_ranges_to_dict = IastSpanReporter.taint_ranges_as_evidence_info(path) - assert sources[0].origin == OriginType.PATH - - else: - - def assert_origin_path(pyobject): # type: (Any) -> bool - return True - - def is_pyobject_tainted(pyobject): # type: (Any) -> bool - return True - # TODO: Taint request body # assert is_pyobject_tainted(request.body) + first_get_key = list(request.GET.keys())[0] assert is_pyobject_tainted(request.GET["q"]) + assert is_pyobject_tainted(first_get_key) assert is_pyobject_tainted(request.META["QUERY_STRING"]) assert is_pyobject_tainted(request.META["HTTP_USER_AGENT"]) # TODO: Taint request headers # assert is_pyobject_tainted(request.headers["User-Agent"]) - assert_origin_path(request.path_info) - assert_origin_path(request.path) - assert_origin_path(request.META["PATH_INFO"]) + assert_origin(request.path_info, OriginType.PATH) + assert_origin(request.path, OriginType.PATH) + 
assert_origin(request.META["PATH_INFO"], OriginType.PATH) + assert_origin(request.GET["q"], OriginType.PARAMETER) + assert_origin(first_get_key, OriginType.PARAMETER_NAME) + return HttpResponse(request.META["HTTP_USER_AGENT"], status=200) @@ -162,6 +188,7 @@ def is_pyobject_tainted(pyobject): # type: (Any) -> bool assert not is_pyobject_tainted(request.body) assert not is_pyobject_tainted(request.GET["q"]) + assert not is_pyobject_tainted(list(request.GET.keys())[0]) assert not is_pyobject_tainted(request.META["QUERY_STRING"]) assert not is_pyobject_tainted(request.META["HTTP_USER_AGENT"]) assert not is_pyobject_tainted(request.headers["User-Agent"]) @@ -297,6 +324,16 @@ def validate_querydict(request): handler("taint-checking-enabled/$", taint_checking_enabled_view, name="taint_checking_enabled_view"), handler("taint-checking-disabled/$", taint_checking_disabled_view, name="taint_checking_disabled_view"), handler("sqli_http_request_parameter/$", sqli_http_request_parameter, name="sqli_http_request_parameter"), + handler( + "sqli_http_request_parameter_name_get/$", + sqli_http_request_parameter_name_get, + name="sqli_http_request_parameter_name_get", + ), + handler( + "sqli_http_request_parameter_name_post/$", + sqli_http_request_parameter_name_post, + name="sqli_http_request_parameter_name_post", + ), handler("sqli_http_request_header_name/$", sqli_http_request_header_name, name="sqli_http_request_header_name"), handler("sqli_http_request_header_value/$", sqli_http_request_header_value, name="sqli_http_request_header_value"), handler("sqli_http_request_cookie_name/$", sqli_http_request_cookie_name, name="sqli_http_request_cookie_name"), diff --git a/tests/contrib/django/test_django_appsec_iast.py b/tests/contrib/django/test_django_appsec_iast.py index ee5cb069331..90e88716844 100644 --- a/tests/contrib/django/test_django_appsec_iast.py +++ b/tests/contrib/django/test_django_appsec_iast.py @@ -245,6 +245,105 @@ def 
test_django_tainted_user_agent_iast_enabled_sqli_http_request_parameter(clie assert loaded["vulnerabilities"][0]["hash"] == hash_value +@pytest.mark.django_db() +@pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") +def test_django_tainted_user_agent_iast_enabled_sqli_http_request_parameter_name_get(client, test_spans, tracer): + with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False, _iast_request_sampling=100.0)): + root_span, response = _aux_appsec_get_root_span( + client, + test_spans, + tracer, + content_type="application/x-www-form-urlencoded", + url="/appsec/sqli_http_request_parameter_name_get/?SELECT=unused", + headers={"HTTP_USER_AGENT": "test/1.2.3"}, + ) + + vuln_type = "SQL_INJECTION" + + assert response.status_code == 200 + assert response.content == b"test/1.2.3" + + loaded = json.loads(root_span.get_tag(IAST.JSON)) + + line, hash_value = get_line_and_hash( + "iast_enabled_sqli_http_request_parameter_name_get", vuln_type, filename=TEST_FILE + ) + + assert loaded["sources"] == [ + { + "name": "SELECT", + "origin": "http.request.parameter.name", + "value": "SELECT", + } + ] + + assert loaded["vulnerabilities"][0]["type"] == vuln_type + assert loaded["vulnerabilities"][0]["evidence"] == { + "valueParts": [ + {"source": 0, "value": "SELECT"}, + { + "value": " ", + }, + { + "redacted": True, + }, + ] + } + assert loaded["vulnerabilities"][0]["location"]["path"] == TEST_FILE + assert loaded["vulnerabilities"][0]["location"]["line"] == line + assert loaded["vulnerabilities"][0]["hash"] == hash_value + + +@pytest.mark.django_db() +@pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") +def test_django_tainted_user_agent_iast_enabled_sqli_http_request_parameter_name_post(client, test_spans, tracer): + with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False, _iast_request_sampling=100.0)): + root_span, response = 
_aux_appsec_get_root_span( + client, + test_spans, + tracer, + payload=urlencode({"SELECT": "unused"}), + content_type="application/x-www-form-urlencoded", + url="/appsec/sqli_http_request_parameter_name_post/", + headers={"HTTP_USER_AGENT": "test/1.2.3"}, + ) + + vuln_type = "SQL_INJECTION" + + assert response.status_code == 200 + assert response.content == b"test/1.2.3" + + loaded = json.loads(root_span.get_tag(IAST.JSON)) + + line, hash_value = get_line_and_hash( + "iast_enabled_sqli_http_request_parameter_name_post", vuln_type, filename=TEST_FILE + ) + + assert loaded["sources"] == [ + { + "name": "SELECT", + "origin": "http.request.parameter.name", + "value": "SELECT", + } + ] + + assert loaded["vulnerabilities"][0]["type"] == vuln_type + assert loaded["vulnerabilities"][0]["evidence"] == { + "valueParts": [ + {"source": 0, "value": "SELECT"}, + { + "value": " ", + }, + { + "redacted": True, + }, + ] + } + assert loaded["vulnerabilities"][0]["location"]["path"] == TEST_FILE + assert loaded["vulnerabilities"][0]["location"]["line"] == line + assert loaded["vulnerabilities"][0]["hash"] == hash_value + + @pytest.mark.django_db() @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") def test_django_tainted_user_agent_iast_enabled_sqli_http_request_header_value(client, test_spans, tracer): From eddd51464e8c32db019e239c106317228b65667c Mon Sep 17 00:00:00 2001 From: Christophe Papazian <114495376+christophe-papazian@users.noreply.github.com> Date: Thu, 16 Jan 2025 18:04:23 +0100 Subject: [PATCH 14/16] chore(asm): addendum to ATO V3 (#11981) This PR fixes missing support from unreleased PR https://github.com/DataDog/dd-trace-py/pull/11932 - add better support for usr.login tag and anonymisation of the value - add support for WAF permanent address force resent to the WAF. 
This should be the default behaviour for usr.id and usr.login WAF addresses - add check for usr.login anonymised on threat tests and django tests - update processor tests for the new usr.login WAF address - improve user blocking mechanism by blocking as soon as we can with BlockingException Will also enable support for system tests on https://github.com/DataDog/system-tests/pull/3828 when this PR is merged. APPSEC-56315 ## Checklist - [x] PR author has checked that all the criteria below are met - The PR description includes an overview of the change - The PR description articulates the motivation for the change - The change includes tests OR the PR description describes a testing strategy - The PR description notes risks associated with the change, if any - Newly-added code is easy to change - The change follows the [library release note guidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html) - The change includes or references documentation updates if necessary - Backport labels are set (if [applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)) ## Reviewer Checklist - [x] Reviewer has checked that all the criteria below are met - Title is accurate - All changes are related to the pull request's stated goal - Avoids breaking [API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces) changes - Testing strategy adequately addresses listed risks - Newly-added code is easy to change - Release note makes sense to a user of the library - If necessary, author has acknowledged and discussed the performance implications of this PR as reported in the benchmarks PR comment - Backport labels are set in a manner that is consistent with the [release branch maintenance policy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting) --- ddtrace/appsec/_constants.py | 2 ++ ddtrace/appsec/_processor.py | 3 ++- ddtrace/appsec/_trace_utils.py | 26 +++++++++++++++++----- 
tests/appsec/appsec/test_processor.py | 19 +++++++++++++++- tests/appsec/contrib_appsec/utils.py | 2 ++ tests/contrib/django/test_django_appsec.py | 9 ++++++-- 6 files changed, 51 insertions(+), 10 deletions(-) diff --git a/ddtrace/appsec/_constants.py b/ddtrace/appsec/_constants.py index d16de0e1379..92b9e239900 100644 --- a/ddtrace/appsec/_constants.py +++ b/ddtrace/appsec/_constants.py @@ -182,6 +182,7 @@ class WAF_DATA_NAMES(metaclass=Constant_Class): REQUEST_COOKIES: Literal["server.request.cookies"] = "server.request.cookies" REQUEST_HTTP_IP: Literal["http.client_ip"] = "http.client_ip" REQUEST_USER_ID: Literal["usr.id"] = "usr.id" + REQUEST_USERNAME: Literal["usr.login"] = "usr.login" RESPONSE_STATUS: Literal["server.response.status"] = "server.response.status" RESPONSE_HEADERS_NO_COOKIES: Literal["server.response.headers.no_cookies"] = "server.response.headers.no_cookies" RESPONSE_BODY: Literal["server.response.body"] = "server.response.body" @@ -196,6 +197,7 @@ class WAF_DATA_NAMES(metaclass=Constant_Class): REQUEST_COOKIES, REQUEST_HTTP_IP, REQUEST_USER_ID, + REQUEST_USERNAME, RESPONSE_STATUS, RESPONSE_HEADERS_NO_COOKIES, RESPONSE_BODY, diff --git a/ddtrace/appsec/_processor.py b/ddtrace/appsec/_processor.py index 54a9f624afe..030399f8a50 100644 --- a/ddtrace/appsec/_processor.py +++ b/ddtrace/appsec/_processor.py @@ -262,6 +262,7 @@ def _waf_action( custom_data: Optional[Dict[str, Any]] = None, crop_trace: Optional[str] = None, rule_type: Optional[str] = None, + force_sent: bool = False, ) -> Optional[DDWaf_result]: """ Call the `WAF` with the given parameters. 
If `custom_data_names` is specified as @@ -293,7 +294,7 @@ def _waf_action( force_keys = custom_data.get("PROCESSOR_SETTINGS", {}).get("extract-schema", False) if custom_data else False for key, waf_name in iter_data: # type: ignore[attr-defined] - if key in data_already_sent: + if key in data_already_sent and not force_sent: continue # ensure ephemeral addresses are sent, event when value is None if waf_name not in WAF_DATA_NAMES.PERSISTENT_ADDRESSES and custom_data: diff --git a/ddtrace/appsec/_trace_utils.py b/ddtrace/appsec/_trace_utils.py index 8609344f05a..77cb1aaca3a 100644 --- a/ddtrace/appsec/_trace_utils.py +++ b/ddtrace/appsec/_trace_utils.py @@ -9,11 +9,13 @@ from ddtrace.appsec._asm_request_context import in_asm_context from ddtrace.appsec._constants import APPSEC from ddtrace.appsec._constants import LOGIN_EVENTS_MODE +from ddtrace.appsec._constants import WAF_ACTIONS from ddtrace.appsec._utils import _hash_user_id from ddtrace.contrib.trace_utils import set_user from ddtrace.ext import SpanTypes from ddtrace.ext import user from ddtrace.internal import core +from ddtrace.internal._exceptions import BlockingException from ddtrace.internal.logger import get_logger from ddtrace.settings.asm import config as asm_config @@ -121,21 +123,31 @@ def track_user_login_success_event( real_mode = login_events_mode if login_events_mode != LOGIN_EVENTS_MODE.AUTO else asm_config._user_event_mode if real_mode == LOGIN_EVENTS_MODE.DISABLED: return + initial_login = login + initial_user_id = user_id if real_mode == LOGIN_EVENTS_MODE.ANON: - login = name = email = None + name = email = None + login = None if login is None else _hash_user_id(str(login)) span = _track_user_login_common(tracer, True, metadata, login_events_mode, login, name, email, span) if not span: return - if real_mode == LOGIN_EVENTS_MODE.ANON and isinstance(user_id, str): user_id = _hash_user_id(user_id) - if in_asm_context(): - call_waf_callback(custom_data={"REQUEST_USER_ID": str(user_id), 
"LOGIN_SUCCESS": real_mode}) - if login_events_mode != LOGIN_EVENTS_MODE.SDK: span.set_tag_str(APPSEC.USER_LOGIN_USERID, str(user_id)) set_user(tracer, user_id, name, email, scope, role, session_id, propagate, span) + if in_asm_context(): + res = call_waf_callback( + custom_data={ + "REQUEST_USER_ID": str(initial_user_id) if initial_user_id else None, + "REQUEST_USERNAME": initial_login, + "LOGIN_SUCCESS": real_mode, + }, + force_sent=True, + ) + if res and any(action in [WAF_ACTIONS.BLOCK_ACTION, WAF_ACTIONS.REDIRECT_ACTION] for action in res.actions): + raise BlockingException(get_blocked()) def track_user_login_failure_event( @@ -159,6 +171,8 @@ def track_user_login_failure_event( real_mode = login_events_mode if login_events_mode != LOGIN_EVENTS_MODE.AUTO else asm_config._user_event_mode if real_mode == LOGIN_EVENTS_MODE.DISABLED: return + if real_mode == LOGIN_EVENTS_MODE.ANON and isinstance(login, str): + login = _hash_user_id(login) span = _track_user_login_common(tracer, False, metadata, login_events_mode, login) if not span: return @@ -265,7 +279,7 @@ def should_block_user(tracer: Tracer, userid: str) -> bool: if get_blocked(): return True - _asm_request_context.call_waf_callback(custom_data={"REQUEST_USER_ID": str(userid)}) + _asm_request_context.call_waf_callback(custom_data={"REQUEST_USER_ID": str(userid)}, force_sent=True) return bool(get_blocked()) diff --git a/tests/appsec/appsec/test_processor.py b/tests/appsec/appsec/test_processor.py index 3fec599237b..ad3c9e6827a 100644 --- a/tests/appsec/appsec/test_processor.py +++ b/tests/appsec/appsec/test_processor.py @@ -638,7 +638,23 @@ def test_asm_context_registration(tracer): "name": "test required", "tags": {"category": "attack_attempt", "custom": "1", "type": "custom"}, "transformers": [], - } + }, + { + "conditions": [ + { + "operator": "match_regex", + "parameters": { + "inputs": [{"address": "usr.login"}], + "options": {"case_sensitive": False}, + "regex": "GET", + }, + } + ], + "id": 
"32b243c7-26eb-4046-bbbb-custom", + "name": "test required", + "tags": {"category": "attack_attempt", "custom": "1", "type": "custom"}, + "transformers": [], + }, ] } @@ -672,6 +688,7 @@ def test_required_addresses(): "server.request.query", "server.response.headers.no_cookies", "usr.id", + "usr.login", } diff --git a/tests/appsec/contrib_appsec/utils.py b/tests/appsec/contrib_appsec/utils.py index e614e33a51b..6400cacb625 100644 --- a/tests/appsec/contrib_appsec/utils.py +++ b/tests/appsec/contrib_appsec/utils.py @@ -1480,6 +1480,8 @@ def test_auto_user_events( assert get_tag("_dd.appsec.events.users.login.success.sdk") is None if mode == "identification": assert get_tag("_dd.appsec.usr.login") == user + elif mode == "anonymization": + assert get_tag("_dd.appsec.usr.login") == _hash_user_id(user) else: assert get_tag("appsec.events.users.login.success.track") == "true" assert get_tag("usr.id") == user_id_hash diff --git a/tests/contrib/django/test_django_appsec.py b/tests/contrib/django/test_django_appsec.py index 3c5cb399739..2a00657e14a 100644 --- a/tests/contrib/django/test_django_appsec.py +++ b/tests/contrib/django/test_django_appsec.py @@ -235,7 +235,9 @@ def test_django_login_sucess_anonymization(client, test_spans, tracer, use_login assert login_span.get_tag(user.ID) == "1" assert login_span.get_tag("appsec.events.users.login.success.track") == "true" assert login_span.get_tag(APPSEC.AUTO_LOGIN_EVENTS_SUCCESS_MODE) == LOGIN_EVENTS_MODE.ANON - assert login_span.get_tag(APPSEC.USER_LOGIN_EVENT_PREFIX + ".success.login") is None + assert login_span.get_tag(APPSEC.USER_LOGIN_EVENT_PREFIX + ".success.login") == ( + "anon_d1ad1f735a4381c2e8dbed0222db1136" if use_login else None + ) assert login_span.get_tag(APPSEC.USER_LOGIN_EVENT_PREFIX + ".success.email") is None assert login_span.get_tag(APPSEC.USER_LOGIN_EVENT_PREFIX + ".success.username") is None @@ -368,7 +370,10 @@ def test_django_login_sucess_anonymization_but_user_set_login(client, test_spans assert 
login_span.get_tag(user.ID) == "anon_d1ad1f735a4381c2e8dbed0222db1136" assert login_span.get_tag("appsec.events.users.login.success.track") == "true" assert login_span.get_tag(APPSEC.AUTO_LOGIN_EVENTS_SUCCESS_MODE) == LOGIN_EVENTS_MODE.ANON - assert not login_span.get_tag(APPSEC.USER_LOGIN_EVENT_PREFIX + ".success.login") + assert ( + login_span.get_tag(APPSEC.USER_LOGIN_EVENT_PREFIX + ".success.login") + == "anon_d1ad1f735a4381c2e8dbed0222db1136" + ) assert not login_span.get_tag(APPSEC.USER_LOGIN_EVENT_PREFIX_PUBLIC + ".success.email") assert not login_span.get_tag(APPSEC.USER_LOGIN_EVENT_PREFIX_PUBLIC + ".success.username") From b028cc6d33cdfd7822c9a94b3996dda742cc0b18 Mon Sep 17 00:00:00 2001 From: Munir Abdinur Date: Thu, 16 Jan 2025 13:05:02 -0500 Subject: [PATCH 15/16] chore(pin): remove all deprecated references [3.0] (#11982) ## Motivation Ensure deprecation warnings are only logged when users manually use deprecated interfaces. ## Description This change ensures we do not use the deprecated `ddtrace.Pin` reference in docs and integrations. 
## Checklist - [x] PR author has checked that all the criteria below are met - The PR description includes an overview of the change - The PR description articulates the motivation for the change - The change includes tests OR the PR description describes a testing strategy - The PR description notes risks associated with the change, if any - Newly-added code is easy to change - The change follows the [library release note guidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html) - The change includes or references documentation updates if necessary - Backport labels are set (if [applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)) ## Reviewer Checklist - [x] Reviewer has checked that all the criteria below are met - Title is accurate - All changes are related to the pull request's stated goal - Avoids breaking [API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces) changes - Testing strategy adequately addresses listed risks - Newly-added code is easy to change - Release note makes sense to a user of the library - If necessary, author has acknowledged and discussed the performance implications of this PR as reported in the benchmarks PR comment - Backport labels are set in a manner that is consistent with the [release branch maintenance policy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting) --- ddtrace/__init__.py | 6 ++++-- ddtrace/contrib/aredis/__init__.py | 2 +- ddtrace/contrib/httpx/__init__.py | 2 +- ddtrace/contrib/internal/mongoengine/trace.py | 6 +++--- ddtrace/contrib/internal/pylibmc/client.py | 6 +++--- ddtrace/contrib/internal/pymongo/client.py | 12 ++++++------ ddtrace/contrib/internal/tornado/application.py | 2 +- ddtrace/contrib/redis/__init__.py | 2 +- ddtrace/contrib/yaaredis/__init__.py | 2 +- docs/api.rst | 2 +- 10 files changed, 22 insertions(+), 20 deletions(-) diff --git a/ddtrace/__init__.py b/ddtrace/__init__.py index 835291fadb7..f65d70b3fa5 100644 
--- a/ddtrace/__init__.py +++ b/ddtrace/__init__.py @@ -64,7 +64,7 @@ ] -_DEPRECATED_MODULE_ATTRIBUTES = [ +_DEPRECATED_TRACE_ATTRIBUTES = [ "Span", "Tracer", "Pin", @@ -72,10 +72,12 @@ def __getattr__(name): - if name in _DEPRECATED_MODULE_ATTRIBUTES: + if name in _DEPRECATED_TRACE_ATTRIBUTES: debtcollector.deprecate( ("%s.%s is deprecated" % (__name__, name)), + message="Import from ddtrace.trace instead.", category=DDTraceDeprecationWarning, + removal_version="3.0.0", ) if name in globals(): diff --git a/ddtrace/contrib/aredis/__init__.py b/ddtrace/contrib/aredis/__init__.py index 8448740104f..1d651b9c616 100644 --- a/ddtrace/contrib/aredis/__init__.py +++ b/ddtrace/contrib/aredis/__init__.py @@ -50,7 +50,7 @@ Instance Configuration ~~~~~~~~~~~~~~~~~~~~~~ -To configure particular aredis instances use the :class:`Pin ` API:: +To configure particular aredis instances use the :class:`Pin ` API:: import aredis from ddtrace.trace import Pin diff --git a/ddtrace/contrib/httpx/__init__.py b/ddtrace/contrib/httpx/__init__.py index 28621de44f2..95762604687 100644 --- a/ddtrace/contrib/httpx/__init__.py +++ b/ddtrace/contrib/httpx/__init__.py @@ -57,7 +57,7 @@ Instance Configuration ~~~~~~~~~~~~~~~~~~~~~~ -To configure particular ``httpx`` client instances use the :class:`Pin ` API:: +To configure particular ``httpx`` client instances use the :class:`Pin ` API:: import httpx from ddtrace.trace import Pin diff --git a/ddtrace/contrib/internal/mongoengine/trace.py b/ddtrace/contrib/internal/mongoengine/trace.py index c5f3e834aed..b13ef567037 100644 --- a/ddtrace/contrib/internal/mongoengine/trace.py +++ b/ddtrace/contrib/internal/mongoengine/trace.py @@ -23,12 +23,12 @@ class WrappedConnect(wrapt.ObjectProxy): def __init__(self, connect): super(WrappedConnect, self).__init__(connect) - ddtrace.Pin(_SERVICE, tracer=ddtrace.tracer).onto(self) + ddtrace.trace.Pin(_SERVICE, tracer=ddtrace.tracer).onto(self) def __call__(self, *args, **kwargs): client = self.__wrapped__(*args, 
**kwargs) - pin = ddtrace.Pin.get_from(self) + pin = ddtrace.trace.Pin.get_from(self) if pin: - ddtrace.Pin(service=pin.service, tracer=pin.tracer).onto(client) + ddtrace.trace.Pin(service=pin.service, tracer=pin.tracer).onto(client) return client diff --git a/ddtrace/contrib/internal/pylibmc/client.py b/ddtrace/contrib/internal/pylibmc/client.py index 917a42b293e..af15925f327 100644 --- a/ddtrace/contrib/internal/pylibmc/client.py +++ b/ddtrace/contrib/internal/pylibmc/client.py @@ -51,7 +51,7 @@ def __init__(self, client=None, service=memcached.SERVICE, tracer=None, *args, * super(TracedClient, self).__init__(client) schematized_service = schematize_service_name(service) - pin = ddtrace.Pin(service=schematized_service, tracer=tracer) + pin = ddtrace.trace.Pin(service=schematized_service, tracer=tracer) pin.onto(self) # attempt to collect the pool of urls this client talks to @@ -64,7 +64,7 @@ def clone(self, *args, **kwargs): # rewrap new connections. cloned = self.__wrapped__.clone(*args, **kwargs) traced_client = TracedClient(cloned) - pin = ddtrace.Pin.get_from(self) + pin = ddtrace.trace.Pin.get_from(self) if pin: pin.clone().onto(traced_client) return traced_client @@ -155,7 +155,7 @@ def _no_span(self): def _span(self, cmd_name): """Return a span timing the given command.""" - pin = ddtrace.Pin.get_from(self) + pin = ddtrace.trace.Pin.get_from(self) if not pin or not pin.enabled(): return self._no_span() diff --git a/ddtrace/contrib/internal/pymongo/client.py b/ddtrace/contrib/internal/pymongo/client.py index 426d205f9da..2cdf2185586 100644 --- a/ddtrace/contrib/internal/pymongo/client.py +++ b/ddtrace/contrib/internal/pymongo/client.py @@ -61,7 +61,7 @@ def __setddpin__(client, pin): pin.onto(client._topology) def __getddpin__(client): - return ddtrace.Pin.get_from(client._topology) + return ddtrace.trace.Pin.get_from(client._topology) # Set a pin on the mongoclient pin on the topology object # This allows us to pass the same pin to the server objects @@ 
-103,7 +103,7 @@ def _trace_topology_select_server(func, args, kwargs): # Ensure the pin used on the traced mongo client is passed down to the topology instance # This allows us to pass the same pin in traced server objects. topology_instance = get_argument_value(args, kwargs, 0, "self") - pin = ddtrace.Pin.get_from(topology_instance) + pin = ddtrace.trace.Pin.get_from(topology_instance) if pin is not None: pin.onto(server) @@ -125,7 +125,7 @@ def _datadog_trace_operation(operation, wrapped): log.exception("error parsing query") # Gets the pin from the mogno client (through the topology object) - pin = ddtrace.Pin.get_from(wrapped) + pin = ddtrace.trace.Pin.get_from(wrapped) # if we couldn't parse or shouldn't trace the message, just go. if not cmd or not pin or not pin.enabled(): return None @@ -220,7 +220,7 @@ def _trace_socket_command(func, args, kwargs): except Exception: log.exception("error parsing spec. skipping trace") - pin = ddtrace.Pin.get_from(socket_instance) + pin = ddtrace.trace.Pin.get_from(socket_instance) # skip tracing if we don't have a piece of data we need if not dbname or not cmd or not pin or not pin.enabled(): return func(*args, **kwargs) @@ -239,7 +239,7 @@ def _trace_socket_write_command(func, args, kwargs): except Exception: log.exception("error parsing msg") - pin = ddtrace.Pin.get_from(socket_instance) + pin = ddtrace.trace.Pin.get_from(socket_instance) # if we couldn't parse it, don't try to trace it. 
if not cmd or not pin or not pin.enabled(): return func(*args, **kwargs) @@ -252,7 +252,7 @@ def _trace_socket_write_command(func, args, kwargs): def _trace_cmd(cmd, socket_instance, address): - pin = ddtrace.Pin.get_from(socket_instance) + pin = ddtrace.trace.Pin.get_from(socket_instance) s = pin.tracer.trace( schematize_database_operation("pymongo.cmd", database_provider="mongodb"), span_type=SpanTypes.MONGODB, diff --git a/ddtrace/contrib/internal/tornado/application.py b/ddtrace/contrib/internal/tornado/application.py index 3a7dc832b5e..227c74f3359 100644 --- a/ddtrace/contrib/internal/tornado/application.py +++ b/ddtrace/contrib/internal/tornado/application.py @@ -55,4 +55,4 @@ def tracer_config(__init__, app, args, kwargs): tracer.set_tags(tags) # configure the PIN object for template rendering - ddtrace.Pin(service=service, tracer=tracer).onto(template) + ddtrace.trace.Pin(service=service, tracer=tracer).onto(template) diff --git a/ddtrace/contrib/redis/__init__.py b/ddtrace/contrib/redis/__init__.py index 638d08b0a79..9b498614e4b 100644 --- a/ddtrace/contrib/redis/__init__.py +++ b/ddtrace/contrib/redis/__init__.py @@ -52,7 +52,7 @@ Instance Configuration ~~~~~~~~~~~~~~~~~~~~~~ -To configure particular redis instances use the :class:`Pin ` API:: +To configure particular redis instances use the :class:`Pin ` API:: import redis from ddtrace.trace import Pin diff --git a/ddtrace/contrib/yaaredis/__init__.py b/ddtrace/contrib/yaaredis/__init__.py index 2eefb3beb93..7c0c9bd1b21 100644 --- a/ddtrace/contrib/yaaredis/__init__.py +++ b/ddtrace/contrib/yaaredis/__init__.py @@ -50,7 +50,7 @@ Instance Configuration ~~~~~~~~~~~~~~~~~~~~~~ -To configure particular yaaredis instances use the :class:`Pin ` API:: +To configure particular yaaredis instances use the :class:`Pin ` API:: import yaaredis from ddtrace.trace import Pin diff --git a/docs/api.rst b/docs/api.rst index 4c52e37808f..d4b4e80674a 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -19,7 +19,7 @@ Tracing .. 
autoclass:: ddtrace.Span :members: -.. autoclass:: ddtrace.Pin +.. autoclass:: ddtrace.trace.Pin :members: .. autoclass:: ddtrace.trace.Context From 0275a5ec8c1d01b9fd343f1e6753796ce010f6dc Mon Sep 17 00:00:00 2001 From: Munir Abdinur Date: Thu, 16 Jan 2025 14:36:42 -0500 Subject: [PATCH 16/16] chore(tracing): deprecates FilterRequestsOnUrl [3.0] (#11962) In recent ddtrace versions spans can be sampled by tags and resource names via the `DD_TRACE_SAMPLING_RULES` configuration. Setting this configuration allows ddtrace to compute accurate trace metrics and prevents the generation of partial traces. To encourage the use of `DD_TRACE_SAMPLING_RULES`, `FilterRequestsOnUrl` will be deprecated. ## Checklist - [x] PR author has checked that all the criteria below are met - The PR description includes an overview of the change - The PR description articulates the motivation for the change - The change includes tests OR the PR description describes a testing strategy - The PR description notes risks associated with the change, if any - Newly-added code is easy to change - The change follows the [library release note guidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html) - The change includes or references documentation updates if necessary - Backport labels are set (if [applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)) ## Reviewer Checklist - [x] Reviewer has checked that all the criteria below are met - Title is accurate - All changes are related to the pull request's stated goal - Avoids breaking [API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces) changes - Testing strategy adequately addresses listed risks - Newly-added code is easy to change - Release note makes sense to a user of the library - If necessary, author has acknowledged and discussed the performance implications of this PR as reported in the benchmarks PR comment - Backport labels are set in a manner that is consistent with the 
[release branch maintenance policy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting) --- ddtrace/contrib/tornado/__init__.py | 5 ----- ddtrace/filters.py | 4 ++-- ddtrace/trace/__init__.py | 3 +-- docs/advanced_usage.rst | 19 ++++++++++--------- ...rcefilter-deprecated-52b1c92d388b0518.yaml | 4 ++++ tests/tracer/test_filters.py | 2 +- 6 files changed, 18 insertions(+), 19 deletions(-) create mode 100644 releasenotes/notes/ddtrace-resourcefilter-deprecated-52b1c92d388b0518.yaml diff --git a/ddtrace/contrib/tornado/__init__.py b/ddtrace/contrib/tornado/__init__.py index ad0adef2dd5..10390e77e6e 100644 --- a/ddtrace/contrib/tornado/__init__.py +++ b/ddtrace/contrib/tornado/__init__.py @@ -76,11 +76,6 @@ def log_exception(self, typ, value, tb): 'default_service': 'my-tornado-app', 'tags': {'env': 'production'}, 'distributed_tracing': False, - 'settings': { - 'FILTERS': [ - FilterRequestsOnUrl(r'http://test\\.example\\.com'), - ], - }, }, } diff --git a/ddtrace/filters.py b/ddtrace/filters.py index 3c9c42892b8..bd6367d5635 100644 --- a/ddtrace/filters.py +++ b/ddtrace/filters.py @@ -4,7 +4,7 @@ deprecate( - "The ddtrace.filters module is deprecated and will be removed.", - message="Import ``TraceFilter`` and/or ``FilterRequestsOnUrl`` from the ddtrace.trace package.", + "The ddtrace.filters module and the ``FilterRequestsOnUrl`` class is deprecated and will be removed.", + message="Import ``TraceFilter`` from the ddtrace.trace package.", category=DDTraceDeprecationWarning, ) diff --git a/ddtrace/trace/__init__.py b/ddtrace/trace/__init__.py index dcd3aeb928e..72653598956 100644 --- a/ddtrace/trace/__init__.py +++ b/ddtrace/trace/__init__.py @@ -1,8 +1,7 @@ from ddtrace._trace.context import Context -from ddtrace._trace.filters import FilterRequestsOnUrl from ddtrace._trace.filters import TraceFilter from ddtrace._trace.pin import Pin # TODO: Move `ddtrace.Tracer`, `ddtrace.Span`, and `ddtrace.tracer` to this module -__all__ = ["Context", "Pin", 
"TraceFilter", "FilterRequestsOnUrl"] +__all__ = ["Context", "Pin", "TraceFilter"] diff --git a/docs/advanced_usage.rst b/docs/advanced_usage.rst index 9906fddea89..c1c41df00c0 100644 --- a/docs/advanced_usage.rst +++ b/docs/advanced_usage.rst @@ -332,24 +332,25 @@ configuring the tracer with a filters list. For instance, to filter out all traces of incoming requests to a specific url:: from ddtrace import tracer + from ddtrace.trace import TraceFilter + + class FilterbyName(TraceFilter): + def process_trace(self, trace): + for span in trace: + if span.name == "some_name": + # drop the full trace chunk + return None + return trace tracer.configure(settings={ 'FILTERS': [ - FilterRequestsOnUrl(r'http://test\.example\.com'), + FilterbyName(), ], }) The filters in the filters list will be applied sequentially to each trace and the resulting trace will either be sent to the Agent or discarded. -**Built-in filters** - -The library comes with a ``FilterRequestsOnUrl`` filter that can be used to -filter out incoming requests to specific urls: - -.. autoclass:: ddtrace.trace.FilterRequestsOnUrl - :members: - **Writing a custom filter** Create a filter by implementing a class with a ``process_trace`` method and diff --git a/releasenotes/notes/ddtrace-resourcefilter-deprecated-52b1c92d388b0518.yaml b/releasenotes/notes/ddtrace-resourcefilter-deprecated-52b1c92d388b0518.yaml new file mode 100644 index 00000000000..183249aa688 --- /dev/null +++ b/releasenotes/notes/ddtrace-resourcefilter-deprecated-52b1c92d388b0518.yaml @@ -0,0 +1,4 @@ +--- +deprecations: + - | + tracing: Deprecates ``ddtrace.filters.FilterRequestsOnUrl``. Spans should be filtered/sampled using DD_TRACE_SAMPLING_RULES configuration.
diff --git a/tests/tracer/test_filters.py b/tests/tracer/test_filters.py index 871405517b7..d632ceb4998 100644 --- a/tests/tracer/test_filters.py +++ b/tests/tracer/test_filters.py @@ -2,9 +2,9 @@ import pytest +from ddtrace._trace.filters import FilterRequestsOnUrl from ddtrace._trace.span import Span from ddtrace.ext.http import URL -from ddtrace.trace import FilterRequestsOnUrl from ddtrace.trace import TraceFilter