diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index b8a8a48d8f8..84e6c873642 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -115,6 +115,8 @@ ddtrace/contrib/flask_login/ @DataDog/asm-python ddtrace/contrib/webbrowser @DataDog/asm-python ddtrace/contrib/urllib @DataDog/asm-python ddtrace/internal/_exceptions.py @DataDog/asm-python +ddtrace/internal/appsec/ @DataDog/asm-python +ddtrace/internal/iast/ @DataDog/asm-python tests/appsec/ @DataDog/asm-python tests/contrib/dbapi/test_dbapi_appsec.py @DataDog/asm-python tests/contrib/subprocess @DataDog/asm-python diff --git a/.github/workflows/rust-ci.yml b/.github/workflows/rust-ci.yml index 4bd448eaf18..3241a0b763e 100644 --- a/.github/workflows/rust-ci.yml +++ b/.github/workflows/rust-ci.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - extension: ["src/core"] + extension: ["src/native"] steps: - uses: actions/checkout@v4 with: diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 748942af278..47d2baf065c 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -6,6 +6,7 @@ stages: - shared-pipeline - benchmarks - macrobenchmarks + - benchmarks-report - release variables: @@ -59,18 +60,20 @@ onboarding_tests_installer: matrix: - ONBOARDING_FILTER_WEBLOG: [test-app-python,test-app-python-container,test-app-python-alpine] - onboarding_tests_k8s_injection: parallel: matrix: - - WEBLOG_VARIANT: - - dd-lib-python-init-test-django - - dd-lib-python-init-test-django-gunicorn - - dd-lib-python-init-test-django-gunicorn-alpine - - dd-lib-python-init-test-django-preinstalled - - dd-lib-python-init-test-django-unsupported-package-force - - dd-lib-python-init-test-django-uvicorn - - dd-lib-python-init-test-protobuf-old + - WEBLOG_VARIANT: [dd-lib-python-init-test-django, ] + SCENARIO: [K8S_LIB_INJECTION, K8S_LIB_INJECTION_UDS, K8S_LIB_INJECTION_NO_AC, K8S_LIB_INJECTION_NO_AC_UDS, K8S_LIB_INJECTION_PROFILING_DISABLED, K8S_LIB_INJECTION_PROFILING_ENABLED, K8S_LIB_INJECTION_PROFILING_OVERRIDE] + 
K8S_CLUSTER_VERSION: ['7.56.2', '7.59.0'] + + - WEBLOG_VARIANT: [dd-lib-python-init-test-django-gunicorn, dd-lib-python-init-test-django-gunicorn-alpine, dd-lib-python-init-test-django-unsupported-package-force, dd-lib-python-init-test-django-uvicorn, dd-lib-python-init-test-protobuf-old ] + SCENARIO: [K8S_LIB_INJECTION, K8S_LIB_INJECTION_PROFILING_ENABLED] + K8S_CLUSTER_VERSION: ['7.56.2', '7.59.0'] + + - WEBLOG_VARIANT: [dd-lib-python-init-test-django-preinstalled] + SCENARIO: [K8S_LIB_INJECTION, K8S_LIB_INJECTION_UDS, K8S_LIB_INJECTION_NO_AC, K8S_LIB_INJECTION_NO_AC_UDS] + K8S_CLUSTER_VERSION: ['7.56.2', '7.59.0'] deploy_to_di_backend:manual: stage: shared-pipeline diff --git a/.gitlab/benchmarks.yml b/.gitlab/benchmarks.yml index 9d56afcdf09..e922d315444 100644 --- a/.gitlab/benchmarks.yml +++ b/.gitlab/benchmarks.yml @@ -25,7 +25,6 @@ variables: paths: - reports/ expire_in: 3 months - allow_failure: true # Allow failure, so partial results are uploaded variables: UPSTREAM_PROJECT_ID: $CI_PROJECT_ID # The ID of the current project. This ID is unique across all projects on the GitLab instance. UPSTREAM_PROJECT_NAME: $CI_PROJECT_NAME # "dd-trace-py" @@ -60,8 +59,7 @@ microbenchmarks: benchmarks-pr-comment: image: $MICROBENCHMARKS_CI_IMAGE tags: ["arch:amd64"] - stage: benchmarks - needs: [ "microbenchmarks" ] + stage: benchmarks-report when: always script: - export REPORTS_DIR="$(pwd)/reports/" && (mkdir "${REPORTS_DIR}" || :) @@ -78,6 +76,20 @@ benchmarks-pr-comment: UPSTREAM_COMMIT_SHA: $CI_COMMIT_SHA # The commit revision the project is built for. 
KUBERNETES_SERVICE_ACCOUNT_OVERWRITE: dd-trace-py +check-big-regressions: + stage: benchmarks-report + when: always + tags: ["arch:amd64"] + image: $MICROBENCHMARKS_CI_IMAGE + script: + - export ARTIFACTS_DIR="$(pwd)/reports/" + - git config --global url."https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.ddbuild.io/DataDog/".insteadOf "https://github.com/DataDog/" + - git clone --branch dd-trace-py https://github.com/DataDog/benchmarking-platform /platform && cd /platform + - bp-runner bp-runner.fail-on-regression.yml --debug + variables: + # Gitlab and BP specific env vars. Do not modify. + KUBERNETES_SERVICE_ACCOUNT_OVERWRITE: dd-trace-py + benchmark-serverless: stage: benchmarks image: $SLS_CI_IMAGE diff --git a/.riot/requirements/107d8f2.txt b/.riot/requirements/107d8f2.txt new file mode 100644 index 00000000000..7bed129ddaf --- /dev/null +++ b/.riot/requirements/107d8f2.txt @@ -0,0 +1,54 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --no-annotate --resolver=backtracking .riot/requirements/107d8f2.in +# +annotated-types==0.7.0 +anyio==4.8.0 +attrs==24.3.0 +certifi==2024.12.14 +coverage[toml]==7.6.10 +distro==1.9.0 +exceptiongroup==1.2.2 +h11==0.14.0 +httpcore==1.0.7 +httpx==0.27.2 +hypothesis==6.45.0 +idna==3.10 +importlib-metadata==8.6.1 +iniconfig==2.0.0 +mock==5.1.0 +multidict==6.1.0 +numpy==2.0.2 +openai[datalib,embeddings]==1.30.1 +opentracing==2.4.0 +packaging==24.2 +pandas==2.2.3 +pandas-stubs==2.2.2.240807 +pillow==9.5.0 +pluggy==1.5.0 +propcache==0.2.1 +pydantic==2.10.5 +pydantic-core==2.27.2 +pytest==8.3.4 +pytest-asyncio==0.21.1 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +python-dateutil==2.9.0.post0 +pytz==2024.2 +pyyaml==6.0.2 +six==1.17.0 +sniffio==1.3.1 +sortedcontainers==2.4.0 +tomli==2.2.1 +tqdm==4.67.1 +types-pytz==2024.2.0.20241221 +typing-extensions==4.12.2 +tzdata==2025.1 +urllib3==1.26.20 +vcrpy==4.2.1 +wrapt==1.17.2 +yarl==1.18.3 +zipp==3.21.0 diff 
--git a/.riot/requirements/130158f.txt b/.riot/requirements/130158f.txt new file mode 100644 index 00000000000..037c7010f33 --- /dev/null +++ b/.riot/requirements/130158f.txt @@ -0,0 +1,48 @@ +# +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: +# +# pip-compile --no-annotate .riot/requirements/130158f.in +# +annotated-types==0.7.0 +anyio==4.8.0 +attrs==24.3.0 +certifi==2024.12.14 +charset-normalizer==3.4.1 +coverage[toml]==7.6.10 +distro==1.9.0 +h11==0.14.0 +httpcore==1.0.7 +httpx==0.28.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +jiter==0.8.2 +mock==5.1.0 +multidict==6.1.0 +openai==1.60.0 +opentracing==2.4.0 +packaging==24.2 +pillow==11.1.0 +pluggy==1.5.0 +propcache==0.2.1 +pydantic==2.10.5 +pydantic-core==2.27.2 +pytest==8.3.4 +pytest-asyncio==0.21.1 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +pyyaml==6.0.2 +regex==2024.11.6 +requests==2.32.3 +six==1.17.0 +sniffio==1.3.1 +sortedcontainers==2.4.0 +tiktoken==0.8.0 +tqdm==4.67.1 +typing-extensions==4.12.2 +urllib3==1.26.20 +vcrpy==4.2.1 +wrapt==1.17.2 +yarl==1.18.3 diff --git a/.riot/requirements/13804af.txt b/.riot/requirements/13804af.txt deleted file mode 100644 index 7035a764386..00000000000 --- a/.riot/requirements/13804af.txt +++ /dev/null @@ -1,57 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/13804af.in -# -annotated-types==0.7.0 -anyio==4.4.0 -attrs==24.2.0 -certifi==2024.7.4 -charset-normalizer==3.3.2 -coverage[toml]==7.6.1 -distro==1.9.0 -exceptiongroup==1.2.2 -h11==0.14.0 -httpcore==1.0.5 -httpx==0.27.0 -hypothesis==6.45.0 -idna==3.8 -importlib-metadata==8.4.0 -iniconfig==2.0.0 -mock==5.1.0 -multidict==6.0.5 -numpy==1.24.4 -openai[datalib]==1.30.1 -opentracing==2.4.0 -packaging==24.1 -pandas==2.0.3 -pandas-stubs==2.0.3.230814 -pillow==10.1.0 -pluggy==1.5.0 -pydantic==2.8.2 -pydantic-core==2.20.1 -pytest==8.3.2 
-pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -pytz==2024.1 -pyyaml==6.0.2 -regex==2024.7.24 -requests==2.32.3 -six==1.16.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tiktoken==0.7.0 -tomli==2.0.1 -tqdm==4.66.5 -types-pytz==2024.1.0.20240417 -typing-extensions==4.12.2 -tzdata==2024.1 -urllib3==1.26.19 -vcrpy==4.2.1 -wrapt==1.16.0 -yarl==1.9.4 -zipp==3.20.1 diff --git a/.riot/requirements/13fec34.txt b/.riot/requirements/13fec34.txt deleted file mode 100644 index 8858506f793..00000000000 --- a/.riot/requirements/13fec34.txt +++ /dev/null @@ -1,49 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/13fec34.in -# -annotated-types==0.7.0 -anyio==3.7.1 -attrs==24.2.0 -certifi==2024.7.4 -coverage[toml]==7.6.1 -distro==1.9.0 -h11==0.14.0 -httpcore==1.0.5 -httpx==0.27.0 -hypothesis==6.45.0 -idna==3.8 -iniconfig==2.0.0 -mock==5.1.0 -multidict==6.0.5 -numpy==2.1.0 -openai[datalib,embeddings]==1.1.1 -opentracing==2.4.0 -packaging==24.1 -pandas==2.2.2 -pandas-stubs==2.2.2.240807 -pillow==9.5.0 -pluggy==1.5.0 -pydantic==2.8.2 -pydantic-core==2.20.1 -pytest==8.3.2 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -pytz==2024.1 -pyyaml==6.0.2 -six==1.16.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tqdm==4.66.5 -types-pytz==2024.1.0.20240417 -typing-extensions==4.12.2 -tzdata==2024.1 -urllib3==1.26.19 -vcrpy==4.2.1 -wrapt==1.16.0 -yarl==1.9.4 diff --git a/.riot/requirements/1825740.txt b/.riot/requirements/1825740.txt index b4660fad985..d1ef7a92bc0 100644 --- a/.riot/requirements/1825740.txt +++ b/.riot/requirements/1825740.txt @@ -9,13 +9,13 @@ aiosignal==1.3.1 async-timeout==4.0.3 asynctest==0.13.0 attrs==24.2.0 -certifi==2024.7.4 -charset-normalizer==3.3.2 +certifi==2024.12.14 +charset-normalizer==3.4.1 coverage[toml]==7.2.7 
exceptiongroup==1.2.2 frozenlist==1.3.3 hypothesis==6.45.0 -idna==3.8 +idna==3.10 importlib-metadata==6.7.0 iniconfig==2.0.0 joblib==1.3.2 @@ -36,13 +36,13 @@ pyyaml==6.0.1 requests==2.31.0 scikit-learn==1.0.2 scipy==1.7.3 -six==1.16.0 +six==1.17.0 sortedcontainers==2.4.0 threadpoolctl==3.1.0 tomli==2.0.1 -tqdm==4.66.5 +tqdm==4.67.1 typing-extensions==4.7.1 -urllib3==1.26.19 +urllib3==1.26.20 vcrpy==4.2.1 wrapt==1.16.0 yarl==1.9.4 diff --git a/.riot/requirements/18de44f.txt b/.riot/requirements/18de44f.txt new file mode 100644 index 00000000000..702b980c641 --- /dev/null +++ b/.riot/requirements/18de44f.txt @@ -0,0 +1,52 @@ +# +# This file is autogenerated by pip-compile with Python 3.8 +# by the following command: +# +# pip-compile --no-annotate --resolver=backtracking .riot/requirements/18de44f.in +# +annotated-types==0.7.0 +anyio==4.5.2 +attrs==24.3.0 +certifi==2024.12.14 +charset-normalizer==3.4.1 +coverage[toml]==7.6.1 +distro==1.9.0 +exceptiongroup==1.2.2 +h11==0.14.0 +httpcore==1.0.7 +httpx==0.28.1 +hypothesis==6.45.0 +idna==3.10 +importlib-metadata==8.5.0 +iniconfig==2.0.0 +jiter==0.8.2 +mock==5.1.0 +multidict==6.1.0 +openai==1.60.0 +opentracing==2.4.0 +packaging==24.2 +pillow==10.4.0 +pluggy==1.5.0 +propcache==0.2.0 +pydantic==2.10.5 +pydantic-core==2.27.2 +pytest==8.3.4 +pytest-asyncio==0.21.1 +pytest-cov==5.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.15.0 +pyyaml==6.0.2 +regex==2024.11.6 +requests==2.32.3 +six==1.17.0 +sniffio==1.3.1 +sortedcontainers==2.4.0 +tiktoken==0.7.0 +tomli==2.2.1 +tqdm==4.67.1 +typing-extensions==4.12.2 +urllib3==1.26.20 +vcrpy==4.2.1 +wrapt==1.17.2 +yarl==1.15.2 +zipp==3.20.2 diff --git a/.riot/requirements/1ad89c5.txt b/.riot/requirements/1ad89c5.txt new file mode 100644 index 00000000000..b10206e12d9 --- /dev/null +++ b/.riot/requirements/1ad89c5.txt @@ -0,0 +1,50 @@ +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile --no-annotate .riot/requirements/1ad89c5.in +# 
+annotated-types==0.7.0 +anyio==4.8.0 +attrs==24.3.0 +certifi==2024.12.14 +charset-normalizer==3.4.1 +coverage[toml]==7.6.10 +distro==1.9.0 +exceptiongroup==1.2.2 +h11==0.14.0 +httpcore==1.0.7 +httpx==0.28.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +jiter==0.8.2 +mock==5.1.0 +multidict==6.1.0 +openai==1.60.0 +opentracing==2.4.0 +packaging==24.2 +pillow==11.1.0 +pluggy==1.5.0 +propcache==0.2.1 +pydantic==2.10.5 +pydantic-core==2.27.2 +pytest==8.3.4 +pytest-asyncio==0.21.1 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +pyyaml==6.0.2 +regex==2024.11.6 +requests==2.32.3 +six==1.17.0 +sniffio==1.3.1 +sortedcontainers==2.4.0 +tiktoken==0.8.0 +tomli==2.2.1 +tqdm==4.67.1 +typing-extensions==4.12.2 +urllib3==1.26.20 +vcrpy==4.2.1 +wrapt==1.17.2 +yarl==1.18.3 diff --git a/.riot/requirements/1db5311.txt b/.riot/requirements/1e6bd37.txt similarity index 55% rename from .riot/requirements/1db5311.txt rename to .riot/requirements/1e6bd37.txt index c29bc9bdb8f..11bb5871c14 100644 --- a/.riot/requirements/1db5311.txt +++ b/.riot/requirements/1e6bd37.txt @@ -2,52 +2,53 @@ # This file is autogenerated by pip-compile with Python 3.8 # by the following command: # -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1db5311.in +# pip-compile --no-annotate --resolver=backtracking .riot/requirements/1e6bd37.in # annotated-types==0.7.0 -anyio==4.4.0 -attrs==24.2.0 -certifi==2024.7.4 +anyio==4.5.2 +attrs==24.3.0 +certifi==2024.12.14 coverage[toml]==7.6.1 distro==1.9.0 exceptiongroup==1.2.2 h11==0.14.0 -httpcore==1.0.5 -httpx==0.27.0 +httpcore==1.0.7 +httpx==0.27.2 hypothesis==6.45.0 -idna==3.8 -importlib-metadata==8.4.0 +idna==3.10 +importlib-metadata==8.5.0 iniconfig==2.0.0 mock==5.1.0 -multidict==6.0.5 +multidict==6.1.0 numpy==1.24.4 openai[datalib,embeddings]==1.30.1 opentracing==2.4.0 -packaging==24.1 +packaging==24.2 pandas==2.0.3 pandas-stubs==2.0.3.230814 pillow==9.5.0 pluggy==1.5.0 -pydantic==2.8.2 -pydantic-core==2.20.1 -pytest==8.3.2 
+propcache==0.2.0 +pydantic==2.10.5 +pydantic-core==2.27.2 +pytest==8.3.4 pytest-asyncio==0.21.1 pytest-cov==5.0.0 pytest-mock==3.14.0 pytest-randomly==3.15.0 python-dateutil==2.9.0.post0 -pytz==2024.1 +pytz==2024.2 pyyaml==6.0.2 -six==1.16.0 +six==1.17.0 sniffio==1.3.1 sortedcontainers==2.4.0 -tomli==2.0.1 -tqdm==4.66.5 -types-pytz==2024.1.0.20240417 +tomli==2.2.1 +tqdm==4.67.1 +types-pytz==2024.2.0.20241221 typing-extensions==4.12.2 -tzdata==2024.1 -urllib3==1.26.19 +tzdata==2025.1 +urllib3==1.26.20 vcrpy==4.2.1 -wrapt==1.16.0 -yarl==1.9.4 -zipp==3.20.1 +wrapt==1.17.2 +yarl==1.15.2 +zipp==3.20.2 diff --git a/.riot/requirements/1ec15f5.txt b/.riot/requirements/1ec15f5.txt deleted file mode 100644 index b4479a2fb39..00000000000 --- a/.riot/requirements/1ec15f5.txt +++ /dev/null @@ -1,57 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1ec15f5.in -# -annotated-types==0.7.0 -anyio==4.4.0 -attrs==24.2.0 -certifi==2024.7.4 -charset-normalizer==3.3.2 -coverage[toml]==7.6.1 -distro==1.9.0 -exceptiongroup==1.2.2 -h11==0.14.0 -httpcore==1.0.5 -httpx==0.27.0 -hypothesis==6.45.0 -idna==3.8 -importlib-metadata==8.4.0 -iniconfig==2.0.0 -mock==5.1.0 -multidict==6.0.5 -numpy==2.0.1 -openai[datalib]==1.30.1 -opentracing==2.4.0 -packaging==24.1 -pandas==2.2.2 -pandas-stubs==2.2.2.240807 -pillow==10.1.0 -pluggy==1.5.0 -pydantic==2.8.2 -pydantic-core==2.20.1 -pytest==8.3.2 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -pytz==2024.1 -pyyaml==6.0.2 -regex==2024.7.24 -requests==2.32.3 -six==1.16.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tiktoken==0.7.0 -tomli==2.0.1 -tqdm==4.66.5 -types-pytz==2024.1.0.20240417 -typing-extensions==4.12.2 -tzdata==2024.1 -urllib3==1.26.19 -vcrpy==4.2.1 -wrapt==1.16.0 -yarl==1.9.4 -zipp==3.20.1 diff --git a/.riot/requirements/1ee49b9.txt 
b/.riot/requirements/1ee49b9.txt deleted file mode 100644 index f170e2885c4..00000000000 --- a/.riot/requirements/1ee49b9.txt +++ /dev/null @@ -1,53 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/1ee49b9.in -# -annotated-types==0.7.0 -anyio==4.4.0 -attrs==24.2.0 -certifi==2024.7.4 -charset-normalizer==3.3.2 -coverage[toml]==7.6.1 -distro==1.9.0 -h11==0.14.0 -httpcore==1.0.5 -httpx==0.27.0 -hypothesis==6.45.0 -idna==3.8 -iniconfig==2.0.0 -mock==5.1.0 -multidict==6.0.5 -numpy==2.1.0 -openai[datalib]==1.30.1 -opentracing==2.4.0 -packaging==24.1 -pandas==2.2.2 -pandas-stubs==2.2.2.240807 -pillow==10.1.0 -pluggy==1.5.0 -pydantic==2.8.2 -pydantic-core==2.20.1 -pytest==8.3.2 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -pytz==2024.1 -pyyaml==6.0.2 -regex==2024.7.24 -requests==2.32.3 -six==1.16.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tiktoken==0.7.0 -tqdm==4.66.5 -types-pytz==2024.1.0.20240417 -typing-extensions==4.12.2 -tzdata==2024.1 -urllib3==1.26.19 -vcrpy==4.2.1 -wrapt==1.16.0 -yarl==1.9.4 diff --git a/.riot/requirements/2634bf7.txt b/.riot/requirements/2634bf7.txt new file mode 100644 index 00000000000..0000f6e28ff --- /dev/null +++ b/.riot/requirements/2634bf7.txt @@ -0,0 +1,48 @@ +# +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: +# +# pip-compile --no-annotate --resolver=backtracking .riot/requirements/2634bf7.in +# +annotated-types==0.7.0 +anyio==4.8.0 +attrs==24.3.0 +certifi==2024.12.14 +charset-normalizer==3.4.1 +coverage[toml]==7.6.10 +distro==1.9.0 +h11==0.14.0 +httpcore==1.0.7 +httpx==0.28.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +jiter==0.8.2 +mock==5.1.0 +multidict==6.1.0 +openai==1.60.0 +opentracing==2.4.0 +packaging==24.2 +pillow==11.1.0 +pluggy==1.5.0 +propcache==0.2.1 +pydantic==2.10.5 
+pydantic-core==2.27.2 +pytest==8.3.4 +pytest-asyncio==0.21.1 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +pyyaml==6.0.2 +regex==2024.11.6 +requests==2.32.3 +six==1.17.0 +sniffio==1.3.1 +sortedcontainers==2.4.0 +tiktoken==0.8.0 +tqdm==4.67.1 +typing-extensions==4.12.2 +urllib3==1.26.20 +vcrpy==4.2.1 +wrapt==1.17.2 +yarl==1.18.3 diff --git a/.riot/requirements/35ce786.txt b/.riot/requirements/35ce786.txt deleted file mode 100644 index 3489155be91..00000000000 --- a/.riot/requirements/35ce786.txt +++ /dev/null @@ -1,55 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/35ce786.in -# -annotated-types==0.7.0 -anyio==4.4.0 -attrs==24.2.0 -certifi==2024.7.4 -charset-normalizer==3.3.2 -coverage[toml]==7.6.1 -distro==1.9.0 -exceptiongroup==1.2.2 -h11==0.14.0 -httpcore==1.0.5 -httpx==0.27.0 -hypothesis==6.45.0 -idna==3.8 -iniconfig==2.0.0 -mock==5.1.0 -multidict==6.0.5 -numpy==2.1.0 -openai[datalib]==1.30.1 -opentracing==2.4.0 -packaging==24.1 -pandas==2.2.2 -pandas-stubs==2.2.2.240807 -pillow==10.1.0 -pluggy==1.5.0 -pydantic==2.8.2 -pydantic-core==2.20.1 -pytest==8.3.2 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -pytz==2024.1 -pyyaml==6.0.2 -regex==2024.7.24 -requests==2.32.3 -six==1.16.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tiktoken==0.7.0 -tomli==2.0.1 -tqdm==4.66.5 -types-pytz==2024.1.0.20240417 -typing-extensions==4.12.2 -tzdata==2024.1 -urllib3==1.26.19 -vcrpy==4.2.1 -wrapt==1.16.0 -yarl==1.9.4 diff --git a/.riot/requirements/4a85f6d.txt b/.riot/requirements/4a85f6d.txt new file mode 100644 index 00000000000..41953c69178 --- /dev/null +++ b/.riot/requirements/4a85f6d.txt @@ -0,0 +1,50 @@ +# +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: +# +# pip-compile --no-annotate --resolver=backtracking 
.riot/requirements/4a85f6d.in +# +annotated-types==0.7.0 +anyio==4.8.0 +attrs==24.3.0 +certifi==2024.12.14 +coverage[toml]==7.6.10 +distro==1.9.0 +h11==0.14.0 +httpcore==1.0.7 +httpx==0.27.2 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +multidict==6.1.0 +numpy==2.2.2 +openai[datalib,embeddings]==1.30.1 +opentracing==2.4.0 +packaging==24.2 +pandas==2.2.3 +pandas-stubs==2.2.3.241126 +pillow==9.5.0 +pluggy==1.5.0 +propcache==0.2.1 +pydantic==2.10.5 +pydantic-core==2.27.2 +pytest==8.3.4 +pytest-asyncio==0.21.1 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +python-dateutil==2.9.0.post0 +pytz==2024.2 +pyyaml==6.0.2 +six==1.17.0 +sniffio==1.3.1 +sortedcontainers==2.4.0 +tqdm==4.67.1 +types-pytz==2024.2.0.20241221 +typing-extensions==4.12.2 +tzdata==2025.1 +urllib3==1.26.20 +vcrpy==4.2.1 +wrapt==1.17.2 +yarl==1.18.3 diff --git a/.riot/requirements/4d27459.txt b/.riot/requirements/4d27459.txt new file mode 100644 index 00000000000..630c81558f3 --- /dev/null +++ b/.riot/requirements/4d27459.txt @@ -0,0 +1,48 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --no-annotate .riot/requirements/4d27459.in +# +annotated-types==0.7.0 +anyio==4.8.0 +attrs==24.3.0 +certifi==2024.12.14 +charset-normalizer==3.4.1 +coverage[toml]==7.6.10 +distro==1.9.0 +h11==0.14.0 +httpcore==1.0.7 +httpx==0.28.1 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +jiter==0.8.2 +mock==5.1.0 +multidict==6.1.0 +openai==1.60.0 +opentracing==2.4.0 +packaging==24.2 +pillow==11.1.0 +pluggy==1.5.0 +propcache==0.2.1 +pydantic==2.10.5 +pydantic-core==2.27.2 +pytest==8.3.4 +pytest-asyncio==0.21.1 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +pyyaml==6.0.2 +regex==2024.11.6 +requests==2.32.3 +six==1.17.0 +sniffio==1.3.1 +sortedcontainers==2.4.0 +tiktoken==0.8.0 +tqdm==4.67.1 +typing-extensions==4.12.2 +urllib3==1.26.20 +vcrpy==4.2.1 +wrapt==1.17.2 +yarl==1.18.3 diff --git 
a/.riot/requirements/530c983.txt b/.riot/requirements/530c983.txt new file mode 100644 index 00000000000..c07f9a6b918 --- /dev/null +++ b/.riot/requirements/530c983.txt @@ -0,0 +1,52 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --no-annotate --resolver=backtracking .riot/requirements/530c983.in +# +annotated-types==0.7.0 +anyio==4.8.0 +attrs==24.3.0 +certifi==2024.12.14 +charset-normalizer==3.4.1 +coverage[toml]==7.6.10 +distro==1.9.0 +exceptiongroup==1.2.2 +h11==0.14.0 +httpcore==1.0.7 +httpx==0.28.1 +hypothesis==6.45.0 +idna==3.10 +importlib-metadata==8.6.1 +iniconfig==2.0.0 +jiter==0.8.2 +mock==5.1.0 +multidict==6.1.0 +openai==1.60.0 +opentracing==2.4.0 +packaging==24.2 +pillow==11.1.0 +pluggy==1.5.0 +propcache==0.2.1 +pydantic==2.10.5 +pydantic-core==2.27.2 +pytest==8.3.4 +pytest-asyncio==0.21.1 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +pyyaml==6.0.2 +regex==2024.11.6 +requests==2.32.3 +six==1.17.0 +sniffio==1.3.1 +sortedcontainers==2.4.0 +tiktoken==0.8.0 +tomli==2.2.1 +tqdm==4.67.1 +typing-extensions==4.12.2 +urllib3==1.26.20 +vcrpy==4.2.1 +wrapt==1.17.2 +yarl==1.18.3 +zipp==3.21.0 diff --git a/.riot/requirements/5da4fd8.txt b/.riot/requirements/5da4fd8.txt deleted file mode 100644 index a700b91bf81..00000000000 --- a/.riot/requirements/5da4fd8.txt +++ /dev/null @@ -1,49 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/5da4fd8.in -# -annotated-types==0.7.0 -anyio==4.4.0 -attrs==24.2.0 -certifi==2024.7.4 -coverage[toml]==7.6.1 -distro==1.9.0 -h11==0.14.0 -httpcore==1.0.5 -httpx==0.27.0 -hypothesis==6.45.0 -idna==3.8 -iniconfig==2.0.0 -mock==5.1.0 -multidict==6.0.5 -numpy==2.1.0 -openai[datalib,embeddings]==1.30.1 -opentracing==2.4.0 -packaging==24.1 -pandas==2.2.2 -pandas-stubs==2.2.2.240807 -pillow==9.5.0 -pluggy==1.5.0 -pydantic==2.8.2 
-pydantic-core==2.20.1 -pytest==8.3.2 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -pytz==2024.1 -pyyaml==6.0.2 -six==1.16.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tqdm==4.66.5 -types-pytz==2024.1.0.20240417 -typing-extensions==4.12.2 -tzdata==2024.1 -urllib3==1.26.19 -vcrpy==4.2.1 -wrapt==1.16.0 -yarl==1.9.4 diff --git a/.riot/requirements/84ec59a.txt b/.riot/requirements/84ec59a.txt deleted file mode 100644 index 9b079cf3a38..00000000000 --- a/.riot/requirements/84ec59a.txt +++ /dev/null @@ -1,53 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/84ec59a.in -# -annotated-types==0.7.0 -anyio==3.7.1 -attrs==24.2.0 -certifi==2024.7.4 -coverage[toml]==7.6.1 -distro==1.9.0 -exceptiongroup==1.2.2 -h11==0.14.0 -httpcore==1.0.5 -httpx==0.27.0 -hypothesis==6.45.0 -idna==3.8 -importlib-metadata==8.4.0 -iniconfig==2.0.0 -mock==5.1.0 -multidict==6.0.5 -numpy==1.24.4 -openai[datalib,embeddings]==1.1.1 -opentracing==2.4.0 -packaging==24.1 -pandas==2.0.3 -pandas-stubs==2.0.3.230814 -pillow==9.5.0 -pluggy==1.5.0 -pydantic==2.8.2 -pydantic-core==2.20.1 -pytest==8.3.2 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -pytz==2024.1 -pyyaml==6.0.2 -six==1.16.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.0.1 -tqdm==4.66.5 -types-pytz==2024.1.0.20240417 -typing-extensions==4.12.2 -tzdata==2024.1 -urllib3==1.26.19 -vcrpy==4.2.1 -wrapt==1.16.0 -yarl==1.9.4 -zipp==3.20.1 diff --git a/.riot/requirements/87a1fff.txt b/.riot/requirements/87a1fff.txt deleted file mode 100644 index b85e76cdd56..00000000000 --- a/.riot/requirements/87a1fff.txt +++ /dev/null @@ -1,53 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate 
.riot/requirements/87a1fff.in -# -annotated-types==0.7.0 -anyio==4.4.0 -attrs==24.2.0 -certifi==2024.7.4 -coverage[toml]==7.6.1 -distro==1.9.0 -exceptiongroup==1.2.2 -h11==0.14.0 -httpcore==1.0.5 -httpx==0.27.0 -hypothesis==6.45.0 -idna==3.8 -importlib-metadata==8.4.0 -iniconfig==2.0.0 -mock==5.1.0 -multidict==6.0.5 -numpy==2.0.1 -openai[datalib,embeddings]==1.30.1 -opentracing==2.4.0 -packaging==24.1 -pandas==2.2.2 -pandas-stubs==2.2.2.240807 -pillow==9.5.0 -pluggy==1.5.0 -pydantic==2.8.2 -pydantic-core==2.20.1 -pytest==8.3.2 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -pytz==2024.1 -pyyaml==6.0.2 -six==1.16.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.0.1 -tqdm==4.66.5 -types-pytz==2024.1.0.20240417 -typing-extensions==4.12.2 -tzdata==2024.1 -urllib3==1.26.19 -vcrpy==4.2.1 -wrapt==1.16.0 -yarl==1.9.4 -zipp==3.20.1 diff --git a/.riot/requirements/b5d5a35.txt b/.riot/requirements/b5d5a35.txt new file mode 100644 index 00000000000..7838b7abd2c --- /dev/null +++ b/.riot/requirements/b5d5a35.txt @@ -0,0 +1,52 @@ +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile --no-annotate .riot/requirements/b5d5a35.in +# +annotated-types==0.7.0 +anyio==4.8.0 +attrs==24.3.0 +certifi==2024.12.14 +coverage[toml]==7.6.10 +distro==1.9.0 +exceptiongroup==1.2.2 +h11==0.14.0 +httpcore==1.0.7 +httpx==0.27.2 +hypothesis==6.45.0 +idna==3.10 +iniconfig==2.0.0 +mock==5.1.0 +multidict==6.1.0 +numpy==2.2.2 +openai[datalib,embeddings]==1.30.1 +opentracing==2.4.0 +packaging==24.2 +pandas==2.2.3 +pandas-stubs==2.2.3.241126 +pillow==9.5.0 +pluggy==1.5.0 +propcache==0.2.1 +pydantic==2.10.5 +pydantic-core==2.27.2 +pytest==8.3.4 +pytest-asyncio==0.21.1 +pytest-cov==6.0.0 +pytest-mock==3.14.0 +pytest-randomly==3.16.0 +python-dateutil==2.9.0.post0 +pytz==2024.2 +pyyaml==6.0.2 +six==1.17.0 +sniffio==1.3.1 +sortedcontainers==2.4.0 +tomli==2.2.1 +tqdm==4.67.1 
+types-pytz==2024.2.0.20241221 +typing-extensions==4.12.2 +tzdata==2025.1 +urllib3==1.26.20 +vcrpy==4.2.1 +wrapt==1.17.2 +yarl==1.18.3 diff --git a/.riot/requirements/c74f6e0.txt b/.riot/requirements/c74f6e0.txt deleted file mode 100644 index 63345853661..00000000000 --- a/.riot/requirements/c74f6e0.txt +++ /dev/null @@ -1,51 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/c74f6e0.in -# -annotated-types==0.7.0 -anyio==4.4.0 -attrs==24.2.0 -certifi==2024.7.4 -coverage[toml]==7.6.1 -distro==1.9.0 -exceptiongroup==1.2.2 -h11==0.14.0 -httpcore==1.0.5 -httpx==0.27.0 -hypothesis==6.45.0 -idna==3.8 -iniconfig==2.0.0 -mock==5.1.0 -multidict==6.0.5 -numpy==2.1.0 -openai[datalib,embeddings]==1.30.1 -opentracing==2.4.0 -packaging==24.1 -pandas==2.2.2 -pandas-stubs==2.2.2.240807 -pillow==9.5.0 -pluggy==1.5.0 -pydantic==2.8.2 -pydantic-core==2.20.1 -pytest==8.3.2 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -pytz==2024.1 -pyyaml==6.0.2 -six==1.16.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.0.1 -tqdm==4.66.5 -types-pytz==2024.1.0.20240417 -typing-extensions==4.12.2 -tzdata==2024.1 -urllib3==1.26.19 -vcrpy==4.2.1 -wrapt==1.16.0 -yarl==1.9.4 diff --git a/.riot/requirements/cd2e4ea.txt b/.riot/requirements/cd2e4ea.txt deleted file mode 100644 index 24353dafa0c..00000000000 --- a/.riot/requirements/cd2e4ea.txt +++ /dev/null @@ -1,53 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/cd2e4ea.in -# -annotated-types==0.7.0 -anyio==3.7.1 -attrs==24.2.0 -certifi==2024.7.4 -coverage[toml]==7.6.1 -distro==1.9.0 -exceptiongroup==1.2.2 -h11==0.14.0 -httpcore==1.0.5 -httpx==0.27.0 -hypothesis==6.45.0 -idna==3.8 -importlib-metadata==8.4.0 -iniconfig==2.0.0 -mock==5.1.0 
-multidict==6.0.5 -numpy==2.0.1 -openai[datalib,embeddings]==1.1.1 -opentracing==2.4.0 -packaging==24.1 -pandas==2.2.2 -pandas-stubs==2.2.2.240807 -pillow==9.5.0 -pluggy==1.5.0 -pydantic==2.8.2 -pydantic-core==2.20.1 -pytest==8.3.2 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -pytz==2024.1 -pyyaml==6.0.2 -six==1.16.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.0.1 -tqdm==4.66.5 -types-pytz==2024.1.0.20240417 -typing-extensions==4.12.2 -tzdata==2024.1 -urllib3==1.26.19 -vcrpy==4.2.1 -wrapt==1.16.0 -yarl==1.9.4 -zipp==3.20.1 diff --git a/.riot/requirements/181216c.txt b/.riot/requirements/df60af6.txt similarity index 82% rename from .riot/requirements/181216c.txt rename to .riot/requirements/df60af6.txt index ac739930363..5143f0e0a74 100644 --- a/.riot/requirements/181216c.txt +++ b/.riot/requirements/df60af6.txt @@ -2,13 +2,13 @@ # This file is autogenerated by pip-compile with Python 3.7 # by the following command: # -# pip-compile --allow-unsafe --config=pyproject.toml --no-annotate --resolver=backtracking .riot/requirements/181216c.in +# pip-compile --allow-unsafe --config=pyproject.toml --no-annotate --resolver=backtracking .riot/requirements/df60af6.in # annotated-types==0.5.0 anyio==3.7.1 attrs==24.2.0 cached-property==1.5.2 -certifi==2024.7.4 +certifi==2024.12.14 coverage[toml]==7.2.7 distro==1.9.0 exceptiongroup==1.2.2 @@ -16,13 +16,13 @@ h11==0.14.0 httpcore==0.17.3 httpx==0.24.1 hypothesis==6.45.0 -idna==3.8 +idna==3.10 importlib-metadata==6.7.0 iniconfig==2.0.0 mock==5.1.0 multidict==6.0.5 numpy==1.21.6 -openai[datalib,embeddings]==1.30.1 +openai[datalib]==1.30.1 opentracing==2.4.0 packaging==24.0 pandas==1.3.5 @@ -37,15 +37,15 @@ pytest-cov==4.1.0 pytest-mock==3.11.1 pytest-randomly==3.12.0 python-dateutil==2.9.0.post0 -pytz==2024.1 +pytz==2024.2 pyyaml==6.0.1 -six==1.16.0 +six==1.17.0 sniffio==1.3.1 sortedcontainers==2.4.0 tomli==2.0.1 -tqdm==4.66.5 +tqdm==4.67.1 
typing-extensions==4.7.1 -urllib3==1.26.19 +urllib3==1.26.20 vcrpy==4.2.1 wrapt==1.16.0 yarl==1.9.4 diff --git a/.riot/requirements/f1c37b1.txt b/.riot/requirements/f1c37b1.txt deleted file mode 100644 index 4da5078a988..00000000000 --- a/.riot/requirements/f1c37b1.txt +++ /dev/null @@ -1,51 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --allow-unsafe --no-annotate .riot/requirements/f1c37b1.in -# -annotated-types==0.7.0 -anyio==3.7.1 -attrs==24.2.0 -certifi==2024.7.4 -coverage[toml]==7.6.1 -distro==1.9.0 -exceptiongroup==1.2.2 -h11==0.14.0 -httpcore==1.0.5 -httpx==0.27.0 -hypothesis==6.45.0 -idna==3.8 -iniconfig==2.0.0 -mock==5.1.0 -multidict==6.0.5 -numpy==2.1.0 -openai[datalib,embeddings]==1.1.1 -opentracing==2.4.0 -packaging==24.1 -pandas==2.2.2 -pandas-stubs==2.2.2.240807 -pillow==9.5.0 -pluggy==1.5.0 -pydantic==2.8.2 -pydantic-core==2.20.1 -pytest==8.3.2 -pytest-asyncio==0.21.1 -pytest-cov==5.0.0 -pytest-mock==3.14.0 -pytest-randomly==3.15.0 -python-dateutil==2.9.0.post0 -pytz==2024.1 -pyyaml==6.0.2 -six==1.16.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.0.1 -tqdm==4.66.5 -types-pytz==2024.1.0.20240417 -typing-extensions==4.12.2 -tzdata==2024.1 -urllib3==1.26.19 -vcrpy==4.2.1 -wrapt==1.16.0 -yarl==1.9.4 diff --git a/.riot/requirements/f7c30a0.txt b/.riot/requirements/f7c30a0.txt deleted file mode 100644 index 3e4716aede1..00000000000 --- a/.riot/requirements/f7c30a0.txt +++ /dev/null @@ -1,51 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.7 -# by the following command: -# -# pip-compile --allow-unsafe --config=pyproject.toml --no-annotate --resolver=backtracking .riot/requirements/f7c30a0.in -# -annotated-types==0.5.0 -anyio==3.7.1 -attrs==24.2.0 -certifi==2024.7.4 -coverage[toml]==7.2.7 -distro==1.9.0 -exceptiongroup==1.2.2 -h11==0.14.0 -httpcore==0.17.3 -httpx==0.24.1 -hypothesis==6.45.0 -idna==3.8 -importlib-metadata==6.7.0 -iniconfig==2.0.0 
-mock==5.1.0 -multidict==6.0.5 -numpy==1.21.6 -openai[datalib,embeddings]==1.1.1 -opentracing==2.4.0 -packaging==24.0 -pandas==1.3.5 -pandas-stubs==1.2.0.62 -pillow==9.5.0 -pluggy==1.2.0 -pydantic==2.5.3 -pydantic-core==2.14.6 -pytest==7.4.4 -pytest-asyncio==0.21.1 -pytest-cov==4.1.0 -pytest-mock==3.11.1 -pytest-randomly==3.12.0 -python-dateutil==2.9.0.post0 -pytz==2024.1 -pyyaml==6.0.1 -six==1.16.0 -sniffio==1.3.1 -sortedcontainers==2.4.0 -tomli==2.0.1 -tqdm==4.66.5 -typing-extensions==4.7.1 -urllib3==1.26.19 -vcrpy==4.2.1 -wrapt==1.16.0 -yarl==1.9.4 -zipp==3.15.0 diff --git a/benchmarks/http_propagation_extract/scenario.py b/benchmarks/http_propagation_extract/scenario.py index 8aa67f651c7..cdfe9f8dadd 100644 --- a/benchmarks/http_propagation_extract/scenario.py +++ b/benchmarks/http_propagation_extract/scenario.py @@ -29,6 +29,8 @@ def generate_headers(self): def run(self): if self.styles: config._propagation_style_extract = self.styles.split(",") if ("," in self.styles) else [self.styles] + if "none" in config._propagation_style_extract: + config._propagation_style_extract.remove("none") headers = self.generate_headers() diff --git a/ddtrace/__init__.py b/ddtrace/__init__.py index b555d1117ca..e480851926f 100644 --- a/ddtrace/__init__.py +++ b/ddtrace/__init__.py @@ -17,7 +17,7 @@ # configure ddtrace logger before other modules log configure_ddtrace_logger() # noqa: E402 -from .settings import _config as config +from .settings import _global_config as config # Enable telemetry writer and excepthook as early as possible to ensure we capture any exceptions from initialization diff --git a/ddtrace/_monkey.py b/ddtrace/_monkey.py index 75c70114ef2..8ede9f49ca4 100644 --- a/ddtrace/_monkey.py +++ b/ddtrace/_monkey.py @@ -12,7 +12,7 @@ from .internal import telemetry from .internal.logger import get_logger from .internal.utils import formats -from .settings import _config as config +from .settings import _global_config as config if TYPE_CHECKING: # pragma: no cover 
diff --git a/ddtrace/_trace/context.py b/ddtrace/_trace/context.py index 07bc3960b56..c69496c865e 100644 --- a/ddtrace/_trace/context.py +++ b/ddtrace/_trace/context.py @@ -12,9 +12,9 @@ from ddtrace._trace._span_link import SpanLink from ddtrace._trace.types import _MetaDictType from ddtrace._trace.types import _MetricDictType -from ddtrace.constants import ORIGIN_KEY -from ddtrace.constants import SAMPLING_PRIORITY_KEY -from ddtrace.constants import USER_ID_KEY +from ddtrace.constants import _ORIGIN_KEY +from ddtrace.constants import _SAMPLING_PRIORITY_KEY +from ddtrace.constants import _USER_ID_KEY from ddtrace.internal.compat import NumericType from ddtrace.internal.constants import MAX_UINT_64BITS as _MAX_UINT_64BITS from ddtrace.internal.constants import W3C_TRACEPARENT_KEY @@ -72,9 +72,9 @@ def __init__( self._is_remote: bool = is_remote if dd_origin is not None and _DD_ORIGIN_INVALID_CHARS_REGEX.search(dd_origin) is None: - self._meta[ORIGIN_KEY] = dd_origin + self._meta[_ORIGIN_KEY] = dd_origin if sampling_priority is not None: - self._metrics[SAMPLING_PRIORITY_KEY] = sampling_priority + self._metrics[_SAMPLING_PRIORITY_KEY] = sampling_priority if span_links is not None: self._span_links = span_links else: @@ -127,16 +127,16 @@ def _update_tags(self, span: "Span") -> None: @property def sampling_priority(self) -> Optional[NumericType]: """Return the context sampling priority for the trace.""" - return self._metrics.get(SAMPLING_PRIORITY_KEY) + return self._metrics.get(_SAMPLING_PRIORITY_KEY) @sampling_priority.setter def sampling_priority(self, value: Optional[NumericType]) -> None: with self._lock: if value is None: - if SAMPLING_PRIORITY_KEY in self._metrics: - del self._metrics[SAMPLING_PRIORITY_KEY] + if _SAMPLING_PRIORITY_KEY in self._metrics: + del self._metrics[_SAMPLING_PRIORITY_KEY] return - self._metrics[SAMPLING_PRIORITY_KEY] = value + self._metrics[_SAMPLING_PRIORITY_KEY] = value @property def _traceparent(self) -> str: @@ -180,22 +180,22 @@ 
def _tracestate(self) -> str: @property def dd_origin(self) -> Optional[Text]: """Get the origin of the trace.""" - return self._meta.get(ORIGIN_KEY) + return self._meta.get(_ORIGIN_KEY) @dd_origin.setter def dd_origin(self, value: Optional[Text]) -> None: """Set the origin of the trace.""" with self._lock: if value is None: - if ORIGIN_KEY in self._meta: - del self._meta[ORIGIN_KEY] + if _ORIGIN_KEY in self._meta: + del self._meta[_ORIGIN_KEY] return - self._meta[ORIGIN_KEY] = value + self._meta[_ORIGIN_KEY] = value @property def dd_user_id(self) -> Optional[Text]: """Get the user ID of the trace.""" - user_id = self._meta.get(USER_ID_KEY) + user_id = self._meta.get(_USER_ID_KEY) if user_id: return str(base64.b64decode(user_id), encoding="utf-8") return None @@ -205,10 +205,10 @@ def dd_user_id(self, value: Optional[Text]) -> None: """Set the user ID of the trace.""" with self._lock: if value is None: - if USER_ID_KEY in self._meta: - del self._meta[USER_ID_KEY] + if _USER_ID_KEY in self._meta: + del self._meta[_USER_ID_KEY] return - self._meta[USER_ID_KEY] = str(base64.b64encode(bytes(value, encoding="utf-8")), encoding="utf-8") + self._meta[_USER_ID_KEY] = str(base64.b64encode(bytes(value, encoding="utf-8")), encoding="utf-8") @property def _trace_id_64bits(self): diff --git a/ddtrace/_trace/processor/__init__.py b/ddtrace/_trace/processor/__init__.py index fc59a64828b..0437b65b364 100644 --- a/ddtrace/_trace/processor/__init__.py +++ b/ddtrace/_trace/processor/__init__.py @@ -14,7 +14,7 @@ from ddtrace._trace.span import _get_64_highest_order_bits_as_hex from ddtrace._trace.span import _is_top_level from ddtrace.constants import _APM_ENABLED_METRIC_KEY as MK_APM_ENABLED -from ddtrace.constants import SAMPLING_PRIORITY_KEY +from ddtrace.constants import _SAMPLING_PRIORITY_KEY from ddtrace.constants import USER_KEEP from ddtrace.internal import gitmetadata from ddtrace.internal import telemetry @@ -165,7 +165,7 @@ def process_trace(self, trace: List[Span]) -> 
Optional[List[Span]]: # In order to ensure that the agent does not update priority sampling rates # due to single spans sampling, we set all of these spans to manual keep. if config._trace_compute_stats: - span.set_metric(SAMPLING_PRIORITY_KEY, USER_KEEP) + span.set_metric(_SAMPLING_PRIORITY_KEY, USER_KEEP) break return trace diff --git a/ddtrace/_trace/sampler.py b/ddtrace/_trace/sampler.py index 6e2515ee152..96d61b9adcf 100644 --- a/ddtrace/_trace/sampler.py +++ b/ddtrace/_trace/sampler.py @@ -12,7 +12,7 @@ from typing import Tuple # noqa:F401 from ddtrace import config -from ddtrace.constants import SAMPLING_LIMIT_DECISION +from ddtrace.constants import _SAMPLING_LIMIT_DECISION from ..constants import ENV_KEY from ..internal.constants import _PRIORITY_CATEGORY @@ -22,7 +22,7 @@ from ..internal.rate_limiter import RateLimiter from ..internal.sampling import _get_highest_precedence_rule_matching from ..internal.sampling import _set_sampling_tags -from ..settings import _config as ddconfig +from ..settings import _global_config as ddconfig from .sampling_rule import SamplingRule @@ -342,7 +342,7 @@ def sample(self, span): # uses DatadogSampler._rate_limit_always_on to override this functionality. 
if sampled: sampled = self.limiter.is_allowed() - span.set_metric(SAMPLING_LIMIT_DECISION, self.limiter.effective_rate) + span.set_metric(_SAMPLING_LIMIT_DECISION, self.limiter.effective_rate) _set_sampling_tags( span, sampled, diff --git a/ddtrace/_trace/span.py b/ddtrace/_trace/span.py index afb3496db80..446239a8091 100644 --- a/ddtrace/_trace/span.py +++ b/ddtrace/_trace/span.py @@ -24,17 +24,17 @@ from ddtrace._trace.types import _MetricDictType from ddtrace._trace.types import _TagNameType from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SAMPLING_AGENT_DECISION +from ddtrace.constants import _SAMPLING_LIMIT_DECISION +from ddtrace.constants import _SAMPLING_RULE_DECISION +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import ERROR_MSG from ddtrace.constants import ERROR_STACK from ddtrace.constants import ERROR_TYPE from ddtrace.constants import MANUAL_DROP_KEY from ddtrace.constants import MANUAL_KEEP_KEY -from ddtrace.constants import SAMPLING_AGENT_DECISION -from ddtrace.constants import SAMPLING_LIMIT_DECISION -from ddtrace.constants import SAMPLING_RULE_DECISION from ddtrace.constants import SERVICE_KEY from ddtrace.constants import SERVICE_VERSION_KEY -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.constants import USER_KEEP from ddtrace.constants import USER_REJECT from ddtrace.constants import VERSION_KEY @@ -327,7 +327,7 @@ def _override_sampling_decision(self, decision: Optional[NumericType]): self.context.sampling_priority = decision set_sampling_decision_maker(self.context, SamplingMechanism.MANUAL) if self._local_root: - for key in (SAMPLING_RULE_DECISION, SAMPLING_AGENT_DECISION, SAMPLING_LIMIT_DECISION): + for key in (_SAMPLING_RULE_DECISION, _SAMPLING_AGENT_DECISION, _SAMPLING_LIMIT_DECISION): if key in self._local_root._metrics: del self._local_root._metrics[key] @@ -401,7 +401,7 @@ def set_tag(self, key: _TagNameType, value: Any = None) -> None: # Also set the 
`version` tag to the same value # DEV: Note that we do no return, we want to set both self.set_tag(VERSION_KEY, value) - elif key == SPAN_MEASURED_KEY: + elif key == _SPAN_MEASURED_KEY: # Set `_dd.measured` tag as a metric # DEV: `set_metric` will ensure it is an integer 0 or 1 if value is None: @@ -458,7 +458,7 @@ def set_tags(self, tags: Dict[_TagNameType, Any]) -> None: def set_metric(self, key: _TagNameType, value: NumericType) -> None: """This method sets a numeric tag value for the given key.""" # Enforce a specific constant for `_dd.measured` - if key == SPAN_MEASURED_KEY: + if key == _SPAN_MEASURED_KEY: try: value = int(bool(value)) except (ValueError, TypeError): diff --git a/ddtrace/_trace/trace_handlers.py b/ddtrace/_trace/trace_handlers.py index 96c33da8c6b..dab3f743146 100644 --- a/ddtrace/_trace/trace_handlers.py +++ b/ddtrace/_trace/trace_handlers.py @@ -18,8 +18,8 @@ ) from ddtrace._trace.utils_botocore.span_tags import set_botocore_response_metadata_tags from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.contrib.internal.botocore.constants import BOTOCORE_STEPFUNCTIONS_INPUT_KEY from ddtrace.contrib.internal.trace_utils import _set_url_tag @@ -334,7 +334,7 @@ def _on_request_span_modifier( # RequestContext` and possibly a url rule span.resource = " ".join((request.method, request.path)) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) # set analytics sample rate with global config enabled sample_rate = flask_config.get_analytics_sample_rate(use_global_config=True) if sample_rate is not None: @@ -366,7 +366,7 @@ def _on_request_span_modifier_post(ctx, flask_config, request, req_body): def _on_traced_get_response_pre(_, ctx: core.ExecutionContext, request, before_request_tags): before_request_tags(ctx["pin"], ctx.span, request) - 
ctx.span._metrics[SPAN_MEASURED_KEY] = 1 + ctx.span._metrics[_SPAN_MEASURED_KEY] = 1 def _on_django_finalize_response_pre(ctx, after_request_tags, request, response): diff --git a/ddtrace/_trace/tracer.py b/ddtrace/_trace/tracer.py index 84b9252930a..6e595bbe7c1 100644 --- a/ddtrace/_trace/tracer.py +++ b/ddtrace/_trace/tracer.py @@ -30,8 +30,8 @@ from ddtrace._trace.sampler import DatadogSampler from ddtrace._trace.span import Span from ddtrace.appsec._constants import APPSEC +from ddtrace.constants import _HOSTNAME_KEY from ddtrace.constants import ENV_KEY -from ddtrace.constants import HOSTNAME_KEY from ddtrace.constants import PID from ddtrace.constants import VERSION_KEY from ddtrace.internal import agent @@ -966,7 +966,7 @@ def _start_span( on_finish=[self._on_span_finish], ) if config._report_hostname: - span.set_tag_str(HOSTNAME_KEY, hostname.get_hostname()) + span.set_tag_str(_HOSTNAME_KEY, hostname.get_hostname()) if not span._parent: span.set_tag_str("runtime-id", get_runtime_id()) diff --git a/ddtrace/_trace/utils_botocore/span_tags.py b/ddtrace/_trace/utils_botocore/span_tags.py index 5394c2b397a..f90cbd60a51 100644 --- a/ddtrace/_trace/utils_botocore/span_tags.py +++ b/ddtrace/_trace/utils_botocore/span_tags.py @@ -7,8 +7,8 @@ from ddtrace import config from ddtrace._trace.utils_botocore.aws_payload_tagging import AWSPayloadTagging from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.ext import SpanKind from ddtrace.ext import aws from ddtrace.ext import http @@ -23,7 +23,7 @@ def set_botocore_patched_api_call_span_tags(span: Span, instance, args, params, span.set_tag_str(COMPONENT, config.botocore.integration_name) # set span.kind to the type of request being performed span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) if args: # DEV: 
join is the fastest way of concatenating strings that is compatible diff --git a/ddtrace/_trace/utils_redis.py b/ddtrace/_trace/utils_redis.py index 433d8578fbb..fde9c291cb5 100644 --- a/ddtrace/_trace/utils_redis.py +++ b/ddtrace/_trace/utils_redis.py @@ -7,8 +7,8 @@ from typing import Optional from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.contrib.internal.redis_utils import _extract_conn_tags from ddtrace.ext import SpanKind @@ -30,7 +30,7 @@ def _set_span_tags( span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) span.set_tag_str(COMPONENT, config_integration.integration_name) span.set_tag_str(db.SYSTEM, redisx.APP) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) if query is not None: span_name = schematize_cache_operation(redisx.RAWCMD, cache_provider=redisx.APP) # type: ignore[operator] span.set_tag_str(span_name, query) diff --git a/ddtrace/appsec/_deduplications.py b/ddtrace/appsec/_deduplications.py index bef013433d5..c4c669b4991 100644 --- a/ddtrace/appsec/_deduplications.py +++ b/ddtrace/appsec/_deduplications.py @@ -28,9 +28,12 @@ def _reset_cache(self): """ self.reported_logs.clear() + def _check_deduplication(self): + return asm_config._asm_deduplication_enabled + def __call__(self, *args, **kwargs): result = None - if asm_config._deduplication_enabled: + if self._check_deduplication(): raw_log_hash = hash("".join([str(arg) for arg in self._extract(args)])) last_reported_timestamp = self.reported_logs.get(raw_log_hash, M_INF) + self._time_lapse current = monotonic() diff --git a/ddtrace/appsec/_iast/_ast/ast_patching.py b/ddtrace/appsec/_iast/_ast/ast_patching.py index bb3a9c74d44..44bef969bff 100644 --- a/ddtrace/appsec/_iast/_ast/ast_patching.py +++ b/ddtrace/appsec/_iast/_ast/ast_patching.py @@ -33,6 +33,7 @@ "beautifulsoup4.", 
"cachetools.", "cryptography.", + "django.", "docutils.", "idna.", "iniconfig.", diff --git a/ddtrace/appsec/_iast/_handlers.py b/ddtrace/appsec/_iast/_handlers.py index 7a5113a7ddd..4d4628d910e 100644 --- a/ddtrace/appsec/_iast/_handlers.py +++ b/ddtrace/appsec/_iast/_handlers.py @@ -5,7 +5,8 @@ from wrapt import wrap_function_wrapper as _w from ddtrace.appsec._iast import _is_iast_enabled -from ddtrace.appsec._iast._iast_request_context import in_iast_context +from ddtrace.appsec._iast._iast_request_context import get_iast_stacktrace_reported +from ddtrace.appsec._iast._iast_request_context import set_iast_stacktrace_reported from ddtrace.appsec._iast._metrics import _set_metric_iast_instrumented_source from ddtrace.appsec._iast._patch import _iast_instrument_starlette_request from ddtrace.appsec._iast._patch import _iast_instrument_starlette_request_body @@ -56,7 +57,7 @@ def _on_set_http_meta_iast( def _on_request_init(wrapped, instance, args, kwargs): wrapped(*args, **kwargs) - if _is_iast_enabled() and in_iast_context(): + if _is_iast_enabled() and is_iast_request_enabled(): try: instance.query_string = taint_pyobject( pyobject=instance.query_string, @@ -105,9 +106,6 @@ def _on_flask_patch(flask_version): _set_metric_iast_instrumented_source(OriginType.PATH) _set_metric_iast_instrumented_source(OriginType.QUERY) - # Instrumented on _ddtrace.appsec._asm_request_context._on_wrapped_view - _set_metric_iast_instrumented_source(OriginType.PATH_PARAMETER) - try_wrap_function_wrapper( "werkzeug.wrappers.request", "Request.get_data", @@ -129,9 +127,17 @@ def _on_flask_patch(flask_version): ) _set_metric_iast_instrumented_source(OriginType.QUERY) + # Instrumented on _ddtrace.appsec._asm_request_context._on_wrapped_view + _set_metric_iast_instrumented_source(OriginType.PATH_PARAMETER) + + # Instrumented on _on_set_request_tags_iast + _set_metric_iast_instrumented_source(OriginType.COOKIE_NAME) + _set_metric_iast_instrumented_source(OriginType.COOKIE) + 
_set_metric_iast_instrumented_source(OriginType.PARAMETER_NAME) + def _on_wsgi_environ(wrapped, _instance, args, kwargs): - if _is_iast_enabled() and args and in_iast_context(): + if _is_iast_enabled() and args and is_iast_request_enabled(): return wrapped(*((taint_structure(args[0], OriginType.HEADER_NAME, OriginType.HEADER),) + args[1:]), **kwargs) return wrapped(*args, **kwargs) @@ -140,6 +146,13 @@ def _on_wsgi_environ(wrapped, _instance, args, kwargs): def _on_django_patch(): if _is_iast_enabled(): try: + when_imported("django.http.request")( + lambda m: try_wrap_function_wrapper( + m, + "QueryDict.__getitem__", + functools.partial(if_iast_taint_returned_object_for, OriginType.PARAMETER), + ) + ) # we instrument those sources on _on_django_func_wrapped _set_metric_iast_instrumented_source(OriginType.HEADER_NAME) _set_metric_iast_instrumented_source(OriginType.HEADER) @@ -150,13 +163,7 @@ def _on_django_patch(): _set_metric_iast_instrumented_source(OriginType.PARAMETER) _set_metric_iast_instrumented_source(OriginType.PARAMETER_NAME) _set_metric_iast_instrumented_source(OriginType.BODY) - when_imported("django.http.request")( - lambda m: try_wrap_function_wrapper( - m, - "QueryDict.__getitem__", - functools.partial(if_iast_taint_returned_object_for, OriginType.PARAMETER), - ) - ) + except Exception: log.debug("Unexpected exception while patch IAST functions", exc_info=True) @@ -165,7 +172,7 @@ def _on_django_func_wrapped(fn_args, fn_kwargs, first_arg_expected_type, *_): # If IAST is enabled, and we're wrapping a Django view call, taint the kwargs (view's # path parameters) if _is_iast_enabled() and fn_args and isinstance(fn_args[0], first_arg_expected_type): - if not in_iast_context(): + if not is_iast_request_enabled(): return http_req = fn_args[0] @@ -278,18 +285,16 @@ def _on_grpc_response(message): def if_iast_taint_yield_tuple_for(origins, wrapped, instance, args, kwargs): - if _is_iast_enabled(): - if not is_iast_request_enabled(): - for key, value in 
wrapped(*args, **kwargs): - yield key, value - else: + if _is_iast_enabled() and is_iast_request_enabled(): + try: for key, value in wrapped(*args, **kwargs): new_key = taint_pyobject(pyobject=key, source_name=key, source_value=key, source_origin=origins[0]) new_value = taint_pyobject( pyobject=value, source_name=key, source_value=value, source_origin=origins[1] ) yield new_key, new_value - + except Exception: + log.debug("Unexpected exception while tainting pyobject", exc_info=True) else: for key, value in wrapped(*args, **kwargs): yield key, value @@ -319,7 +324,7 @@ def if_iast_taint_starlette_datastructures(origin, wrapped, instance, args, kwar res.append( taint_pyobject( pyobject=element, - source_name=origin_to_str(origin), + source_name=element, source_value=element, source_origin=origin, ) @@ -399,6 +404,12 @@ def _on_iast_fastapi_patch(): "FormData.get", functools.partial(if_iast_taint_returned_object_for, OriginType.BODY), ) + try_wrap_function_wrapper( + "starlette.datastructures", + "FormData.keys", + functools.partial(if_iast_taint_starlette_datastructures, OriginType.PARAMETER_NAME), + ) + _set_metric_iast_instrumented_source(OriginType.BODY) # Instrumented on _iast_starlette_scope_taint @@ -411,14 +422,7 @@ def _on_pre_tracedrequest_iast(ctx): def _on_set_request_tags_iast(request, span, flask_config): - if _is_iast_enabled(): - _set_metric_iast_instrumented_source(OriginType.COOKIE_NAME) - _set_metric_iast_instrumented_source(OriginType.COOKIE) - _set_metric_iast_instrumented_source(OriginType.PARAMETER_NAME) - - if not is_iast_request_enabled(): - return - + if _is_iast_enabled() and is_iast_request_enabled(): request.cookies = taint_structure( request.cookies, OriginType.COOKIE_NAME, @@ -439,3 +443,69 @@ def _on_set_request_tags_iast(request, span, flask_config): OriginType.PARAMETER, override_pyobject_tainted=True, ) + + +def _on_django_finalize_response_pre(ctx, after_request_tags, request, response): + if not response or not _is_iast_enabled() 
or not is_iast_request_enabled() or get_iast_stacktrace_reported(): + return + + try: + from .taint_sinks.stacktrace_leak import asm_check_stacktrace_leak + + content = response.content.decode("utf-8", errors="ignore") + asm_check_stacktrace_leak(content) + except Exception: + log.debug("Unexpected exception checking for stacktrace leak", exc_info=True) + + +def _on_django_technical_500_response(request, response, exc_type, exc_value, tb): + if not exc_value or not _is_iast_enabled() or not is_iast_request_enabled(): + return + + try: + from .taint_sinks.stacktrace_leak import asm_report_stacktrace_leak_from_django_debug_page + + exc_name = exc_type.__name__ + module = tb.tb_frame.f_globals.get("__name__", "") + asm_report_stacktrace_leak_from_django_debug_page(exc_name, module) + except Exception: + log.debug("Unexpected exception checking for stacktrace leak on 500 response view", exc_info=True) + + +def _on_flask_finalize_request_post(response, _): + if not response or not _is_iast_enabled() or not is_iast_request_enabled() or get_iast_stacktrace_reported(): + return + + try: + from .taint_sinks.stacktrace_leak import asm_check_stacktrace_leak + + content = response[0].decode("utf-8", errors="ignore") + asm_check_stacktrace_leak(content) + except Exception: + log.debug("Unexpected exception checking for stacktrace leak", exc_info=True) + + +def _on_asgi_finalize_response(body, _): + if not body or not _is_iast_enabled() or not is_iast_request_enabled(): + return + + try: + from .taint_sinks.stacktrace_leak import asm_check_stacktrace_leak + + content = body.decode("utf-8", errors="ignore") + asm_check_stacktrace_leak(content) + except Exception: + log.debug("Unexpected exception checking for stacktrace leak", exc_info=True) + + +def _on_werkzeug_render_debugger_html(html): + if not html or not _is_iast_enabled() or not is_iast_request_enabled(): + return + + try: + from .taint_sinks.stacktrace_leak import asm_check_stacktrace_leak + + 
asm_check_stacktrace_leak(html) + set_iast_stacktrace_reported(True) + except Exception: + log.debug("Unexpected exception checking for stacktrace leak", exc_info=True) diff --git a/ddtrace/appsec/_iast/_iast_request_context.py b/ddtrace/appsec/_iast/_iast_request_context.py index 07ad4c9c238..e9c985d0b3d 100644 --- a/ddtrace/appsec/_iast/_iast_request_context.py +++ b/ddtrace/appsec/_iast/_iast_request_context.py @@ -14,7 +14,7 @@ from ddtrace.appsec._iast._taint_tracking._context import create_context as create_propagation_context from ddtrace.appsec._iast._taint_tracking._context import reset_context as reset_propagation_context from ddtrace.appsec._iast.reporter import IastSpanReporter -from ddtrace.constants import ORIGIN_KEY +from ddtrace.constants import _ORIGIN_KEY from ddtrace.internal import core from ddtrace.internal.logger import get_logger from ddtrace.internal.utils.formats import asbool @@ -46,6 +46,7 @@ def __init__(self, span: Optional[Span] = None): self.iast_reporter: Optional[IastSpanReporter] = None self.iast_span_metrics: Dict[str, int] = {} self.iast_stack_trace_id: int = 0 + self.iast_stack_trace_reported: bool = False def _get_iast_context() -> Optional[IASTEnvironment]: @@ -88,6 +89,19 @@ def get_iast_reporter() -> Optional[IastSpanReporter]: return None +def get_iast_stacktrace_reported() -> bool: + env = _get_iast_context() + if env: + return env.iast_stack_trace_reported + return False + + +def set_iast_stacktrace_reported(reported: bool) -> None: + env = _get_iast_context() + if env: + env.iast_stack_trace_reported = reported + + def get_iast_stacktrace_id() -> int: env = _get_iast_context() if env: @@ -133,8 +147,8 @@ def _create_and_attach_iast_report_to_span(req_span: Span, existing_data: Option set_iast_request_enabled(False) end_iast_context(req_span) - if req_span.get_tag(ORIGIN_KEY) is None: - req_span.set_tag_str(ORIGIN_KEY, APPSEC.ORIGIN_VALUE) + if req_span.get_tag(_ORIGIN_KEY) is None: + req_span.set_tag_str(_ORIGIN_KEY, 
APPSEC.ORIGIN_VALUE) oce.release_request() diff --git a/ddtrace/appsec/_iast/_listener.py b/ddtrace/appsec/_iast/_listener.py index 953e22b1288..37f721438d6 100644 --- a/ddtrace/appsec/_iast/_listener.py +++ b/ddtrace/appsec/_iast/_listener.py @@ -1,11 +1,16 @@ +from ddtrace.appsec._iast._handlers import _on_asgi_finalize_response +from ddtrace.appsec._iast._handlers import _on_django_finalize_response_pre from ddtrace.appsec._iast._handlers import _on_django_func_wrapped from ddtrace.appsec._iast._handlers import _on_django_patch +from ddtrace.appsec._iast._handlers import _on_django_technical_500_response +from ddtrace.appsec._iast._handlers import _on_flask_finalize_request_post from ddtrace.appsec._iast._handlers import _on_flask_patch from ddtrace.appsec._iast._handlers import _on_grpc_response from ddtrace.appsec._iast._handlers import _on_pre_tracedrequest_iast from ddtrace.appsec._iast._handlers import _on_request_init from ddtrace.appsec._iast._handlers import _on_set_http_meta_iast from ddtrace.appsec._iast._handlers import _on_set_request_tags_iast +from ddtrace.appsec._iast._handlers import _on_werkzeug_render_debugger_html from ddtrace.appsec._iast._handlers import _on_wsgi_environ from ddtrace.appsec._iast._iast_request_context import _iast_end_request from ddtrace.internal import core @@ -18,11 +23,16 @@ def iast_listen(): core.on("set_http_meta_for_asm", _on_set_http_meta_iast) core.on("django.patch", _on_django_patch) core.on("django.wsgi_environ", _on_wsgi_environ, "wrapped_result") + core.on("django.finalize_response.pre", _on_django_finalize_response_pre) core.on("django.func.wrapped", _on_django_func_wrapped) + core.on("django.technical_500_response", _on_django_technical_500_response) core.on("flask.patch", _on_flask_patch) core.on("flask.request_init", _on_request_init) core.on("flask._patched_request", _on_pre_tracedrequest_iast) core.on("flask.set_request_tags", _on_set_request_tags_iast) + core.on("flask.finalize_request.post", 
_on_flask_finalize_request_post) + core.on("asgi.finalize_response", _on_asgi_finalize_response) + core.on("werkzeug.render_debugger_html", _on_werkzeug_render_debugger_html) core.on("context.ended.wsgi.__call__", _iast_end_request) core.on("context.ended.asgi.__call__", _iast_end_request) diff --git a/ddtrace/appsec/_iast/constants.py b/ddtrace/appsec/_iast/constants.py index 55ec5a5e740..83284094dcf 100644 --- a/ddtrace/appsec/_iast/constants.py +++ b/ddtrace/appsec/_iast/constants.py @@ -1,3 +1,4 @@ +import re from typing import Any from typing import Dict @@ -14,6 +15,7 @@ VULN_HEADER_INJECTION = "HEADER_INJECTION" VULN_CODE_INJECTION = "CODE_INJECTION" VULN_SSRF = "SSRF" +VULN_STACKTRACE_LEAK = "STACKTRACE_LEAK" VULNERABILITY_TOKEN_TYPE = Dict[int, Dict[str, Any]] @@ -27,6 +29,12 @@ RC2_DEF = "rc2" RC4_DEF = "rc4" IDEA_DEF = "idea" +STACKTRACE_RE_DETECT = re.compile(r"Traceback \(most recent call last\):") +HTML_TAGS_REMOVE = re.compile(r"|<[^>]*>|&#\w+;") +STACKTRACE_FILE_LINE = re.compile(r"File (.*?), line (\d+), in (.+)") +STACKTRACE_EXCEPTION_REGEX = re.compile( + r"^(?P[A-Za-z_]\w*(?:Error|Exception|Interrupt|Fault|Warning))" r"(?:\s*:\s*(?P.*))?$" +) DEFAULT_WEAK_HASH_ALGORITHMS = {MD5_DEF, SHA1_DEF} diff --git a/ddtrace/appsec/_iast/taint_sinks/_base.py b/ddtrace/appsec/_iast/taint_sinks/_base.py index 543246581cd..a934c8c5788 100644 --- a/ddtrace/appsec/_iast/taint_sinks/_base.py +++ b/ddtrace/appsec/_iast/taint_sinks/_base.py @@ -5,10 +5,11 @@ from typing import Text from ddtrace import tracer +from ddtrace.appsec._deduplications import deduplication from ddtrace.appsec._trace_utils import _asm_manual_keep from ddtrace.internal.logger import get_logger +from ddtrace.settings.asm import config as asm_config -from ..._deduplications import deduplication from .._iast_request_context import get_iast_reporter from .._iast_request_context import is_iast_request_enabled from .._iast_request_context import set_iast_reporter @@ -27,6 +28,9 @@ class 
taint_sink_deduplication(deduplication): + def _check_deduplication(self): + return asm_config._iast_deduplication_enabled + def _extract(self, args): # We skip positions 0 and 1 because they represent the 'cls' and 'span' respectively return args[2:] diff --git a/ddtrace/appsec/_iast/taint_sinks/stacktrace_leak.py b/ddtrace/appsec/_iast/taint_sinks/stacktrace_leak.py new file mode 100644 index 00000000000..2fa9a0016a9 --- /dev/null +++ b/ddtrace/appsec/_iast/taint_sinks/stacktrace_leak.py @@ -0,0 +1,102 @@ +import os +import re + +from ..._constants import IAST_SPAN_TAGS +from .. import oce +from .._iast_request_context import set_iast_stacktrace_reported +from .._metrics import _set_metric_iast_executed_sink +from .._metrics import increment_iast_span_metric +from .._taint_tracking._errors import iast_taint_log_error +from ..constants import HTML_TAGS_REMOVE +from ..constants import STACKTRACE_EXCEPTION_REGEX +from ..constants import STACKTRACE_FILE_LINE +from ..constants import VULN_STACKTRACE_LEAK +from ..taint_sinks._base import VulnerabilityBase + + +@oce.register +class StacktraceLeak(VulnerabilityBase): + vulnerability_type = VULN_STACKTRACE_LEAK + skip_location = True + + +def asm_report_stacktrace_leak_from_django_debug_page(exc_name, module): + increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, StacktraceLeak.vulnerability_type) + _set_metric_iast_executed_sink(StacktraceLeak.vulnerability_type) + evidence = "Module: %s\nException: %s" % (module, exc_name) + StacktraceLeak.report(evidence_value=evidence) + set_iast_stacktrace_reported(True) + + +def asm_check_stacktrace_leak(content: str) -> None: + if not content: + return + + try: + # Quick check to avoid the slower operations if on stacktrace + if "Traceback (most recent call last):" not in content: + return + + text = HTML_TAGS_REMOVE.sub("", content) + lines = [line.strip() for line in text.splitlines() if line.strip()] + + file_lines = [] + exception_line = "" + + for i, line in 
enumerate(lines): + if line.startswith("Traceback (most recent call last):"): + # from here until we find an exception line + continue + + # See if this line is a "File ..." line + m_file = STACKTRACE_FILE_LINE.match(line) + if m_file: + file_lines.append(m_file.groups()) + continue + + # See if this line might be the exception line + m_exc = STACKTRACE_EXCEPTION_REGEX.match(line) + if m_exc: + # We consider it as the "final" exception line. Keep it. + exception_line = m_exc.group("exc") + # We won't break immediately because sometimes Django + # HTML stack traces can have repeated exception lines, etc. + # But typically the last match is the real final exception + # We'll keep updating exception_line if we see multiple + continue + + if not file_lines and not exception_line: + return + + module_path = None + if file_lines: + # file_lines looks like [ ("/path/to/file.py", "line_no", "funcname"), ... ] + last_file_entry = file_lines[-1] + module_path = last_file_entry[0] # the path in quotes + + # Attempt to convert a path like "/myproj/foo/bar.py" into "foo.bar" + # or "myproj.foo.bar" depending on your directory structure. + # This is a *best effort* approach (it can be environment-specific). 
+ module_name = "" + if module_path: + mod_no_ext = re.sub(r"\.py$", "", module_path) + parts: list[str] = [] + while True: + head, tail = os.path.split(mod_no_ext) + if tail: + parts.insert(0, tail) + mod_no_ext = head + else: + # might still have a leftover 'head' if it’s not just root + break + + module_name = ".".join(parts) + if not module_name: + module_name = module_path # fallback: just the path + + increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, StacktraceLeak.vulnerability_type) + _set_metric_iast_executed_sink(StacktraceLeak.vulnerability_type) + evidence = "Module: %s\nException: %s" % (module_name.strip(), exception_line.strip()) + StacktraceLeak.report(evidence_value=evidence) + except Exception as e: + iast_taint_log_error("[IAST] error in check stacktrace leak. {}".format(e)) diff --git a/ddtrace/appsec/_processor.py b/ddtrace/appsec/_processor.py index 33f3ac8ced1..6102ba1ded2 100644 --- a/ddtrace/appsec/_processor.py +++ b/ddtrace/appsec/_processor.py @@ -27,8 +27,8 @@ from ddtrace.appsec._exploit_prevention.stack_traces import report_stack from ddtrace.appsec._trace_utils import _asm_manual_keep from ddtrace.appsec._utils import has_triggers -from ddtrace.constants import ORIGIN_KEY -from ddtrace.constants import RUNTIME_FAMILY +from ddtrace.constants import _ORIGIN_KEY +from ddtrace.constants import _RUNTIME_FAMILY from ddtrace.ext import SpanTypes from ddtrace.internal import core from ddtrace.internal._unpatched import unpatched_open as open # noqa: A001 @@ -235,7 +235,7 @@ def on_span_start(self, span: Span) -> None: headers_case_sensitive = _asm_request_context.get_headers_case_sensitive() span.set_metric(APPSEC.ENABLED, 1.0) - span.set_tag_str(RUNTIME_FAMILY, "python") + span.set_tag_str(_RUNTIME_FAMILY, "python") def waf_callable(custom_data=None, **kwargs): return self._waf_action(span._local_root or span, ctx, custom_data, **kwargs) @@ -391,8 +391,8 @@ def _waf_action( # Right now, we overwrite any value that could be 
already there. We need to reconsider when ASM/AppSec's # specs are updated. _asm_manual_keep(span) - if span.get_tag(ORIGIN_KEY) is None: - span.set_tag_str(ORIGIN_KEY, APPSEC.ORIGIN_VALUE) + if span.get_tag(_ORIGIN_KEY) is None: + span.set_tag_str(_ORIGIN_KEY, APPSEC.ORIGIN_VALUE) return waf_results def _is_needed(self, address: str) -> bool: diff --git a/ddtrace/appsec/_remoteconfiguration.py b/ddtrace/appsec/_remoteconfiguration.py index e4785cd5f40..723710d7cc3 100644 --- a/ddtrace/appsec/_remoteconfiguration.py +++ b/ddtrace/appsec/_remoteconfiguration.py @@ -8,7 +8,6 @@ from ddtrace import Tracer from ddtrace.appsec._capabilities import _asm_feature_is_required from ddtrace.appsec._constants import PRODUCTS -from ddtrace.internal import forksafe from ddtrace.internal.logger import get_logger from ddtrace.internal.remoteconfig._connectors import PublisherSubscriberConnector from ddtrace.internal.remoteconfig._publishers import RemoteConfigPublisherMergeDicts @@ -72,7 +71,6 @@ def enable_appsec_rc(test_tracer: Optional[Tracer] = None) -> None: load_common_appsec_modules() - forksafe.register(_forksafe_appsec_rc) telemetry_writer.product_activated(TELEMETRY_APM_PRODUCT.APPSEC, True) diff --git a/ddtrace/bootstrap/preload.py b/ddtrace/bootstrap/preload.py index 0018162beaa..95dc8f4cd55 100644 --- a/ddtrace/bootstrap/preload.py +++ b/ddtrace/bootstrap/preload.py @@ -6,7 +6,6 @@ import os # noqa:I001 from ddtrace import config # noqa:F401 -from ddtrace.appsec._iast._utils import _is_iast_enabled from ddtrace.settings.profiling import config as profiling_config # noqa:F401 from ddtrace.internal.logger import get_logger # noqa:F401 from ddtrace.internal.module import ModuleWatchdog # noqa:F401 @@ -15,7 +14,6 @@ from ddtrace.internal.tracemethods import _install_trace_methods # noqa:F401 from ddtrace.internal.utils.formats import asbool # noqa:F401 from ddtrace.internal.utils.formats import parse_tags_str # noqa:F401 -from ddtrace.settings.asm import config as 
asm_config # noqa:F401 from ddtrace.settings.crashtracker import config as crashtracker_config from ddtrace import tracer @@ -72,21 +70,6 @@ def register_post_preload(func: t.Callable) -> None: if config._runtime_metrics_enabled: RuntimeWorker.enable() -if _is_iast_enabled(): - """ - This is the entry point for the IAST instrumentation. `enable_iast_propagation` is called on patch_all function - too but patch_all depends of DD_TRACE_ENABLED environment variable. This is the reason why we need to call it - here and it's not a duplicate call due to `enable_iast_propagation` has a global variable to avoid multiple calls. - """ - from ddtrace.appsec._iast import enable_iast_propagation - - enable_iast_propagation() - -if asm_config._asm_enabled or config._remote_config_enabled: - from ddtrace.appsec._remoteconfiguration import enable_appsec_rc - - enable_appsec_rc() - if config._otel_enabled: @ModuleWatchdog.after_module_imported("opentelemetry.trace") diff --git a/ddtrace/constants.py b/ddtrace/constants.py index 1a112f677b8..b4694e24345 100644 --- a/ddtrace/constants.py +++ b/ddtrace/constants.py @@ -4,36 +4,36 @@ # TODO: Deprecate and remove the SAMPLE_RATE_METRIC_KEY constant. # This key enables legacy trace sampling support in the Datadog agent. 
-SAMPLE_RATE_METRIC_KEY = "_sample_rate" -SAMPLING_PRIORITY_KEY = "_sampling_priority_v1" +_SAMPLE_RATE_METRIC_KEY = SAMPLE_RATE_METRIC_KEY = "_sample_rate" +_SAMPLING_PRIORITY_KEY = SAMPLING_PRIORITY_KEY = "_sampling_priority_v1" _ANALYTICS_SAMPLE_RATE_KEY = ANALYTICS_SAMPLE_RATE_KEY = "_dd1.sr.eausr" -SAMPLING_AGENT_DECISION = "_dd.agent_psr" -SAMPLING_RULE_DECISION = "_dd.rule_psr" -SAMPLING_LIMIT_DECISION = "_dd.limit_psr" +_SAMPLING_AGENT_DECISION = SAMPLING_AGENT_DECISION = "_dd.agent_psr" +_SAMPLING_RULE_DECISION = SAMPLING_RULE_DECISION = "_dd.rule_psr" +_SAMPLING_LIMIT_DECISION = SAMPLING_LIMIT_DECISION = "_dd.limit_psr" _SINGLE_SPAN_SAMPLING_MECHANISM = "_dd.span_sampling.mechanism" _SINGLE_SPAN_SAMPLING_RATE = "_dd.span_sampling.rule_rate" _SINGLE_SPAN_SAMPLING_MAX_PER_SEC = "_dd.span_sampling.max_per_second" _SINGLE_SPAN_SAMPLING_MAX_PER_SEC_NO_LIMIT = -1 _APM_ENABLED_METRIC_KEY = "_dd.apm.enabled" -ORIGIN_KEY = "_dd.origin" -USER_ID_KEY = "_dd.p.usr.id" -HOSTNAME_KEY = "_dd.hostname" -RUNTIME_FAMILY = "_dd.runtime_family" +_ORIGIN_KEY = ORIGIN_KEY = "_dd.origin" +_USER_ID_KEY = USER_ID_KEY = "_dd.p.usr.id" +_HOSTNAME_KEY = HOSTNAME_KEY = "_dd.hostname" +_RUNTIME_FAMILY = RUNTIME_FAMILY = "_dd.runtime_family" ENV_KEY = "env" VERSION_KEY = "version" SERVICE_KEY = "service.name" -BASE_SERVICE_KEY = "_dd.base_service" +_BASE_SERVICE_KEY = BASE_SERVICE_KEY = "_dd.base_service" SERVICE_VERSION_KEY = "service.version" SPAN_KIND = "span.kind" -SPAN_MEASURED_KEY = "_dd.measured" -KEEP_SPANS_RATE_KEY = "_dd.tracer_kr" -MULTIPLE_IP_HEADERS = "_dd.multiple-ip-headers" +_SPAN_MEASURED_KEY = SPAN_MEASURED_KEY = "_dd.measured" +_KEEP_SPANS_RATE_KEY = KEEP_SPANS_RATE_KEY = "_dd.tracer_kr" +_MULTIPLE_IP_HEADERS = MULTIPLE_IP_HEADERS = "_dd.multiple-ip-headers" APPSEC_ENV = "DD_APPSEC_ENABLED" -CONFIG_ENDPOINT_ENV = "_DD_CONFIG_ENDPOINT" -CONFIG_ENDPOINT_RETRIES_ENV = "_DD_CONFIG_ENDPOINT_RETRIES" -CONFIG_ENDPOINT_TIMEOUT_ENV = "_DD_CONFIG_ENDPOINT_TIMEOUT" 
+_CONFIG_ENDPOINT_ENV = CONFIG_ENDPOINT_ENV = "_DD_CONFIG_ENDPOINT" +_CONFIG_ENDPOINT_RETRIES_ENV = CONFIG_ENDPOINT_RETRIES_ENV = "_DD_CONFIG_ENDPOINT_RETRIES" +_CONFIG_ENDPOINT_TIMEOUT_ENV = CONFIG_ENDPOINT_TIMEOUT_ENV = "_DD_CONFIG_ENDPOINT_TIMEOUT" IAST_ENV = "DD_IAST_ENABLED" MANUAL_DROP_KEY = "manual.drop" @@ -57,6 +57,22 @@ _DEPRECATED_MODULE_ATTRIBUTES = [ "ANALYTICS_SAMPLE_RATE_KEY", + "SAMPLE_RATE_METRIC_KEY", + "SAMPLING_PRIORITY_KEY", + "SAMPLING_AGENT_DECISION", + "SAMPLING_RULE_DECISION", + "SAMPLING_LIMIT_DECISION", + "USER_ID_KEY", + "ORIGIN_KEY", + "HOSTNAME_KEY", + "RUNTIME_FAMILY", + "BASE_SERVICE_KEY", + "SPAN_MEASURED_KEY", + "KEEP_SPANS_RATE_KEY", + "MULTIPLE_IP_HEADERS", + "CONFIG_ENDPOINT_ENV", + "CONFIG_ENDPOINT_RETRIES_ENV", + "CONFIG_ENDPOINT_TIMEOUT_ENV", ] diff --git a/ddtrace/contrib/dbapi/__init__.py b/ddtrace/contrib/dbapi/__init__.py index e511fc31657..fa733c19a63 100644 --- a/ddtrace/contrib/dbapi/__init__.py +++ b/ddtrace/contrib/dbapi/__init__.py @@ -14,8 +14,8 @@ from ...appsec._constants import IAST_SPAN_TAGS from ...appsec._iast._metrics import increment_iast_span_metric from ...constants import _ANALYTICS_SAMPLE_RATE_KEY +from ...constants import _SPAN_MEASURED_KEY from ...constants import SPAN_KIND -from ...constants import SPAN_MEASURED_KEY from ...ext import SpanKind from ...ext import SpanTypes from ...ext import db @@ -92,7 +92,7 @@ def _trace_method(self, method, name, resource, extra_tags, dbm_propagator, *arg name, service=ext_service(pin, self._self_config), resource=resource, span_type=SpanTypes.SQL ) as s: if measured: - s.set_tag(SPAN_MEASURED_KEY) + s.set_tag(_SPAN_MEASURED_KEY) # No reason to tag the query since it is set as the resource by the agent. 
See: # https://github.com/DataDog/datadog-trace-agent/blob/bda1ebbf170dd8c5879be993bdd4dbae70d10fda/obfuscate/sql.go#L232 s.set_tags(pin.tags) diff --git a/ddtrace/contrib/dbapi_async/__init__.py b/ddtrace/contrib/dbapi_async/__init__.py index 9ebf36847b0..a6ae676f4bd 100644 --- a/ddtrace/contrib/dbapi_async/__init__.py +++ b/ddtrace/contrib/dbapi_async/__init__.py @@ -9,8 +9,8 @@ from ...appsec._constants import IAST_SPAN_TAGS from ...appsec._iast._metrics import increment_iast_span_metric from ...constants import _ANALYTICS_SAMPLE_RATE_KEY +from ...constants import _SPAN_MEASURED_KEY from ...constants import SPAN_KIND -from ...constants import SPAN_MEASURED_KEY from ...ext import SpanKind from ...ext import SpanTypes from ...trace import Pin @@ -67,7 +67,7 @@ async def _trace_method(self, method, name, resource, extra_tags, dbm_propagator name, service=ext_service(pin, self._self_config), resource=resource, span_type=SpanTypes.SQL ) as s: if measured: - s.set_tag(SPAN_MEASURED_KEY) + s.set_tag(_SPAN_MEASURED_KEY) # No reason to tag the query since it is set as the resource by the agent. See: # https://github.com/DataDog/datadog-trace-agent/blob/bda1ebbf170dd8c5879be993bdd4dbae70d10fda/obfuscate/sql.go#L232 s.set_tags(pin.tags) diff --git a/ddtrace/contrib/flask/__init__.py b/ddtrace/contrib/flask/__init__.py index 0562240d090..4b3c1afbf16 100644 --- a/ddtrace/contrib/flask/__init__.py +++ b/ddtrace/contrib/flask/__init__.py @@ -87,9 +87,6 @@ def index(): # Override service name config.flask['service_name'] = 'custom-service-name' - # Report 401, and 403 responses as errors - config.http_server.error_statuses = '401,403' - .. __: http://flask.pocoo.org/ :ref:`All HTTP tags ` are supported for this integration. 
diff --git a/ddtrace/contrib/httplib/__init__.py b/ddtrace/contrib/httplib/__init__.py index ae85051517e..7c5247422a1 100644 --- a/ddtrace/contrib/httplib/__init__.py +++ b/ddtrace/contrib/httplib/__init__.py @@ -42,20 +42,8 @@ # Disable distributed tracing globally. config.httplib['distributed_tracing'] = False - - # Change the service distributed tracing option only for this HTTP - # connection. - - # Python 2 - connection = urllib.HTTPConnection('www.datadog.com') - - # Python 3 connection = http.client.HTTPConnection('www.datadog.com') - cfg = config.get_from(connection) - cfg['distributed_tracing'] = True - - :ref:`Headers tracing ` is supported for this integration. """ diff --git a/ddtrace/contrib/internal/aiobotocore/patch.py b/ddtrace/contrib/internal/aiobotocore/patch.py index 0df89927ece..4b0fea48b96 100644 --- a/ddtrace/contrib/internal/aiobotocore/patch.py +++ b/ddtrace/contrib/internal/aiobotocore/patch.py @@ -5,8 +5,8 @@ from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib.internal.trace_utils import ext_service from ddtrace.contrib.internal.trace_utils import unwrap from ddtrace.ext import SpanKind @@ -125,7 +125,7 @@ async def _wrapped_api_call(original_func, instance, args, kwargs): # set span.kind tag equal to type of request span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) try: operation = get_argument_value(args, kwargs, 0, "operation_name") diff --git a/ddtrace/contrib/internal/aiohttp/middlewares.py b/ddtrace/contrib/internal/aiohttp/middlewares.py index 59045d7bcc9..ddb2d35fbc6 100644 --- a/ddtrace/contrib/internal/aiohttp/middlewares.py +++ b/ddtrace/contrib/internal/aiohttp/middlewares.py @@ -3,8 +3,8 @@ from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY 
+from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.contrib.asyncio import context_provider from ddtrace.ext import SpanKind @@ -50,7 +50,7 @@ async def attach_context(request): service=service, span_type=SpanTypes.WEB, ) - request_span.set_tag(SPAN_MEASURED_KEY) + request_span.set_tag(_SPAN_MEASURED_KEY) request_span.set_tag_str(COMPONENT, config.aiohttp.integration_name) @@ -108,7 +108,7 @@ def finish_request_span(request, response): # DEV: aiohttp is special case maintains separate configuration from config api trace_query_string = request[REQUEST_CONFIG_KEY].get("trace_query_string") if trace_query_string is None: - trace_query_string = config.http.trace_query_string + trace_query_string = config._http.trace_query_string if trace_query_string: request_span.set_tag_str(http.QUERY_STRING, request.query_string) diff --git a/ddtrace/contrib/internal/aiomysql/patch.py b/ddtrace/contrib/internal/aiomysql/patch.py index 193d471d124..8f33a4d6343 100644 --- a/ddtrace/contrib/internal/aiomysql/patch.py +++ b/ddtrace/contrib/internal/aiomysql/patch.py @@ -3,8 +3,8 @@ from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import dbapi from ddtrace.contrib import trace_utils from ddtrace.contrib.internal.trace_utils import _convert_to_string @@ -82,7 +82,7 @@ async def _trace_method(self, method, resource, extra_tags, *args, **kwargs): # set span.kind to the type of request being performed s.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - s.set_tag(SPAN_MEASURED_KEY) + s.set_tag(_SPAN_MEASURED_KEY) s.set_tags(pin.tags) s.set_tags(extra_tags) diff --git a/ddtrace/contrib/internal/aiopg/connection.py b/ddtrace/contrib/internal/aiopg/connection.py index 
1daf84b2987..b63a6352a27 100644 --- a/ddtrace/contrib/internal/aiopg/connection.py +++ b/ddtrace/contrib/internal/aiopg/connection.py @@ -4,8 +4,8 @@ from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import dbapi from ddtrace.contrib import trace_utils from ddtrace.ext import SpanKind @@ -47,7 +47,7 @@ async def _trace_method(self, method, resource, extra_tags, *args, **kwargs): # set span.kind to the type of request being performed s.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - s.set_tag(SPAN_MEASURED_KEY) + s.set_tag(_SPAN_MEASURED_KEY) s.set_tags(pin.tags) s.set_tags(extra_tags) diff --git a/ddtrace/contrib/internal/aioredis/patch.py b/ddtrace/contrib/internal/aioredis/patch.py index 3e1f0c062a3..62b8ec9f80c 100644 --- a/ddtrace/contrib/internal/aioredis/patch.py +++ b/ddtrace/contrib/internal/aioredis/patch.py @@ -9,8 +9,8 @@ from ddtrace._trace.utils_redis import _instrument_redis_cmd from ddtrace._trace.utils_redis import _instrument_redis_execute_pipeline from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.contrib.internal.redis_utils import _run_redis_command_async from ddtrace.contrib.redis_utils import ROW_RETURNING_COMMANDS @@ -149,7 +149,7 @@ def traced_13_execute_command(func, instance, args, kwargs): span.set_tag_str(COMPONENT, config.aioredis.integration_name) span.set_tag_str(db.SYSTEM, redisx.APP) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) span.set_tag_str(redisx.RAWCMD, query) if pin.tags: span.set_tags(pin.tags) @@ -225,7 +225,7 @@ async def traced_13_execute_pipeline(func, instance, args, kwargs): } ) - span.set_tag(SPAN_MEASURED_KEY) + 
span.set_tag(_SPAN_MEASURED_KEY) span.set_tag_str(redisx.RAWCMD, cmds_string) span.set_metric(redisx.PIPELINE_LEN, len(instance._pipeline)) # set analytics sample rate if enabled diff --git a/ddtrace/contrib/internal/algoliasearch/patch.py b/ddtrace/contrib/internal/algoliasearch/patch.py index e3074225570..bc1ba23a279 100644 --- a/ddtrace/contrib/internal/algoliasearch/patch.py +++ b/ddtrace/contrib/internal/algoliasearch/patch.py @@ -1,8 +1,8 @@ from wrapt import wrap_function_wrapper as _w from ddtrace import config +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.ext import SpanKind from ddtrace.ext import SpanTypes @@ -129,7 +129,7 @@ def _patched_search(func, instance, wrapt_args, wrapt_kwargs): # set span.kind to the type of request being performed span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) if span.context.sampling_priority is not None and span.context.sampling_priority <= 0: return func(*wrapt_args, **wrapt_kwargs) diff --git a/ddtrace/contrib/internal/asyncpg/patch.py b/ddtrace/contrib/internal/asyncpg/patch.py index 538af434e0b..4eeae4d193e 100644 --- a/ddtrace/contrib/internal/asyncpg/patch.py +++ b/ddtrace/contrib/internal/asyncpg/patch.py @@ -9,7 +9,7 @@ import wrapt from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.ext import SpanKind from ddtrace.ext import SpanTypes from ddtrace.ext import db @@ -117,7 +117,7 @@ async def _traced_query(pin, method, query, args, kwargs): # set span.kind to the type of request being performed span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) span.set_tags(pin.tags) # dispatch DBM diff --git a/ddtrace/contrib/internal/boto/patch.py 
b/ddtrace/contrib/internal/boto/patch.py index e7418aba878..87094751f9b 100644 --- a/ddtrace/contrib/internal/boto/patch.py +++ b/ddtrace/contrib/internal/boto/patch.py @@ -7,8 +7,8 @@ from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.ext import SpanKind from ddtrace.ext import SpanTypes from ddtrace.ext import aws @@ -93,7 +93,7 @@ def patched_query_request(original_func, instance, args, kwargs): # set span.kind to the type of request being performed span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) operation_name = None if args: @@ -164,7 +164,7 @@ def patched_auth_request(original_func, instance, args, kwargs): service=schematize_service_name("{}.{}".format(pin.service, endpoint_name)), span_type=SpanTypes.HTTP, ) as span: - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) if args: http_method = get_argument_value(args, kwargs, 0, "method") span.resource = "%s.%s" % (endpoint_name, http_method.lower()) diff --git a/ddtrace/contrib/internal/botocore/patch.py b/ddtrace/contrib/internal/botocore/patch.py index 734c429d798..61353e7b34e 100644 --- a/ddtrace/contrib/internal/botocore/patch.py +++ b/ddtrace/contrib/internal/botocore/patch.py @@ -33,7 +33,7 @@ from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.formats import deep_getattr from ddtrace.llmobs._integrations import BedrockIntegration -from ddtrace.settings.config import Config +from ddtrace.settings._config import Config from ddtrace.trace import Pin from .services.bedrock import patched_bedrock_api_call diff --git a/ddtrace/contrib/internal/bottle/trace.py b/ddtrace/contrib/internal/bottle/trace.py index 2f86b1e780a..3aabb4ccc81 100644 --- a/ddtrace/contrib/internal/bottle/trace.py +++ 
b/ddtrace/contrib/internal/bottle/trace.py @@ -6,8 +6,8 @@ import ddtrace from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.ext import SpanKind from ddtrace.ext import SpanTypes @@ -57,7 +57,7 @@ def wrapped(*args, **kwargs): # set span.kind to the type of request being performed s.set_tag_str(SPAN_KIND, SpanKind.SERVER) - s.set_tag(SPAN_MEASURED_KEY) + s.set_tag(_SPAN_MEASURED_KEY) # set analytics sample rate with global config enabled s.set_tag(_ANALYTICS_SAMPLE_RATE_KEY, config.bottle.get_analytics_sample_rate(use_global_config=True)) diff --git a/ddtrace/contrib/internal/cassandra/session.py b/ddtrace/contrib/internal/cassandra/session.py index 7f02d8c0af6..3ccd44bb616 100644 --- a/ddtrace/contrib/internal/cassandra/session.py +++ b/ddtrace/contrib/internal/cassandra/session.py @@ -23,10 +23,10 @@ from ddtrace import Span from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import ERROR_MSG from ddtrace.constants import ERROR_TYPE from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.ext import SpanKind from ddtrace.ext import SpanTypes from ddtrace.ext import cassandra as cassx @@ -214,7 +214,7 @@ def _start_span_and_set_tags( span.set_tag_str(COMPONENT, config.cassandra.integration_name) span.set_tag_str(db.SYSTEM, "cassandra") span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) span.set_tags(additional_tags) span.set_tag(_ANALYTICS_SAMPLE_RATE_KEY, config.cassandra.get_analytics_sample_rate()) if query is not None: diff --git a/ddtrace/contrib/internal/celery/app.py b/ddtrace/contrib/internal/celery/app.py index 
54ad5834769..f338816243b 100644 --- a/ddtrace/contrib/internal/celery/app.py +++ b/ddtrace/contrib/internal/celery/app.py @@ -6,8 +6,8 @@ from ddtrace import config from ddtrace._trace.pin import _DD_PIN_NAME from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.contrib.internal.celery.signals import trace_after_publish from ddtrace.contrib.internal.celery.signals import trace_before_publish @@ -102,7 +102,7 @@ def _traced_beat_inner(func, instance, args, kwargs): rate = config.celery.get_analytics_sample_rate() if rate is not None: span.set_tag(_ANALYTICS_SAMPLE_RATE_KEY, rate) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) return func(*args, **kwargs) diff --git a/ddtrace/contrib/internal/celery/signals.py b/ddtrace/contrib/internal/celery/signals.py index ea9d8c15863..dd03662e12f 100644 --- a/ddtrace/contrib/internal/celery/signals.py +++ b/ddtrace/contrib/internal/celery/signals.py @@ -5,8 +5,8 @@ from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.contrib.internal.celery import constants as c from ddtrace.contrib.internal.celery.utils import attach_span @@ -65,7 +65,7 @@ def trace_prerun(*args, **kwargs): if rate is not None: span.set_tag(_ANALYTICS_SAMPLE_RATE_KEY, rate) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) attach_span(task, task_id, span) if config.celery["distributed_tracing"]: attach_span_context(task, task_id, span) @@ -139,7 +139,7 @@ def trace_before_publish(*args, **kwargs): if rate is not None: span.set_tag(_ANALYTICS_SAMPLE_RATE_KEY, rate) - span.set_tag(SPAN_MEASURED_KEY) + 
span.set_tag(_SPAN_MEASURED_KEY) span.set_tag_str(c.TASK_TAG_KEY, c.TASK_APPLY_ASYNC) span.set_tag_str("celery.id", task_id) set_tags_from_context(span, kwargs) diff --git a/ddtrace/contrib/internal/consul/patch.py b/ddtrace/contrib/internal/consul/patch.py index b24b138b632..d1761fdb05d 100644 --- a/ddtrace/contrib/internal/consul/patch.py +++ b/ddtrace/contrib/internal/consul/patch.py @@ -3,8 +3,8 @@ from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.ext import SpanKind from ddtrace.ext import SpanTypes from ddtrace.ext import consul as consulx @@ -73,7 +73,7 @@ def trace_func(wrapped, instance, args, kwargs): # set span.kind to the type of request being performed span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) rate = config.consul.get_analytics_sample_rate() if rate is not None: span.set_tag(_ANALYTICS_SAMPLE_RATE_KEY, rate) diff --git a/ddtrace/contrib/internal/django/patch.py b/ddtrace/contrib/internal/django/patch.py index 7378d8da31c..c51c789aab7 100644 --- a/ddtrace/contrib/internal/django/patch.py +++ b/ddtrace/contrib/internal/django/patch.py @@ -696,6 +696,23 @@ def traced_as_view(django, pin, func, instance, args, kwargs): return wrapt.FunctionWrapper(view, traced_func(django, "django.view", resource=func_name(instance))) +@trace_utils.with_traced_module +def traced_technical_500_response(django, pin, func, instance, args, kwargs): + """ + Wrapper for django's views.debug.technical_500_response + """ + response = func(*args, **kwargs) + try: + request = get_argument_value(args, kwargs, 0, "request") + exc_type = get_argument_value(args, kwargs, 1, "exc_type") + exc_value = get_argument_value(args, kwargs, 2, "exc_value") + tb = get_argument_value(args, kwargs, 3, "tb") + 
core.dispatch("django.technical_500_response", (request, response, exc_type, exc_value, tb)) + except Exception: + log.debug("Error while trying to trace Django technical 500 response", exc_info=True) + return response + + @trace_utils.with_traced_module def traced_get_asgi_application(django, pin, func, instance, args, kwargs): from ddtrace.contrib.asgi import TraceMiddleware @@ -891,6 +908,9 @@ def _(m): trace_utils.wrap(m, "re_path", traced_urls_path(django)) when_imported("django.views.generic.base")(lambda m: trace_utils.wrap(m, "View.as_view", traced_as_view(django))) + when_imported("django.views.debug")( + lambda m: trace_utils.wrap(m, "technical_500_response", traced_technical_500_response(django)) + ) @when_imported("channels.routing") def _(m): @@ -935,6 +955,7 @@ def _unpatch(django): trace_utils.unwrap(django.conf.urls, "url") trace_utils.unwrap(django.contrib.auth.login, "login") trace_utils.unwrap(django.contrib.auth.authenticate, "authenticate") + trace_utils.unwrap(django.views.debug, "technical_500_response") if django.VERSION >= (2, 0, 0): trace_utils.unwrap(django.urls, "path") trace_utils.unwrap(django.urls, "re_path") diff --git a/ddtrace/contrib/internal/django/utils.py b/ddtrace/contrib/internal/django/utils.py index 14f5e3dd4e6..a8d4c469e66 100644 --- a/ddtrace/contrib/internal/django/utils.py +++ b/ddtrace/contrib/internal/django/utils.py @@ -15,7 +15,7 @@ from ddtrace import config from ddtrace._trace.span import Span from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.contrib.internal.django.compat import get_resolver from ddtrace.contrib.internal.django.compat import user_is_authenticated @@ -255,7 +255,7 @@ def _before_request_tags(pin, span, request): # has explicitly set it during the request lifetime span.service = trace_utils.int_service(pin,
config.django) span.span_type = SpanTypes.WEB - span._metrics[SPAN_MEASURED_KEY] = 1 + span._metrics[_SPAN_MEASURED_KEY] = 1 analytics_sr = config.django.get_analytics_sample_rate(use_global_config=True) if analytics_sr is not None: diff --git a/ddtrace/contrib/internal/dogpile_cache/region.py b/ddtrace/contrib/internal/dogpile_cache/region.py index 0c89d2d84d9..8b67f5bd2be 100644 --- a/ddtrace/contrib/internal/dogpile_cache/region.py +++ b/ddtrace/contrib/internal/dogpile_cache/region.py @@ -1,6 +1,6 @@ import dogpile -from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.ext import SpanTypes from ddtrace.ext import db from ddtrace.internal.constants import COMPONENT @@ -23,7 +23,7 @@ def _wrap_get_create(func, instance, args, kwargs): span_type=SpanTypes.CACHE, ) as span: span.set_tag_str(COMPONENT, "dogpile_cache") - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) span.set_tag("key", key) span.set_tag("region", instance.name) span.set_tag("backend", instance.actual_backend.__class__.__name__) @@ -45,7 +45,7 @@ def _wrap_get_create_multi(func, instance, args, kwargs): span_type="cache", ) as span: span.set_tag_str(COMPONENT, "dogpile_cache") - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) span.set_tag("keys", keys) span.set_tag("region", instance.name) span.set_tag("backend", instance.actual_backend.__class__.__name__) diff --git a/ddtrace/contrib/internal/dramatiq/patch.py b/ddtrace/contrib/internal/dramatiq/patch.py index 08daad9d93c..a6ecbbfd5d4 100644 --- a/ddtrace/contrib/internal/dramatiq/patch.py +++ b/ddtrace/contrib/internal/dramatiq/patch.py @@ -11,7 +11,7 @@ from ddtrace.contrib import trace_utils from ddtrace.ext import SpanKind from ddtrace.ext import SpanTypes -from ddtrace.settings.config import Config +from ddtrace.settings._config import Config def get_version() -> str: diff --git a/ddtrace/contrib/internal/elasticsearch/patch.py 
b/ddtrace/contrib/internal/elasticsearch/patch.py index 09eff66dbca..fa182ea3063 100644 --- a/ddtrace/contrib/internal/elasticsearch/patch.py +++ b/ddtrace/contrib/internal/elasticsearch/patch.py @@ -6,8 +6,8 @@ from ddtrace import config from ddtrace._trace import _limits from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib.internal.elasticsearch.quantize import quantize from ddtrace.contrib.internal.trace_utils import ext_service from ddtrace.contrib.internal.trace_utils import extract_netloc_and_query_info_from_url @@ -140,7 +140,7 @@ def _perform_request(func, instance, args, kwargs): # set span.kind to the type of request being performed span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) method, target = args params = kwargs.get("params") diff --git a/ddtrace/contrib/internal/falcon/middleware.py b/ddtrace/contrib/internal/falcon/middleware.py index b6fe9498b1a..b4ec5434777 100644 --- a/ddtrace/contrib/internal/falcon/middleware.py +++ b/ddtrace/contrib/internal/falcon/middleware.py @@ -2,8 +2,8 @@ from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.ext import SpanKind from ddtrace.ext import SpanTypes @@ -39,7 +39,7 @@ def process_request(self, req, resp): # set span.kind to the type of operation being performed span.set_tag_str(SPAN_KIND, SpanKind.SERVER) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) # set analytics sample rate with global config enabled span.set_tag(_ANALYTICS_SAMPLE_RATE_KEY, config.falcon.get_analytics_sample_rate(use_global_config=True)) diff --git 
a/ddtrace/contrib/internal/flask/patch.py b/ddtrace/contrib/internal/flask/patch.py index 7111310b6ef..9476485b314 100644 --- a/ddtrace/contrib/internal/flask/patch.py +++ b/ddtrace/contrib/internal/flask/patch.py @@ -224,6 +224,10 @@ def patch(): _w("flask.templating", "_render", patched_render) _w("flask", "render_template", _build_render_template_wrapper("render_template")) _w("flask", "render_template_string", _build_render_template_wrapper("render_template_string")) + try: + _w("werkzeug.debug.tbtools", "DebugTraceback.render_debugger_html", patched_render_debugger_html) + except AttributeError: + log.debug("Failed to patch DebugTraceback.render_debugger_html, not supported by this werkzeug version") bp_hooks = [ "after_app_request", @@ -380,12 +384,8 @@ def patched_finalize_request(wrapped, instance, args, kwargs): Wrapper for flask.app.Flask.finalize_request """ rv = wrapped(*args, **kwargs) - response = None - headers = None if getattr(rv, "is_sequence", False): - response = rv.response - headers = rv.headers - core.dispatch("flask.finalize_request.post", (response, headers)) + core.dispatch("flask.finalize_request.post", (rv.response, rv.headers)) return rv @@ -419,6 +419,12 @@ def _wrap(rule, endpoint=None, view_func=None, **kwargs): return _wrap(*args, **kwargs) +def patched_render_debugger_html(wrapped, instance, args, kwargs): + res = wrapped(*args, **kwargs) + core.dispatch("werkzeug.render_debugger_html", (res,)) + return res + + def patched_add_url_rule(wrapped, instance, args, kwargs): """Wrapper for flask.app.Flask.add_url_rule to wrap all views attached to this app""" diff --git a/ddtrace/contrib/internal/flask_cache/tracers.py b/ddtrace/contrib/internal/flask_cache/tracers.py index 2431ab2cbb7..9170bd4bd45 100644 --- a/ddtrace/contrib/internal/flask_cache/tracers.py +++ b/ddtrace/contrib/internal/flask_cache/tracers.py @@ -7,7 +7,7 @@ from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY -from ddtrace.constants 
import SPAN_MEASURED_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.ext import SpanTypes from ddtrace.ext import db from ddtrace.internal.constants import COMPONENT @@ -88,7 +88,7 @@ def __trace(self, cmd): s.set_tag_str(COMPONENT, config.flask_cache.integration_name) - s.set_tag(SPAN_MEASURED_KEY) + s.set_tag(_SPAN_MEASURED_KEY) # set span tags s.set_tag_str(CACHE_BACKEND, self.config.get("CACHE_TYPE")) s.set_tags(self._datadog_meta) diff --git a/ddtrace/contrib/internal/graphql/patch.py b/ddtrace/contrib/internal/graphql/patch.py index 18916f4222a..589bef80d4b 100644 --- a/ddtrace/contrib/internal/graphql/patch.py +++ b/ddtrace/contrib/internal/graphql/patch.py @@ -24,9 +24,9 @@ from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import ERROR_MSG from ddtrace.constants import ERROR_TYPE -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.ext import SpanTypes from ddtrace.internal.constants import COMPONENT @@ -177,7 +177,7 @@ def _traced_execute(func, args, kwargs): ) as span: span.set_tag_str(COMPONENT, config.graphql.integration_name) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) _set_span_operation_tags(span, document) span.set_tag_str(_GRAPHQL_SOURCE, source_str) @@ -207,7 +207,7 @@ def _traced_query(func, args, kwargs): span.set_tag_str(COMPONENT, config.graphql.integration_name) # mark span as measured and set sample rate - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) sample_rate = config.graphql.get_analytics_sample_rate() if sample_rate is not None: span.set_tag(_ANALYTICS_SAMPLE_RATE_KEY, sample_rate) diff --git a/ddtrace/contrib/internal/grpc/aio_client_interceptor.py b/ddtrace/contrib/internal/grpc/aio_client_interceptor.py index 5c03d1b8527..76455b1627f 100644 --- a/ddtrace/contrib/internal/grpc/aio_client_interceptor.py +++ 
b/ddtrace/contrib/internal/grpc/aio_client_interceptor.py @@ -14,10 +14,10 @@ from ddtrace import Span from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import ERROR_MSG from ddtrace.constants import ERROR_TYPE from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.contrib.internal.grpc import constants from ddtrace.contrib.internal.grpc import utils @@ -151,7 +151,7 @@ def _intercept_client_call(self, method_kind, client_call_details): # set span.kind to the type of operation being performed span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) utils.set_grpc_method_meta(span, method_as_str, method_kind) utils.set_grpc_client_meta(span, self._host, self._port) diff --git a/ddtrace/contrib/internal/grpc/aio_server_interceptor.py b/ddtrace/contrib/internal/grpc/aio_server_interceptor.py index d5ec9ed32ab..d2e7efb0e00 100644 --- a/ddtrace/contrib/internal/grpc/aio_server_interceptor.py +++ b/ddtrace/contrib/internal/grpc/aio_server_interceptor.py @@ -16,10 +16,10 @@ from ddtrace import Span # noqa:F401 from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import ERROR_MSG from ddtrace.constants import ERROR_TYPE from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.contrib.internal.grpc import constants from ddtrace.contrib.internal.grpc.utils import set_grpc_method_meta @@ -191,7 +191,7 @@ def _create_span(pin, method, invocation_metadata, method_kind): # set span.kind to the type of operation being performed span.set_tag_str(SPAN_KIND, SpanKind.SERVER) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) 
set_grpc_method_meta(span, method, method_kind) span.set_tag_str(constants.GRPC_SPAN_KIND_KEY, constants.GRPC_SPAN_KIND_VALUE_SERVER) diff --git a/ddtrace/contrib/internal/grpc/client_interceptor.py b/ddtrace/contrib/internal/grpc/client_interceptor.py index b389bbe71c8..b2f259c2f3e 100644 --- a/ddtrace/contrib/internal/grpc/client_interceptor.py +++ b/ddtrace/contrib/internal/grpc/client_interceptor.py @@ -5,11 +5,11 @@ from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import ERROR_MSG from ddtrace.constants import ERROR_STACK from ddtrace.constants import ERROR_TYPE from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.contrib.internal.grpc import constants from ddtrace.contrib.internal.grpc import utils @@ -196,7 +196,7 @@ def _intercept_client_call(self, method_kind, client_call_details): # set span.kind to the type of operation being performed span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) utils.set_grpc_method_meta(span, client_call_details.method, method_kind) utils.set_grpc_client_meta(span, self._host, self._port) diff --git a/ddtrace/contrib/internal/grpc/server_interceptor.py b/ddtrace/contrib/internal/grpc/server_interceptor.py index e221f170e80..482ca8b23c5 100644 --- a/ddtrace/contrib/internal/grpc/server_interceptor.py +++ b/ddtrace/contrib/internal/grpc/server_interceptor.py @@ -3,10 +3,10 @@ from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import ERROR_MSG from ddtrace.constants import ERROR_TYPE from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.contrib.internal.grpc import constants from 
ddtrace.contrib.internal.grpc.utils import set_grpc_method_meta @@ -100,7 +100,7 @@ def _fn(self, method_kind, behavior, args, kwargs): # set span.kind tag equal to type of span span.set_tag_str(SPAN_KIND, SpanKind.SERVER) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) set_grpc_method_meta(span, self._handler_call_details.method, method_kind) span.set_tag_str(constants.GRPC_SPAN_KIND_KEY, constants.GRPC_SPAN_KIND_VALUE_SERVER) diff --git a/ddtrace/contrib/internal/httplib/patch.py b/ddtrace/contrib/internal/httplib/patch.py index a1e367af3a1..79a8ea2816f 100644 --- a/ddtrace/contrib/internal/httplib/patch.py +++ b/ddtrace/contrib/internal/httplib/patch.py @@ -91,7 +91,7 @@ def _wrap_request(func, instance, args, kwargs): if should_skip_request(pin, instance): return func_to_call(*args, **kwargs) - cfg = config.get_from(instance) + cfg = config._get_from(instance) try: # Create a new span and attach to this instance (so we can retrieve/update/close later on the response) diff --git a/ddtrace/contrib/internal/httpx/patch.py b/ddtrace/contrib/internal/httpx/patch.py index a3a677c14ac..1cf4e6f00e1 100644 --- a/ddtrace/contrib/internal/httpx/patch.py +++ b/ddtrace/contrib/internal/httpx/patch.py @@ -6,8 +6,8 @@ from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib.internal.trace_utils import distributed_tracing_enabled from ddtrace.contrib.internal.trace_utils import ext_service from ddtrace.contrib.internal.trace_utils import set_http_meta @@ -86,7 +86,7 @@ def _get_service_name(pin, request): def _init_span(span, request): # type: (Span, httpx.Request) -> None - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) if distributed_tracing_enabled(config.httpx): HTTPPropagator.inject(span.context, request.headers) diff --git 
a/ddtrace/contrib/internal/jinja2/patch.py b/ddtrace/contrib/internal/jinja2/patch.py index 67c704c415d..6d044eb8ab8 100644 --- a/ddtrace/contrib/internal/jinja2/patch.py +++ b/ddtrace/contrib/internal/jinja2/patch.py @@ -4,7 +4,7 @@ from wrapt import wrap_function_wrapper as _w from ddtrace import config -from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.contrib.internal.trace_utils import unwrap as _u from ddtrace.ext import SpanTypes from ddtrace.internal.constants import COMPONENT @@ -64,7 +64,7 @@ def _wrap_render(wrapped, instance, args, kwargs): with pin.tracer.trace("jinja2.render", pin.service, span_type=SpanTypes.TEMPLATE) as span: span.set_tag_str(COMPONENT, config.jinja2.integration_name) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) try: return wrapped(*args, **kwargs) finally: diff --git a/ddtrace/contrib/internal/kafka/patch.py b/ddtrace/contrib/internal/kafka/patch.py index 6f69cda3239..6818afbede0 100644 --- a/ddtrace/contrib/internal/kafka/patch.py +++ b/ddtrace/contrib/internal/kafka/patch.py @@ -6,8 +6,8 @@ from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.ext import SpanKind from ddtrace.ext import SpanTypes @@ -192,7 +192,7 @@ def traced_produce(func, instance, args, kwargs): span.set_tag(kafkax.PARTITION, partition) span.set_tag_str(kafkax.TOMBSTONE, str(value is None)) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) if instance._dd_bootstrap_servers is not None: span.set_tag_str(kafkax.HOST_LIST, instance._dd_bootstrap_servers) rate = config.kafka.get_analytics_sample_rate() @@ -289,7 +289,7 @@ def _instrument_message(messages, pin, start_ns, instance, err): pass span.set_tag_str(kafkax.TOMBSTONE, str(is_tombstone)) 
span.set_tag(kafkax.MESSAGE_OFFSET, message_offset) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) rate = config.kafka.get_analytics_sample_rate() if rate is not None: span.set_tag(_ANALYTICS_SAMPLE_RATE_KEY, rate) diff --git a/ddtrace/contrib/internal/kombu/patch.py b/ddtrace/contrib/internal/kombu/patch.py index fa63e5c4f86..9cbe8ac94a0 100644 --- a/ddtrace/contrib/internal/kombu/patch.py +++ b/ddtrace/contrib/internal/kombu/patch.py @@ -6,8 +6,8 @@ from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY # project from ddtrace.contrib import trace_utils @@ -117,7 +117,7 @@ def traced_receive(func, instance, args, kwargs): # set span.kind to the type of operation being performed s.set_tag_str(SPAN_KIND, SpanKind.CONSUMER) - s.set_tag(SPAN_MEASURED_KEY) + s.set_tag(_SPAN_MEASURED_KEY) # run the command exchange = message.delivery_info["exchange"] s.resource = exchange @@ -147,7 +147,7 @@ def traced_publish(func, instance, args, kwargs): # set span.kind to the type of operation being performed s.set_tag_str(SPAN_KIND, SpanKind.PRODUCER) - s.set_tag(SPAN_MEASURED_KEY) + s.set_tag(_SPAN_MEASURED_KEY) exchange_name = get_exchange_from_args(args) s.resource = exchange_name s.set_tag_str(kombux.EXCHANGE, exchange_name) diff --git a/ddtrace/contrib/internal/mako/patch.py b/ddtrace/contrib/internal/mako/patch.py index ca569477773..1f65ce7e34a 100644 --- a/ddtrace/contrib/internal/mako/patch.py +++ b/ddtrace/contrib/internal/mako/patch.py @@ -3,7 +3,7 @@ from mako.template import Template from ddtrace import config -from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.contrib.internal.trace_utils import int_service from ddtrace.contrib.internal.trace_utils import unwrap as _u from ddtrace.contrib.internal.trace_utils import wrap as 
_w @@ -63,7 +63,7 @@ def _wrap_render(wrapped, instance, args, kwargs): ) as span: span.set_tag_str(COMPONENT, "mako") - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) try: return wrapped(*args, **kwargs) finally: diff --git a/ddtrace/contrib/internal/molten/patch.py b/ddtrace/contrib/internal/molten/patch.py index 6fc237a5b58..38fa949243c 100644 --- a/ddtrace/contrib/internal/molten/patch.py +++ b/ddtrace/contrib/internal/molten/patch.py @@ -6,8 +6,8 @@ from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.contrib.internal.trace_utils import unwrap as _u from ddtrace.ext import SpanKind @@ -105,7 +105,7 @@ def patch_app_call(wrapped, instance, args, kwargs): # set span.kind tag equal to type of operation being performed span.set_tag_str(SPAN_KIND, SpanKind.SERVER) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) # set analytics sample rate with global config enabled span.set_tag(_ANALYTICS_SAMPLE_RATE_KEY, config.molten.get_analytics_sample_rate(use_global_config=True)) diff --git a/ddtrace/contrib/internal/mysqldb/patch.py b/ddtrace/contrib/internal/mysqldb/patch.py index e4d124ee1f6..cde0f58629f 100644 --- a/ddtrace/contrib/internal/mysqldb/patch.py +++ b/ddtrace/contrib/internal/mysqldb/patch.py @@ -6,8 +6,8 @@ from ddtrace import config from ddtrace.appsec._iast._metrics import _set_metric_iast_instrumented_sink from ddtrace.appsec._iast.constants import VULN_SQL_INJECTION +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib.dbapi import TracedConnection from ddtrace.contrib.internal.trace_utils import _convert_to_string from ddtrace.contrib.internal.trace_utils import ext_service @@ -101,7 +101,7 
@@ def _connect(func, instance, args, kwargs): # set span.kind to the type of operation being performed span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) conn = func(*args, **kwargs) return patch_conn(conn, *args, **kwargs) diff --git a/ddtrace/contrib/internal/openai/_endpoint_hooks.py b/ddtrace/contrib/internal/openai/_endpoint_hooks.py index 979e1774a8a..00ee44aef4b 100644 --- a/ddtrace/contrib/internal/openai/_endpoint_hooks.py +++ b/ddtrace/contrib/internal/openai/_endpoint_hooks.py @@ -37,7 +37,7 @@ class _EndpointHook: OPERATION_ID = "" # Each endpoint hook must provide an operationID as specified in the OpenAI API specs: # https://raw.githubusercontent.com/openai/openai-openapi/master/openapi.yaml - def _record_request(self, pin, integration, span, args, kwargs): + def _record_request(self, pin, integration, instance, span, args, kwargs): """ Set base-level openai tags, as well as request params from args and kwargs. 
All inherited EndpointHook classes should include a super call to this method before performing @@ -45,12 +45,12 @@ def _record_request(self, pin, integration, span, args, kwargs): """ endpoint = self.ENDPOINT_NAME if endpoint is None: - endpoint = "%s" % args[0].OBJECT_NAME + endpoint = "%s" % getattr(instance, "OBJECT_NAME", "") span.set_tag_str("openai.request.endpoint", "/%s/%s" % (API_VERSION, endpoint)) span.set_tag_str("openai.request.method", self.HTTP_METHOD_TYPE) if self._request_arg_params and len(self._request_arg_params) > 1: - for idx, arg in enumerate(self._request_arg_params, 1): + for idx, arg in enumerate(self._request_arg_params): if idx >= len(args): break if arg is None or args[idx] is None: @@ -74,8 +74,8 @@ def _record_request(self, pin, integration, span, args, kwargs): else: span.set_tag_str("openai.request.%s" % kw_attr, str(kwargs[kw_attr])) - def handle_request(self, pin, integration, span, args, kwargs): - self._record_request(pin, integration, span, args, kwargs) + def handle_request(self, pin, integration, instance, span, args, kwargs): + self._record_request(pin, integration, instance, span, args, kwargs) resp, error = yield if hasattr(resp, "parse"): # Users can request the raw response, in which case we need to process on the parsed response @@ -186,8 +186,8 @@ class _CompletionHook(_BaseCompletionHook): HTTP_METHOD_TYPE = "POST" OPERATION_ID = "createCompletion" - def _record_request(self, pin, integration, span, args, kwargs): - super()._record_request(pin, integration, span, args, kwargs) + def _record_request(self, pin, integration, instance, span, args, kwargs): + super()._record_request(pin, integration, instance, span, args, kwargs) if integration.is_pc_sampled_span(span): prompt = kwargs.get("prompt", "") if isinstance(prompt, str): @@ -241,8 +241,8 @@ class _ChatCompletionHook(_BaseCompletionHook): HTTP_METHOD_TYPE = "POST" OPERATION_ID = "createChatCompletion" - def _record_request(self, pin, integration, span, args, 
kwargs): - super()._record_request(pin, integration, span, args, kwargs) + def _record_request(self, pin, integration, instance, span, args, kwargs): + super()._record_request(pin, integration, instance, span, args, kwargs) for idx, m in enumerate(kwargs.get("messages", [])): role = getattr(m, "role", "") name = getattr(m, "name", "") @@ -305,12 +305,12 @@ class _EmbeddingHook(_EndpointHook): HTTP_METHOD_TYPE = "POST" OPERATION_ID = "createEmbedding" - def _record_request(self, pin, integration, span, args, kwargs): + def _record_request(self, pin, integration, instance, span, args, kwargs): """ Embedding endpoint allows multiple inputs, each of which we specify a request tag for, so have to manually set them in _pre_response(). """ - super()._record_request(pin, integration, span, args, kwargs) + super()._record_request(pin, integration, instance, span, args, kwargs) embedding_input = kwargs.get("input", "") if integration.is_pc_sampled_span(span): if isinstance(embedding_input, str) or isinstance(embedding_input[0], int): @@ -340,8 +340,8 @@ class _ListHook(_EndpointHook): HTTP_METHOD_TYPE = "GET" OPERATION_ID = "list" - def _record_request(self, pin, integration, span, args, kwargs): - super()._record_request(pin, integration, span, args, kwargs) + def _record_request(self, pin, integration, instance, span, args, kwargs): + super()._record_request(pin, integration, instance, span, args, kwargs) endpoint = span.get_tag("openai.request.endpoint") if endpoint.endswith("/models"): span.resource = "listModels" @@ -399,15 +399,21 @@ class _RetrieveHook(_EndpointHook): HTTP_METHOD_TYPE = "GET" OPERATION_ID = "retrieve" - def _record_request(self, pin, integration, span, args, kwargs): - super()._record_request(pin, integration, span, args, kwargs) + def _record_request(self, pin, integration, instance, span, args, kwargs): + super()._record_request(pin, integration, instance, span, args, kwargs) endpoint = span.get_tag("openai.request.endpoint") if 
endpoint.endswith("/models"): span.resource = "retrieveModel" - span.set_tag_str("openai.request.model", args[1] if len(args) >= 2 else kwargs.get("model", "")) + if len(args) >= 1: + span.set_tag_str("openai.request.model", args[0]) + else: + span.set_tag_str("openai.request.model", kwargs.get("model", kwargs.get("id", ""))) elif endpoint.endswith("/files"): span.resource = "retrieveFile" - span.set_tag_str("openai.request.file_id", args[1] if len(args) >= 2 else kwargs.get("file_id", "")) + if len(args) >= 1: + span.set_tag_str("openai.request.file_id", args[0]) + else: + span.set_tag_str("openai.request.file_id", kwargs.get("file_id", kwargs.get("id", ""))) span.set_tag_str("openai.request.endpoint", "%s/*" % endpoint) def _record_response(self, pin, integration, span, args, kwargs, resp, error): @@ -434,10 +440,6 @@ class _ModelRetrieveHook(_RetrieveHook): ENDPOINT_NAME = "models" OPERATION_ID = "retrieveModel" - def _record_request(self, pin, integration, span, args, kwargs): - super()._record_request(pin, integration, span, args, kwargs) - span.set_tag_str("openai.request.model", args[1] if len(args) >= 2 else kwargs.get("model", "")) - class _FileRetrieveHook(_RetrieveHook): """ @@ -447,10 +449,6 @@ class _FileRetrieveHook(_RetrieveHook): ENDPOINT_NAME = "files" OPERATION_ID = "retrieveFile" - def _record_request(self, pin, integration, span, args, kwargs): - super()._record_request(pin, integration, span, args, kwargs) - span.set_tag_str("openai.request.file_id", args[1] if len(args) >= 2 else kwargs.get("file_id", "")) - class _DeleteHook(_EndpointHook): """Hook for openai.DeletableAPIResource, which is used by File.delete, and Model.delete.""" @@ -461,15 +459,21 @@ class _DeleteHook(_EndpointHook): HTTP_METHOD_TYPE = "DELETE" OPERATION_ID = "delete" - def _record_request(self, pin, integration, span, args, kwargs): - super()._record_request(pin, integration, span, args, kwargs) + def _record_request(self, pin, integration, instance, span, args, kwargs): + 
super()._record_request(pin, integration, instance, span, args, kwargs) endpoint = span.get_tag("openai.request.endpoint") if endpoint.endswith("/models"): span.resource = "deleteModel" - span.set_tag_str("openai.request.model", args[1] if len(args) >= 2 else kwargs.get("model", "")) + if len(args) >= 1: + span.set_tag_str("openai.request.model", args[0]) + else: + span.set_tag_str("openai.request.model", kwargs.get("model", kwargs.get("sid", ""))) elif endpoint.endswith("/files"): span.resource = "deleteFile" - span.set_tag_str("openai.request.file_id", args[1] if len(args) >= 2 else kwargs.get("file_id", "")) + if len(args) >= 1: + span.set_tag_str("openai.request.file_id", args[0]) + else: + span.set_tag_str("openai.request.file_id", kwargs.get("file_id", kwargs.get("sid", ""))) span.set_tag_str("openai.request.endpoint", "%s/*" % endpoint) def _record_response(self, pin, integration, span, args, kwargs, resp, error): @@ -508,8 +512,8 @@ class _ImageHook(_EndpointHook): ENDPOINT_NAME = "images" HTTP_METHOD_TYPE = "POST" - def _record_request(self, pin, integration, span, args, kwargs): - super()._record_request(pin, integration, span, args, kwargs) + def _record_request(self, pin, integration, instance, span, args, kwargs): + super()._record_request(pin, integration, instance, span, args, kwargs) span.set_tag_str("openai.request.model", "dall-e") def _record_response(self, pin, integration, span, args, kwargs, resp, error): @@ -526,10 +530,10 @@ def _record_response(self, pin, integration, span, args, kwargs, resp, error): if "prompt" in self._request_kwarg_params: attrs_dict.update({"prompt": kwargs.get("prompt", "")}) if "image" in self._request_kwarg_params: - image = args[1] if len(args) >= 2 else kwargs.get("image", "") + image = args[0] if len(args) >= 1 else kwargs.get("image", "") attrs_dict.update({"image": image.name.split("/")[-1]}) if "mask" in self._request_kwarg_params: - mask = args[2] if len(args) >= 3 else kwargs.get("mask", "") + mask = args[1] 
if len(args) >= 2 else kwargs.get("mask", "") attrs_dict.update({"mask": mask.name.split("/")[-1]}) integration.log( span, "info" if error is None else "error", "sampled %s" % self.OPERATION_ID, attrs=attrs_dict @@ -560,12 +564,12 @@ class _ImageEditHook(_ImageHook): ENDPOINT_NAME = "images/edits" OPERATION_ID = "createImageEdit" - def _record_request(self, pin, integration, span, args, kwargs): - super()._record_request(pin, integration, span, args, kwargs) + def _record_request(self, pin, integration, instance, span, args, kwargs): + super()._record_request(pin, integration, instance, span, args, kwargs) if not integration.is_pc_sampled_span: return - image = args[1] if len(args) >= 2 else kwargs.get("image", "") - mask = args[2] if len(args) >= 3 else kwargs.get("mask", "") + image = args[0] if len(args) >= 1 else kwargs.get("image", "") + mask = args[1] if len(args) >= 2 else kwargs.get("mask", "") if image: if hasattr(image, "name"): span.set_tag_str("openai.request.image", integration.trunc(image.name.split("/")[-1])) @@ -584,11 +588,11 @@ class _ImageVariationHook(_ImageHook): ENDPOINT_NAME = "images/variations" OPERATION_ID = "createImageVariation" - def _record_request(self, pin, integration, span, args, kwargs): - super()._record_request(pin, integration, span, args, kwargs) + def _record_request(self, pin, integration, instance, span, args, kwargs): + super()._record_request(pin, integration, instance, span, args, kwargs) if not integration.is_pc_sampled_span: return - image = args[1] if len(args) >= 2 else kwargs.get("image", "") + image = args[0] if len(args) >= 1 else kwargs.get("image", "") if image: if hasattr(image, "name"): span.set_tag_str("openai.request.image", integration.trunc(image.name.split("/")[-1])) @@ -602,11 +606,11 @@ class _BaseAudioHook(_EndpointHook): ENDPOINT_NAME = "audio" HTTP_METHOD_TYPE = "POST" - def _record_request(self, pin, integration, span, args, kwargs): - super()._record_request(pin, integration, span, args, kwargs) + 
def _record_request(self, pin, integration, instance, span, args, kwargs): + super()._record_request(pin, integration, instance, span, args, kwargs) if not integration.is_pc_sampled_span: return - audio_file = args[2] if len(args) >= 3 else kwargs.get("file", "") + audio_file = args[1] if len(args) >= 2 else kwargs.get("file", "") if audio_file and hasattr(audio_file, "name"): span.set_tag_str("openai.request.filename", integration.trunc(audio_file.name.split("/")[-1])) else: @@ -626,7 +630,7 @@ def _record_response(self, pin, integration, span, args, kwargs, resp, error): if integration.is_pc_sampled_span(span): span.set_tag_str("openai.response.text", integration.trunc(text)) if integration.is_pc_sampled_log(span): - file_input = args[2] if len(args) >= 3 else kwargs.get("file", "") + file_input = args[1] if len(args) >= 2 else kwargs.get("file", "") integration.log( span, "info" if error is None else "error", @@ -685,8 +689,8 @@ class _ModerationHook(_EndpointHook): HTTP_METHOD_TYPE = "POST" OPERATION_ID = "createModeration" - def _record_request(self, pin, integration, span, args, kwargs): - super()._record_request(pin, integration, span, args, kwargs) + def _record_request(self, pin, integration, instance, span, args, kwargs): + super()._record_request(pin, integration, instance, span, args, kwargs) def _record_response(self, pin, integration, span, args, kwargs, resp, error): resp = super()._record_response(pin, integration, span, args, kwargs, resp, error) @@ -718,14 +722,14 @@ class _FileCreateHook(_BaseFileHook): "organization", "user_provided_filename", ) - _request_kwarg_params = ("purpose",) + _request_kwarg_params = ("purpose", "user_provided_filename") _response_attrs = ("id", "bytes", "created_at", "filename", "purpose", "status", "status_details") HTTP_METHOD_TYPE = "POST" OPERATION_ID = "createFile" - def _record_request(self, pin, integration, span, args, kwargs): - super()._record_request(pin, integration, span, args, kwargs) - fp = args[1] if 
len(args) >= 2 else kwargs.get("file", "") + def _record_request(self, pin, integration, instance, span, args, kwargs): + super()._record_request(pin, integration, instance, span, args, kwargs) + fp = args[0] if len(args) >= 1 else kwargs.get("file", "") if fp and hasattr(fp, "name"): span.set_tag_str("openai.request.filename", fp.name.split("/")[-1]) else: @@ -742,9 +746,9 @@ class _FileDownloadHook(_BaseFileHook): OPERATION_ID = "downloadFile" ENDPOINT_NAME = "files/*/content" - def _record_request(self, pin, integration, span, args, kwargs): - super()._record_request(pin, integration, span, args, kwargs) - span.set_tag_str("openai.request.file_id", args[1] if len(args) >= 2 else kwargs.get("file_id", "")) + def _record_request(self, pin, integration, instance, span, args, kwargs): + super()._record_request(pin, integration, instance, span, args, kwargs) + span.set_tag_str("openai.request.file_id", args[0] if len(args) >= 1 else kwargs.get("file_id", "")) def _record_response(self, pin, integration, span, args, kwargs, resp, error): resp = super()._record_response(pin, integration, span, args, kwargs, resp, error) diff --git a/ddtrace/contrib/internal/openai/patch.py b/ddtrace/contrib/internal/openai/patch.py index d87b06b3aba..39f79d13795 100644 --- a/ddtrace/contrib/internal/openai/patch.py +++ b/ddtrace/contrib/internal/openai/patch.py @@ -6,12 +6,14 @@ from ddtrace import config from ddtrace.contrib.internal.openai import _endpoint_hooks from ddtrace.contrib.internal.openai.utils import _format_openai_api_key +from ddtrace.contrib.trace_utils import unwrap +from ddtrace.contrib.trace_utils import with_traced_module +from ddtrace.contrib.trace_utils import wrap from ddtrace.internal.logger import get_logger from ddtrace.internal.schema import schematize_service_name from ddtrace.internal.utils.formats import asbool from ddtrace.internal.utils.formats import deep_getattr from ddtrace.internal.utils.version import parse_version -from ddtrace.internal.wrapping 
import wrap from ddtrace.llmobs._integrations import OpenAIIntegration from ddtrace.trace import Pin @@ -80,8 +82,9 @@ def get_version(): else: _RESOURCES = { "model.Model": { - "list": _endpoint_hooks._ListHook, - "retrieve": _endpoint_hooks._RetrieveHook, + "list": _endpoint_hooks._ModelListHook, + "retrieve": _endpoint_hooks._ModelRetrieveHook, + "delete": _endpoint_hooks._ModelDeleteHook, }, "completion.Completion": { "create": _endpoint_hooks._CompletionHook, @@ -105,19 +108,15 @@ def get_version(): "create": _endpoint_hooks._ModerationHook, }, "file.File": { - # File.list() and File.retrieve() share the same underlying method as Model.list() and Model.retrieve() - # which means they are already wrapped + "list": _endpoint_hooks._FileListHook, + "retrieve": _endpoint_hooks._FileRetrieveHook, "create": _endpoint_hooks._FileCreateHook, - "delete": _endpoint_hooks._DeleteHook, + "delete": _endpoint_hooks._FileDeleteHook, "download": _endpoint_hooks._FileDownloadHook, }, } -def _wrap_classmethod(obj, wrapper): - wrap(obj.__func__, wrapper) - - def patch(): # Avoid importing openai at the module level, eventually will be an import hook import openai @@ -127,72 +126,106 @@ def patch(): Pin().onto(openai) integration = OpenAIIntegration(integration_config=config.openai, openai=openai) + openai._datadog_integration = integration if OPENAI_VERSION >= (1, 0, 0): if OPENAI_VERSION >= (1, 8, 0): - wrap(openai._base_client.SyncAPIClient._process_response, _patched_convert(openai, integration)) - wrap(openai._base_client.AsyncAPIClient._process_response, _patched_convert(openai, integration)) + wrap(openai, "_base_client.SyncAPIClient._process_response", patched_convert(openai)) + wrap(openai, "_base_client.AsyncAPIClient._process_response", patched_convert(openai)) else: - wrap(openai._base_client.BaseClient._process_response, _patched_convert(openai, integration)) - wrap(openai.OpenAI.__init__, _patched_client_init(openai, integration)) - wrap(openai.AsyncOpenAI.__init__, 
_patched_client_init(openai, integration)) - wrap(openai.AzureOpenAI.__init__, _patched_client_init(openai, integration)) - wrap(openai.AsyncAzureOpenAI.__init__, _patched_client_init(openai, integration)) + wrap(openai, "_base_client.BaseClient._process_response", patched_convert(openai)) + wrap(openai, "OpenAI.__init__", patched_client_init(openai)) + wrap(openai, "AsyncOpenAI.__init__", patched_client_init(openai)) + wrap(openai, "AzureOpenAI.__init__", patched_client_init(openai)) + wrap(openai, "AsyncAzureOpenAI.__init__", patched_client_init(openai)) for resource, method_hook_dict in _RESOURCES.items(): if deep_getattr(openai.resources, resource) is None: continue for method_name, endpoint_hook in method_hook_dict.items(): - sync_method = deep_getattr(openai.resources, "%s.%s" % (resource, method_name)) - async_method = deep_getattr( - openai.resources, "%s.%s" % (".Async".join(resource.split(".")), method_name) - ) - wrap(sync_method, _patched_endpoint(openai, integration, endpoint_hook)) - wrap(async_method, _patched_endpoint_async(openai, integration, endpoint_hook)) + sync_method = "resources.{}.{}".format(resource, method_name) + async_method = "resources.{}.{}".format(".Async".join(resource.split(".")), method_name) + wrap(openai, sync_method, _patched_endpoint(openai, endpoint_hook)) + wrap(openai, async_method, _patched_endpoint_async(openai, endpoint_hook)) else: import openai.api_requestor - wrap(openai.api_requestor._make_session, _patched_make_session) - wrap(openai.util.convert_to_openai_object, _patched_convert(openai, integration)) + wrap(openai, "api_requestor._make_session", _patched_make_session) + wrap(openai, "util.convert_to_openai_object", patched_convert(openai)) for resource, method_hook_dict in _RESOURCES.items(): if deep_getattr(openai.api_resources, resource) is None: continue for method_name, endpoint_hook in method_hook_dict.items(): - sync_method = deep_getattr(openai.api_resources, "%s.%s" % (resource, method_name)) - 
async_method = deep_getattr(openai.api_resources, "%s.a%s" % (resource, method_name)) - _wrap_classmethod(sync_method, _patched_endpoint(openai, integration, endpoint_hook)) - _wrap_classmethod(async_method, _patched_endpoint_async(openai, integration, endpoint_hook)) + sync_method = "api_resources.{}.{}".format(resource, method_name) + async_method = "api_resources.{}.a{}".format(resource, method_name) + wrap(openai, sync_method, _patched_endpoint(openai, endpoint_hook)) + wrap(openai, async_method, _patched_endpoint_async(openai, endpoint_hook)) openai.__datadog_patch = True def unpatch(): - # FIXME: add unpatching. The current wrapping.unwrap method requires - # the wrapper function to be provided which we don't keep a reference to. - pass + import openai + if not getattr(openai, "__datadog_patch", False): + return -def _patched_client_init(openai, integration): - """ - Patch for `openai.OpenAI/AsyncOpenAI` client init methods to add the client object to the OpenAIIntegration object. 
- """ + openai.__datadog_patch = False - def patched_client_init(func, args, kwargs): - func(*args, **kwargs) - client = args[0] - integration._client = client - api_key = kwargs.get("api_key") - if api_key is None: - api_key = client.api_key - if api_key is not None: - integration.user_api_key = api_key - return + if OPENAI_VERSION >= (1, 0, 0): + if OPENAI_VERSION >= (1, 8, 0): + unwrap(openai._base_client.SyncAPIClient, "_process_response") + unwrap(openai._base_client.AsyncAPIClient, "_process_response") + else: + unwrap(openai._base_client.BaseClient, "_process_response") + unwrap(openai.OpenAI, "__init__") + unwrap(openai.AsyncOpenAI, "__init__") + unwrap(openai.AzureOpenAI, "__init__") + unwrap(openai.AsyncAzureOpenAI, "__init__") + + for resource, method_hook_dict in _RESOURCES.items(): + if deep_getattr(openai.resources, resource) is None: + continue + for method_name, _ in method_hook_dict.items(): + sync_resource = deep_getattr(openai.resources, resource) + async_resource = deep_getattr(openai.resources, ".Async".join(resource.split("."))) + unwrap(sync_resource, method_name) + unwrap(async_resource, method_name) + else: + import openai.api_requestor + + unwrap(openai.api_requestor, "_make_session") + unwrap(openai.util, "convert_to_openai_object") - return patched_client_init + for resource, method_hook_dict in _RESOURCES.items(): + if deep_getattr(openai.api_resources, resource) is None: + continue + for method_name, _ in method_hook_dict.items(): + resource_obj = deep_getattr(openai.api_resources, resource) + unwrap(resource_obj, method_name) + unwrap(resource_obj, "a{}".format(method_name)) + delattr(openai, "_datadog_integration") -def _patched_make_session(func, args, kwargs): + +@with_traced_module +def patched_client_init(openai, pin, func, instance, args, kwargs): + """ + Patch for `openai.OpenAI/AsyncOpenAI` client init methods to add the client object to the OpenAIIntegration object. 
+ """ + func(*args, **kwargs) + integration = openai._datadog_integration + integration._client = instance + api_key = kwargs.get("api_key") + if api_key is None: + api_key = instance.api_key + if api_key is not None: + integration.user_api_key = api_key + return + + +def _patched_make_session(func, instance, args, kwargs): """Patch for `openai.api_requestor._make_session` which sets the service name on the requests session so that spans from the requests integration will use the service name openai. This is done so that the service break down will include OpenAI time spent querying the OpenAI backend. @@ -205,7 +238,7 @@ def _patched_make_session(func, args, kwargs): return session -def _traced_endpoint(endpoint_hook, integration, pin, args, kwargs): +def _traced_endpoint(endpoint_hook, integration, instance, pin, args, kwargs): span = integration.trace(pin, endpoint_hook.OPERATION_ID) openai_api_key = _format_openai_api_key(kwargs.get("api_key")) err = None @@ -214,7 +247,7 @@ def _traced_endpoint(endpoint_hook, integration, pin, args, kwargs): span.set_tag_str("openai.user.api_key", openai_api_key) try: # Start the hook - hook = endpoint_hook().handle_request(pin, integration, span, args, kwargs) + hook = endpoint_hook().handle_request(pin, integration, instance, span, args, kwargs) hook.send(None) resp, err = yield @@ -238,19 +271,11 @@ def _traced_endpoint(endpoint_hook, integration, pin, args, kwargs): integration.metric(span, "dist", "request.duration", span.duration_ns) -def _patched_endpoint(openai, integration, patch_hook): - def patched_endpoint(func, args, kwargs): - # FIXME: this is a temporary workaround for the fact that our bytecode wrapping seems to modify - # a function keyword argument into a cell when it shouldn't. This is only an issue on - # Python 3.11+. 
- if sys.version_info >= (3, 11) and kwargs.get("encoding_format", None): - kwargs["encoding_format"] = kwargs["encoding_format"].cell_contents - - pin = Pin._find(openai, args[0]) - if not pin or not pin.enabled(): - return func(*args, **kwargs) - - g = _traced_endpoint(patch_hook, integration, pin, args, kwargs) +def _patched_endpoint(openai, patch_hook): + @with_traced_module + def patched_endpoint(openai, pin, func, instance, args, kwargs): + integration = openai._datadog_integration + g = _traced_endpoint(patch_hook, integration, instance, pin, args, kwargs) g.send(None) resp, err = None, None try: @@ -267,22 +292,15 @@ def patched_endpoint(func, args, kwargs): # This return takes priority over `return resp` return e.value # noqa: B012 - return patched_endpoint + return patched_endpoint(openai) -def _patched_endpoint_async(openai, integration, patch_hook): +def _patched_endpoint_async(openai, patch_hook): # Same as _patched_endpoint but async - async def patched_endpoint(func, args, kwargs): - # FIXME: this is a temporary workaround for the fact that our bytecode wrapping seems to modify - # a function keyword argument into a cell when it shouldn't. This is only an issue on - # Python 3.11+. 
- if sys.version_info >= (3, 11) and kwargs.get("encoding_format", None): - kwargs["encoding_format"] = kwargs["encoding_format"].cell_contents - - pin = Pin._find(openai, args[0]) - if not pin or not pin.enabled(): - return await func(*args, **kwargs) - g = _traced_endpoint(patch_hook, integration, pin, args, kwargs) + @with_traced_module + async def patched_endpoint(openai, pin, func, instance, args, kwargs): + integration = openai._datadog_integration + g = _traced_endpoint(patch_hook, integration, instance, pin, args, kwargs) g.send(None) resp, err = None, None try: @@ -304,59 +322,54 @@ async def patched_endpoint(func, args, kwargs): # This return takes priority over `return resp` return e.value # noqa: B012 - return patched_endpoint + return patched_endpoint(openai) -def _patched_convert(openai, integration): - def patched_convert(func, args, kwargs): - """Patch convert captures header information in the openai response""" - pin = Pin.get_from(openai) - if not pin or not pin.enabled(): - return func(*args, **kwargs) - - span = pin.tracer.current_span() - if not span: - return func(*args, **kwargs) +@with_traced_module +def patched_convert(openai, pin, func, instance, args, kwargs): + """Patch convert captures header information in the openai response""" + integration = openai._datadog_integration + span = pin.tracer.current_span() + if not span: + return func(*args, **kwargs) - if OPENAI_VERSION < (1, 0, 0): - resp = args[0] - if not isinstance(resp, openai.openai_response.OpenAIResponse): - return func(*args, **kwargs) - headers = resp._headers - else: - resp = kwargs.get("response", {}) - headers = resp.headers - # This function is called for each chunk in the stream. - # To prevent needlessly setting the same tags for each chunk, short-circuit here. 
- if span.get_tag("openai.organization.name") is not None: + if OPENAI_VERSION < (1, 0, 0): + resp = args[0] + if not isinstance(resp, openai.openai_response.OpenAIResponse): return func(*args, **kwargs) - if headers.get("openai-organization"): - org_name = headers.get("openai-organization") - span.set_tag_str("openai.organization.name", org_name) - - # Gauge total rate limit - if headers.get("x-ratelimit-limit-requests"): - v = headers.get("x-ratelimit-limit-requests") - if v is not None: - integration.metric(span, "gauge", "ratelimit.requests", int(v)) - span.set_metric("openai.organization.ratelimit.requests.limit", int(v)) - if headers.get("x-ratelimit-limit-tokens"): - v = headers.get("x-ratelimit-limit-tokens") - if v is not None: - integration.metric(span, "gauge", "ratelimit.tokens", int(v)) - span.set_metric("openai.organization.ratelimit.tokens.limit", int(v)) - # Gauge and set span info for remaining requests and tokens - if headers.get("x-ratelimit-remaining-requests"): - v = headers.get("x-ratelimit-remaining-requests") - if v is not None: - integration.metric(span, "gauge", "ratelimit.remaining.requests", int(v)) - span.set_metric("openai.organization.ratelimit.requests.remaining", int(v)) - if headers.get("x-ratelimit-remaining-tokens"): - v = headers.get("x-ratelimit-remaining-tokens") - if v is not None: - integration.metric(span, "gauge", "ratelimit.remaining.tokens", int(v)) - span.set_metric("openai.organization.ratelimit.tokens.remaining", int(v)) - + headers = resp._headers + else: + resp = kwargs.get("response", {}) + headers = resp.headers + # This function is called for each chunk in the stream. + # To prevent needlessly setting the same tags for each chunk, short-circuit here. 
+ if span.get_tag("openai.organization.name") is not None: return func(*args, **kwargs) - - return patched_convert + if headers.get("openai-organization"): + org_name = headers.get("openai-organization") + span.set_tag_str("openai.organization.name", org_name) + + # Gauge total rate limit + if headers.get("x-ratelimit-limit-requests"): + v = headers.get("x-ratelimit-limit-requests") + if v is not None: + integration.metric(span, "gauge", "ratelimit.requests", int(v)) + span.set_metric("openai.organization.ratelimit.requests.limit", int(v)) + if headers.get("x-ratelimit-limit-tokens"): + v = headers.get("x-ratelimit-limit-tokens") + if v is not None: + integration.metric(span, "gauge", "ratelimit.tokens", int(v)) + span.set_metric("openai.organization.ratelimit.tokens.limit", int(v)) + # Gauge and set span info for remaining requests and tokens + if headers.get("x-ratelimit-remaining-requests"): + v = headers.get("x-ratelimit-remaining-requests") + if v is not None: + integration.metric(span, "gauge", "ratelimit.remaining.requests", int(v)) + span.set_metric("openai.organization.ratelimit.requests.remaining", int(v)) + if headers.get("x-ratelimit-remaining-tokens"): + v = headers.get("x-ratelimit-remaining-tokens") + if v is not None: + integration.metric(span, "gauge", "ratelimit.remaining.tokens", int(v)) + span.set_metric("openai.organization.ratelimit.tokens.remaining", int(v)) + + return func(*args, **kwargs) diff --git a/ddtrace/contrib/internal/psycopg/async_connection.py b/ddtrace/contrib/internal/psycopg/async_connection.py index 8d400330ff5..f17b4c7a953 100644 --- a/ddtrace/contrib/internal/psycopg/async_connection.py +++ b/ddtrace/contrib/internal/psycopg/async_connection.py @@ -1,6 +1,6 @@ from ddtrace import config +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import dbapi_async from ddtrace.contrib.internal.psycopg.async_cursor import 
Psycopg3FetchTracedAsyncCursor from ddtrace.contrib.internal.psycopg.async_cursor import Psycopg3TracedAsyncCursor @@ -58,7 +58,7 @@ async def patched_connect_async(connect_func, _, args, kwargs): if span.get_tag(db.SYSTEM) is None: span.set_tag_str(db.SYSTEM, pin._config.dbms_name) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) conn = await connect_func(*args, **kwargs) return patch_conn(conn, pin=pin, traced_conn_cls=traced_conn_cls) diff --git a/ddtrace/contrib/internal/psycopg/connection.py b/ddtrace/contrib/internal/psycopg/connection.py index c3b7caef2c4..6e9190421cb 100644 --- a/ddtrace/contrib/internal/psycopg/connection.py +++ b/ddtrace/contrib/internal/psycopg/connection.py @@ -1,6 +1,6 @@ from ddtrace import config +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import dbapi from ddtrace.contrib.internal.psycopg.cursor import Psycopg2FetchTracedCursor from ddtrace.contrib.internal.psycopg.cursor import Psycopg2TracedCursor @@ -102,7 +102,7 @@ def patched_connect(connect_func, _, args, kwargs): if span.get_tag(db.SYSTEM) is None: span.set_tag_str(db.SYSTEM, pin._config.dbms_name) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) conn = connect_func(*args, **kwargs) return patch_conn(conn, pin=pin, traced_conn_cls=traced_conn_cls) diff --git a/ddtrace/contrib/internal/psycopg/extensions.py b/ddtrace/contrib/internal/psycopg/extensions.py index cebe5c1cdf8..f5960e073aa 100644 --- a/ddtrace/contrib/internal/psycopg/extensions.py +++ b/ddtrace/contrib/internal/psycopg/extensions.py @@ -6,8 +6,8 @@ import wrapt from ddtrace import config +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.ext import SpanKind from ddtrace.ext import SpanTypes from ddtrace.ext import db @@ -42,7 +42,7 @@ def execute(self, query, 
vars=None): # noqa: A002 # set span.kind to the type of operation being performed s.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - s.set_tag(SPAN_MEASURED_KEY) + s.set_tag(_SPAN_MEASURED_KEY) if s.context.sampling_priority is None or s.context.sampling_priority <= 0: return super(TracedCursor, self).execute(query, vars) diff --git a/ddtrace/contrib/internal/pylibmc/client.py b/ddtrace/contrib/internal/pylibmc/client.py index 3ea6f09c62c..5321b533293 100644 --- a/ddtrace/contrib/internal/pylibmc/client.py +++ b/ddtrace/contrib/internal/pylibmc/client.py @@ -8,8 +8,8 @@ import ddtrace from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib.internal.pylibmc.addrs import parse_addresses from ddtrace.ext import SpanKind from ddtrace.ext import SpanTypes @@ -177,7 +177,7 @@ def _span(self, cmd_name): # set span.kind to the type of operation being performed span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) try: self._tag_span(span) diff --git a/ddtrace/contrib/internal/pymemcache/client.py b/ddtrace/contrib/internal/pymemcache/client.py index 37e14842a94..574332586e5 100644 --- a/ddtrace/contrib/internal/pymemcache/client.py +++ b/ddtrace/contrib/internal/pymemcache/client.py @@ -18,8 +18,8 @@ # project from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.ext import SpanKind from ddtrace.ext import SpanTypes from ddtrace.ext import db @@ -319,7 +319,7 @@ def _trace(func, p, method_name, *args, **kwargs): # set span.kind to the type of operation being performed span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) # 
set analytics sample rate span.set_tag(_ANALYTICS_SAMPLE_RATE_KEY, config.pymemcache.get_analytics_sample_rate()) diff --git a/ddtrace/contrib/internal/pymongo/client.py b/ddtrace/contrib/internal/pymongo/client.py index 2cdf2185586..269a3120fbd 100644 --- a/ddtrace/contrib/internal/pymongo/client.py +++ b/ddtrace/contrib/internal/pymongo/client.py @@ -12,8 +12,8 @@ import ddtrace from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.ext import SpanKind from ddtrace.ext import SpanTypes @@ -141,7 +141,7 @@ def _datadog_trace_operation(operation, wrapped): # set span.kind to the operation type being performed span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) span.set_tag_str(mongox.DB, cmd.db) span.set_tag_str(mongox.COLLECTION, cmd.coll) span.set_tag_str(db.SYSTEM, mongox.SERVICE) @@ -265,7 +265,7 @@ def _trace_cmd(cmd, socket_instance, address): # set span.kind to the type of operation being performed s.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - s.set_tag(SPAN_MEASURED_KEY) + s.set_tag(_SPAN_MEASURED_KEY) if cmd.db: s.set_tag_str(mongox.DB, cmd.db) if cmd: diff --git a/ddtrace/contrib/internal/pymongo/patch.py b/ddtrace/contrib/internal/pymongo/patch.py index 200a4a902b8..f37a07be5cd 100644 --- a/ddtrace/contrib/internal/pymongo/patch.py +++ b/ddtrace/contrib/internal/pymongo/patch.py @@ -3,8 +3,8 @@ import pymongo from ddtrace import config +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.ext import SpanKind from ddtrace.ext import SpanTypes @@ -129,7 +129,7 @@ def traced_get_socket(func, args, kwargs): with func(*args, **kwargs) as sock_info: 
set_address_tags(span, sock_info.address) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) # Ensure the pin used on the traced mongo client is passed down to the socket instance # (via the server instance) Pin.get_from(instance).onto(sock_info) diff --git a/ddtrace/contrib/internal/pynamodb/patch.py b/ddtrace/contrib/internal/pynamodb/patch.py index 93171d176bd..29bf44b8df9 100644 --- a/ddtrace/contrib/internal/pynamodb/patch.py +++ b/ddtrace/contrib/internal/pynamodb/patch.py @@ -7,8 +7,8 @@ from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.contrib.internal.trace_utils import unwrap from ddtrace.ext import SpanKind @@ -70,7 +70,7 @@ def patched_api_call(original_func, instance, args, kwargs): # set span.kind to the type of operation being performed span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) try: operation = get_argument_value(args, kwargs, 0, "operation_name") diff --git a/ddtrace/contrib/internal/pyramid/trace.py b/ddtrace/contrib/internal/pyramid/trace.py index cd0569f62df..9942c673d4e 100644 --- a/ddtrace/contrib/internal/pyramid/trace.py +++ b/ddtrace/contrib/internal/pyramid/trace.py @@ -7,8 +7,8 @@ import ddtrace from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.ext import SpanKind from ddtrace.ext import SpanTypes @@ -83,7 +83,7 @@ def trace_tween(request): # set span.kind to the type of operation being performed span.set_tag_str(SPAN_KIND, SpanKind.SERVER) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) 
# Configure trace search sample rate # DEV: pyramid is special case maintains separate configuration from config api analytics_enabled = settings.get(SETTINGS_ANALYTICS_ENABLED) diff --git a/ddtrace/contrib/internal/pytest/_retry_utils.py b/ddtrace/contrib/internal/pytest/_retry_utils.py index eab45f049be..98133ec505a 100644 --- a/ddtrace/contrib/internal/pytest/_retry_utils.py +++ b/ddtrace/contrib/internal/pytest/_retry_utils.py @@ -117,7 +117,7 @@ def _retry_run_when(item, when, outcomes: RetryOutcomes) -> t.Tuple[CallInfo, _p ) else: call = CallInfo.from_call(lambda: hook(item=item), when=when) - report = pytest.TestReport.from_item_and_call(item=item, call=call) + report = item.ihook.pytest_runtest_makereport(item=item, call=call) if report.outcome == "passed": report.outcome = outcomes.PASSED elif report.outcome == "failed" or report.outcome == "error": diff --git a/ddtrace/contrib/internal/rediscluster/patch.py b/ddtrace/contrib/internal/rediscluster/patch.py index c550df7e9ea..23b37502310 100644 --- a/ddtrace/contrib/internal/rediscluster/patch.py +++ b/ddtrace/contrib/internal/rediscluster/patch.py @@ -7,8 +7,8 @@ # project from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.contrib.internal.redis.patch import instrumented_execute_command from ddtrace.contrib.internal.redis.patch import instrumented_pipeline @@ -102,7 +102,7 @@ def traced_execute_pipeline(func, instance, args, kwargs): s.set_tag_str(SPAN_KIND, SpanKind.CLIENT) s.set_tag_str(COMPONENT, config.rediscluster.integration_name) s.set_tag_str(db.SYSTEM, redisx.APP) - s.set_tag(SPAN_MEASURED_KEY) + s.set_tag(_SPAN_MEASURED_KEY) s.set_tag_str(redisx.RAWCMD, resource) s.set_metric(redisx.PIPELINE_LEN, len(instance.command_stack)) diff --git 
a/ddtrace/contrib/internal/requests/connection.py b/ddtrace/contrib/internal/requests/connection.py index 9ad198faacd..0b58f8b6dc5 100644 --- a/ddtrace/contrib/internal/requests/connection.py +++ b/ddtrace/contrib/internal/requests/connection.py @@ -3,8 +3,8 @@ import ddtrace from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.contrib.internal.trace_utils import _sanitized_url from ddtrace.ext import SpanKind @@ -75,7 +75,7 @@ def _wrap_send(func, instance, args, kwargs): hostname, path = _extract_hostname_and_path(url) host_without_port = hostname.split(":")[0] if hostname is not None else None - cfg = config.get_from(instance) + cfg = config._get_from(instance) service = None if cfg["split_by_domain"] and hostname: service = hostname @@ -93,11 +93,11 @@ def _wrap_send(func, instance, args, kwargs): # set span.kind to the type of operation being performed span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) # Configure trace search sample rate # DEV: analytics enabled on per-session basis - cfg = config.get_from(instance) + cfg = config._get_from(instance) analytics_enabled = cfg.get("analytics_enabled") if analytics_enabled: span.set_tag(_ANALYTICS_SAMPLE_RATE_KEY, cfg.get("analytics_sample_rate", True)) diff --git a/ddtrace/contrib/internal/sanic/patch.py b/ddtrace/contrib/internal/sanic/patch.py index 5d105cf2f32..8e53ed41dc8 100644 --- a/ddtrace/contrib/internal/sanic/patch.py +++ b/ddtrace/contrib/internal/sanic/patch.py @@ -273,7 +273,7 @@ async def sanic_http_lifecycle_exception(request, exception): # Do not attach exception for exceptions not considered as errors # ex: Http 400s # DEV: We still need to set `__dd_span_call_finish` below - if not hasattr(exception, 
"status_code") or config.http_server.is_error_code(exception.status_code): + if not hasattr(exception, "status_code") or config._http_server.is_error_code(exception.status_code): ex_type = type(exception) ex_tb = getattr(exception, "__traceback__", None) span.set_exc_info(ex_type, exception, ex_tb) diff --git a/ddtrace/contrib/internal/sqlalchemy/engine.py b/ddtrace/contrib/internal/sqlalchemy/engine.py index 3b5f96be9e7..6e123c97db8 100644 --- a/ddtrace/contrib/internal/sqlalchemy/engine.py +++ b/ddtrace/contrib/internal/sqlalchemy/engine.py @@ -19,8 +19,8 @@ import ddtrace from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.ext import SpanKind from ddtrace.ext import SpanTypes from ddtrace.ext import db @@ -102,7 +102,7 @@ def _before_cur_exec(self, conn, cursor, statement, *args): # set span.kind to the type of operation being performed span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) if not _set_tags_from_url(span, conn.engine.url): _set_tags_from_cursor(span, self.vendor, cursor) diff --git a/ddtrace/contrib/internal/tornado/handlers.py b/ddtrace/contrib/internal/tornado/handlers.py index ff3a97cf2b8..f858e33ee29 100644 --- a/ddtrace/contrib/internal/tornado/handlers.py +++ b/ddtrace/contrib/internal/tornado/handlers.py @@ -4,8 +4,8 @@ from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib import trace_utils from ddtrace.contrib.internal.trace_utils import set_http_meta from ddtrace.ext import SpanKind @@ -50,7 +50,7 @@ def execute(func, handler, args, kwargs): # set span.kind to the type of operation being performed 
request_span.set_tag_str(SPAN_KIND, SpanKind.SERVER) - request_span.set_tag(SPAN_MEASURED_KEY) + request_span.set_tag(_SPAN_MEASURED_KEY) # set analytics sample rate # DEV: tornado is special case maintains separate configuration from config api analytics_enabled = settings["analytics_enabled"] @@ -140,7 +140,7 @@ def log_exception(func, handler, args, kwargs): # is not a 2xx. In this case we want to check the status code to be sure that # only 5xx are traced as errors, while any other HTTPError exception is handled as # usual. - if config.http_server.is_error_code(value.status_code): + if config._http_server.is_error_code(value.status_code): current_span.set_exc_info(*args) else: # any other uncaught exception should be reported as error diff --git a/ddtrace/contrib/internal/trace_utils.py b/ddtrace/contrib/internal/trace_utils.py index 7b7afc1b9c1..3e34eaca99e 100644 --- a/ddtrace/contrib/internal/trace_utils.py +++ b/ddtrace/contrib/internal/trace_utils.py @@ -485,7 +485,7 @@ def set_http_meta( log.debug("failed to convert http status code %r to int", status_code) else: span.set_tag_str(http.STATUS_CODE, str(status_code)) - if config.http_server.is_error_code(int_status_code): + if config._http_server.is_error_code(int_status_code): span.error = 1 if status_msg is not None: diff --git a/ddtrace/contrib/internal/vertexai/_utils.py b/ddtrace/contrib/internal/vertexai/_utils.py index 129b97fd920..81b0c13df2d 100644 --- a/ddtrace/contrib/internal/vertexai/_utils.py +++ b/ddtrace/contrib/internal/vertexai/_utils.py @@ -177,13 +177,13 @@ def _tag_request_content(span, integration, content, content_idx): tag_request_content_part_google("vertexai", span, integration, part, part_idx, content_idx) -def tag_request(span, integration, instance, args, kwargs): +def tag_request(span, integration, instance, args, kwargs, is_chat): """Tag the generation span with request details. Includes capturing generation configuration, system prompt, and messages. 
""" # instance is either a chat session or a model itself model_instance = instance if isinstance(instance, GenerativeModel) else instance._model - contents = get_argument_value(args, kwargs, 0, "contents") + contents = get_argument_value(args, kwargs, 0, "content" if is_chat else "contents") history = _get_attr(instance, "_history", []) if history: if isinstance(contents, list): diff --git a/ddtrace/contrib/internal/vertexai/patch.py b/ddtrace/contrib/internal/vertexai/patch.py index 82ae00e1a77..54222b87528 100644 --- a/ddtrace/contrib/internal/vertexai/patch.py +++ b/ddtrace/contrib/internal/vertexai/patch.py @@ -64,7 +64,7 @@ def _traced_generate(vertexai, pin, func, instance, args, kwargs, model_instance # history must be copied since it is modified during the LLM interaction history = getattr(instance, "history", [])[:] try: - tag_request(span, integration, instance, args, kwargs) + tag_request(span, integration, instance, args, kwargs, is_chat) generations = func(*args, **kwargs) if stream: return TracedVertexAIStreamResponse( @@ -99,7 +99,7 @@ async def _traced_agenerate(vertexai, pin, func, instance, args, kwargs, model_i # history must be copied since it is modified during the LLM interaction history = getattr(instance, "history", [])[:] try: - tag_request(span, integration, instance, args, kwargs) + tag_request(span, integration, instance, args, kwargs, is_chat) generations = await func(*args, **kwargs) if stream: return TracedAsyncVertexAIStreamResponse( diff --git a/ddtrace/contrib/internal/vertica/patch.py b/ddtrace/contrib/internal/vertica/patch.py index b365ade8c05..b223a17275d 100644 --- a/ddtrace/contrib/internal/vertica/patch.py +++ b/ddtrace/contrib/internal/vertica/patch.py @@ -5,8 +5,8 @@ import ddtrace from ddtrace import config from ddtrace.constants import _ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import SPAN_KIND -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.contrib 
import trace_utils from ddtrace.ext import SpanKind from ddtrace.ext import SpanTypes @@ -233,7 +233,7 @@ def wrapper(wrapped, instance, args, kwargs): span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) if conf.get("measured", False): - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) span.set_tags(pin.tags) if "span_start" in conf: diff --git a/ddtrace/contrib/jinja2/__init__.py b/ddtrace/contrib/jinja2/__init__.py index cc8c6786b02..8437a51d9de 100644 --- a/ddtrace/contrib/jinja2/__init__.py +++ b/ddtrace/contrib/jinja2/__init__.py @@ -16,13 +16,13 @@ The library can be configured globally and per instance, using the Configuration API:: from ddtrace import config + from ddtrace.trace import Pin # Change service name globally config.jinja2['service_name'] = 'jinja-templates' # change the service name only for this environment - cfg = config.get_from(env) - cfg['service_name'] = 'jinja-templates' + Pin.override(env, service='jinja-templates') By default, the service name is set to None, so it is inherited from the parent span. If there is no parent span and the service name is not overridden the agent will drop the traces. 
diff --git a/ddtrace/contrib/openai/__init__.py b/ddtrace/contrib/openai/__init__.py index 88090b5f85a..da94047c2e8 100644 --- a/ddtrace/contrib/openai/__init__.py +++ b/ddtrace/contrib/openai/__init__.py @@ -248,7 +248,6 @@ Pin.override(openai, service="my-openai-service") """ # noqa: E501 - # Required to allow users to import from `ddtrace.contrib.openai.patch` directly import warnings as _w diff --git a/ddtrace/contrib/requests/__init__.py b/ddtrace/contrib/requests/__init__.py index efcb20f1219..7d034ce56bf 100644 --- a/ddtrace/contrib/requests/__init__.py +++ b/ddtrace/contrib/requests/__init__.py @@ -65,12 +65,11 @@ use the config API:: from ddtrace import config + from ddtrace.trace import Pin from requests import Session session = Session() - cfg = config.get_from(session) - cfg['service_name'] = 'auth-api' - cfg['distributed_tracing'] = False + Pin.override(session, service='auth-api') """ diff --git a/ddtrace/debugging/_exception/replay.py b/ddtrace/debugging/_exception/replay.py index 080b4cbfc61..c9da69bb5b2 100644 --- a/ddtrace/debugging/_exception/replay.py +++ b/ddtrace/debugging/_exception/replay.py @@ -21,6 +21,7 @@ from ddtrace.internal.rate_limiter import BudgetRateLimiterWithJitter as RateLimiter from ddtrace.internal.rate_limiter import RateLimitExceeded from ddtrace.internal.utils.time import HourGlass +from ddtrace.settings.exception_replay import config log = get_logger(__name__) @@ -225,7 +226,9 @@ def on_span_exception( seq = count(1) # 1-based sequence number - while chain: + frames_captured = 0 + + while chain and frames_captured <= config.max_frames: exc, _tb = chain.pop() # LIFO: reverse the chain if _tb is None or _tb.tb_frame is None: @@ -233,7 +236,7 @@ def on_span_exception( continue # DEV: We go from the handler up to the root exception - while _tb: + while _tb and frames_captured <= config.max_frames: frame = _tb.tb_frame code = frame.f_code seq_nr = next(seq) @@ -263,6 +266,9 @@ def on_span_exception( # Memoize 
frame.f_locals[SNAPSHOT_KEY] = snapshot_id = snapshot.uuid + # Count + frames_captured += 1 + # Add correlation tags on the span span.set_tag_str(FRAME_SNAPSHOT_ID_TAG % seq_nr, snapshot_id) span.set_tag_str(FRAME_FUNCTION_TAG % seq_nr, code.co_name) diff --git a/ddtrace/debugging/_signal/tracing.py b/ddtrace/debugging/_signal/tracing.py index 3c9eb3f447e..d509e336d52 100644 --- a/ddtrace/debugging/_signal/tracing.py +++ b/ddtrace/debugging/_signal/tracing.py @@ -4,7 +4,7 @@ import ddtrace from ddtrace._trace.span import Span -from ddtrace.constants import ORIGIN_KEY +from ddtrace.constants import _ORIGIN_KEY from ddtrace.debugging._expressions import DDExpressionEvaluationError from ddtrace.debugging._probe.model import Probe from ddtrace.debugging._probe.model import SpanDecorationFunctionProbe @@ -52,7 +52,7 @@ def enter(self, scope: t.Mapping[str, t.Any]) -> None: span.set_tags(probe.tags) # type: ignore[arg-type] span.set_tag_str(PROBE_ID_TAG_NAME, probe.probe_id) - span.set_tag_str(ORIGIN_KEY, "di") + span.set_tag_str(_ORIGIN_KEY, "di") def exit(self, retval: t.Any, exc_info: ExcInfoType, duration: float, scope: t.Mapping[str, t.Any]) -> None: if self._span_cm is not None: diff --git a/ddtrace/internal/_encoding.pyx b/ddtrace/internal/_encoding.pyx index f85fe7c6776..8db5bad7544 100644 --- a/ddtrace/internal/_encoding.pyx +++ b/ddtrace/internal/_encoding.pyx @@ -19,7 +19,7 @@ from ._utils cimport PyBytesLike_Check # DEV: This only occurs because there is a `constants.py` module # in both `ddtrace` and `ddtrace.internal` -from ..constants import ORIGIN_KEY +from ..constants import _ORIGIN_KEY as ORIGIN_KEY from .constants import SPAN_LINKS_KEY from .constants import SPAN_EVENTS_KEY from .constants import MAX_UINT_64BITS diff --git a/ddtrace/internal/appsec/product.py b/ddtrace/internal/appsec/product.py new file mode 100644 index 00000000000..126d6d2a04f --- /dev/null +++ b/ddtrace/internal/appsec/product.py @@ -0,0 +1,31 @@ +from ddtrace import config +from 
ddtrace.settings.asm import config as asm_config + + +requires = ["remote-configuration"] + + +def post_preload(): + pass + + +def start(): + if asm_config._asm_enabled or config._remote_config_enabled: + from ddtrace.appsec._remoteconfiguration import enable_appsec_rc + + enable_appsec_rc() + + +def restart(join=False): + if asm_config._asm_enabled or config._remote_config_enabled: + from ddtrace.appsec._remoteconfiguration import _forksafe_appsec_rc + + _forksafe_appsec_rc() + + +def stop(join=False): + pass + + +def at_exit(join=False): + pass diff --git a/ddtrace/internal/core/__init__.py b/ddtrace/internal/core/__init__.py index da31218f73c..3c2169d4cb1 100644 --- a/ddtrace/internal/core/__init__.py +++ b/ddtrace/internal/core/__init__.py @@ -115,7 +115,6 @@ def _on_jsonify_context_started_flask(ctx): from ..utils.deprecations import DDTraceDeprecationWarning from . import event_hub # noqa:F401 -from ._core import DDSketch # noqa:F401 from .event_hub import EventResultDict # noqa:F401 from .event_hub import dispatch from .event_hub import dispatch_with_results # noqa:F401 diff --git a/ddtrace/internal/datadog/profiling/cmake/FindLibdatadog.cmake b/ddtrace/internal/datadog/profiling/cmake/FindLibdatadog.cmake index 3a96fbeb353..7ba7e78164c 100644 --- a/ddtrace/internal/datadog/profiling/cmake/FindLibdatadog.cmake +++ b/ddtrace/internal/datadog/profiling/cmake/FindLibdatadog.cmake @@ -17,17 +17,17 @@ include(FetchContent) # Set version if not already set if(NOT DEFINED TAG_LIBDATADOG) set(TAG_LIBDATADOG - "v14.3.1" + "v15.0.0" CACHE STRING "libdatadog github tag") endif() if(NOT DEFINED DD_CHECKSUMS) set(DD_CHECKSUMS - "57f83aff275628bb1af89c22bb4bd696726daf2a9e09b6cd0d966b29e65a7ad6 libdatadog-aarch64-alpine-linux-musl.tar.gz" - "2be2efa98dfc32f109abdd79242a8e046a7a300c77634135eb293e000ecd4a4c libdatadog-aarch64-apple-darwin.tar.gz" - "36db8d50ccabb71571158ea13835c0f1d05d30b32135385f97c16343cfb6ddd4 libdatadog-aarch64-unknown-linux-gnu.tar.gz" - 
"2f61fd21cf2f8147743e414b4a8c77250a17be3aecc42a69ffe54f0a603d5c92 libdatadog-x86_64-alpine-linux-musl.tar.gz" - "f01f05600591063eba4faf388f54c155ab4e6302e5776c7855e3734955f7daf7 libdatadog-x86_64-unknown-linux-gnu.tar.gz") + "d5b969b293e5a9e5e36404a553bbafdd55ff6af0b089698bd989a878534df0c7 libdatadog-aarch64-alpine-linux-musl.tar.gz" + "4540ffb8ccb671550a39ba79226117086582c1eaf9714180a9e26bd6bb175860 libdatadog-aarch64-apple-darwin.tar.gz" + "31bceab4f56873b03b3728760d30e3abc493d32ca8fdc9e1f2ec2147ef4d5424 libdatadog-aarch64-unknown-linux-gnu.tar.gz" + "530348c4b02cc7096de2231476ec12db82e2cc6de12a87e5b28af47ea73d4e56 libdatadog-x86_64-alpine-linux-musl.tar.gz" + "5073ffc657bc4698f8bdd4935475734577bfb18c54dcbebc4f7d8c7595626e52 libdatadog-x86_64-unknown-linux-gnu.tar.gz") endif() # Determine platform-specific tarball name in a way that conforms to the libdatadog naming scheme in Github releases diff --git a/ddtrace/internal/datadog/profiling/crashtracker/CMakeLists.txt b/ddtrace/internal/datadog/profiling/crashtracker/CMakeLists.txt index c23a3e3ddce..8165613c07d 100644 --- a/ddtrace/internal/datadog/profiling/crashtracker/CMakeLists.txt +++ b/ddtrace/internal/datadog/profiling/crashtracker/CMakeLists.txt @@ -46,6 +46,10 @@ add_library(${EXTENSION_NAME} SHARED ${CRASHTRACKER_CPP_SRC}) add_ddup_config(${EXTENSION_NAME}) # Cython generates code that produces errors for the following, so relax compile options target_compile_options(${EXTENSION_NAME} PRIVATE -Wno-old-style-cast -Wno-shadow -Wno-address) +# tp_print is marked deprecated in Python 3.8, but cython still generates code using it +if("${Python3_VERSION_MINOR}" STREQUAL "8") + target_compile_options(${EXTENSION_NAME} PRIVATE -Wno-deprecated-declarations) +endif() # cmake may mutate the name of the library (e.g., lib- and -.so for dynamic libraries). This suppresses that behavior, # which is required to ensure all paths can be inferred correctly by setup.py. 
diff --git a/ddtrace/internal/datadog/profiling/dd_wrapper/src/crashtracker.cpp b/ddtrace/internal/datadog/profiling/dd_wrapper/src/crashtracker.cpp index d14c5380e19..5519489fc2b 100644 --- a/ddtrace/internal/datadog/profiling/dd_wrapper/src/crashtracker.cpp +++ b/ddtrace/internal/datadog/profiling/dd_wrapper/src/crashtracker.cpp @@ -207,8 +207,8 @@ Datadog::Crashtracker::start() auto result = ddog_crasht_init(config, receiver_config, metadata); ddog_Vec_Tag_drop(tags); - if (result.tag != DDOG_CRASHT_RESULT_OK) { // NOLINT (cppcoreguidelines-pro-type-union-access) - auto err = result.err; // NOLINT (cppcoreguidelines-pro-type-union-access) + if (result.tag != DDOG_VOID_RESULT_OK) { // NOLINT (cppcoreguidelines-pro-type-union-access) + auto err = result.err; // NOLINT (cppcoreguidelines-pro-type-union-access) std::string errmsg = err_to_msg(&err, "Error initializing crash tracker"); std::cerr << errmsg << std::endl; ddog_Error_drop(&err); diff --git a/ddtrace/internal/datadog/profiling/dd_wrapper/src/receiver_interface.cpp b/ddtrace/internal/datadog/profiling/dd_wrapper/src/receiver_interface.cpp index b95ef95604e..81a02eddcff 100644 --- a/ddtrace/internal/datadog/profiling/dd_wrapper/src/receiver_interface.cpp +++ b/ddtrace/internal/datadog/profiling/dd_wrapper/src/receiver_interface.cpp @@ -18,8 +18,8 @@ crashtracker_receiver_entry() // cppcheck-suppress unusedFunction { // Assumes that this will be called only in the receiver binary, which is a // fresh process - ddog_crasht_Result new_result = ddog_crasht_receiver_entry_point_stdin(); - if (new_result.tag != DDOG_CRASHT_RESULT_OK) { + ddog_VoidResult new_result = ddog_crasht_receiver_entry_point_stdin(); + if (new_result.tag != DDOG_VOID_RESULT_OK) { ddog_CharSlice message = ddog_Error_message(&new_result.err); //`write` may not write what we want it to write, but there's nothing we can do about it, diff --git a/ddtrace/internal/datadog/profiling/ddup/CMakeLists.txt 
b/ddtrace/internal/datadog/profiling/ddup/CMakeLists.txt index 6a4cb4e8803..12ceab6fcb6 100644 --- a/ddtrace/internal/datadog/profiling/ddup/CMakeLists.txt +++ b/ddtrace/internal/datadog/profiling/ddup/CMakeLists.txt @@ -49,6 +49,10 @@ add_library(${EXTENSION_NAME} SHARED ${DDUP_CPP_SRC}) add_ddup_config(${EXTENSION_NAME}) # Cython generates code that produces errors for the following, so relax compile options target_compile_options(${EXTENSION_NAME} PRIVATE -Wno-old-style-cast -Wno-shadow -Wno-address) +# tp_print is marked deprecated in Python 3.8, but cython still generates code using it +if("${Python3_VERSION_MINOR}" STREQUAL "8") + target_compile_options(${EXTENSION_NAME} PRIVATE -Wno-deprecated-declarations) +endif() # cmake may mutate the name of the library (e.g., lib- and -.so for dynamic libraries). This suppresses that behavior, # which is required to ensure all paths can be inferred correctly by setup.py. diff --git a/ddtrace/internal/datastreams/processor.py b/ddtrace/internal/datastreams/processor.py index 5afdb07c9a2..deb9388537b 100644 --- a/ddtrace/internal/datastreams/processor.py +++ b/ddtrace/internal/datastreams/processor.py @@ -20,7 +20,7 @@ from ddtrace.internal import compat from ddtrace.internal.atexit import register_on_exit_signal from ddtrace.internal.constants import DEFAULT_SERVICE_NAME -from ddtrace.internal.core import DDSketch +from ddtrace.internal.native import DDSketch from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter from .._encoding import packb diff --git a/ddtrace/internal/iast/product.py b/ddtrace/internal/iast/product.py new file mode 100644 index 00000000000..ccbc61b2f5a --- /dev/null +++ b/ddtrace/internal/iast/product.py @@ -0,0 +1,29 @@ +""" +This is the entry point for the IAST instrumentation. `enable_iast_propagation` is called on patch_all function +too but patch_all depends of DD_TRACE_ENABLED environment variable. 
This is the reason why we need to call it +here and it's not a duplicate call due to `enable_iast_propagation` has a global variable to avoid multiple calls. +""" +from ddtrace.appsec._iast._utils import _is_iast_enabled + + +def post_preload(): + pass + + +def start(): + if _is_iast_enabled(): + from ddtrace.appsec._iast import enable_iast_propagation + + enable_iast_propagation() + + +def restart(join=False): + pass + + +def stop(join=False): + pass + + +def at_exit(join=False): + pass diff --git a/ddtrace/internal/native/__init__.py b/ddtrace/internal/native/__init__.py new file mode 100644 index 00000000000..0c85a824b44 --- /dev/null +++ b/ddtrace/internal/native/__init__.py @@ -0,0 +1 @@ +from ._native import DDSketch # noqa: F401 diff --git a/ddtrace/internal/core/_core.pyi b/ddtrace/internal/native/_native.pyi similarity index 100% rename from ddtrace/internal/core/_core.pyi rename to ddtrace/internal/native/_native.pyi diff --git a/ddtrace/internal/processor/stats.py b/ddtrace/internal/processor/stats.py index f79f460582e..295a6e289a2 100644 --- a/ddtrace/internal/processor/stats.py +++ b/ddtrace/internal/processor/stats.py @@ -8,10 +8,10 @@ from ddtrace._trace.processor import SpanProcessor from ddtrace._trace.span import _is_top_level from ddtrace.internal import compat -from ddtrace.internal.core import DDSketch +from ddtrace.internal.native import DDSketch from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter -from ...constants import SPAN_MEASURED_KEY +from ...constants import _SPAN_MEASURED_KEY from .._encoding import packb from ..agent import get_connection from ..compat import get_connection_response @@ -38,7 +38,7 @@ def _is_measured(span): # type: (Span) -> bool """Return whether the span is flagged to be measured or not.""" - return span._metrics.get(SPAN_MEASURED_KEY) == 1 + return span._metrics.get(_SPAN_MEASURED_KEY) == 1 """ diff --git a/ddtrace/internal/remoteconfig/worker.py b/ddtrace/internal/remoteconfig/worker.py index 
7ad8c592d2e..5429e599e74 100644 --- a/ddtrace/internal/remoteconfig/worker.py +++ b/ddtrace/internal/remoteconfig/worker.py @@ -13,7 +13,7 @@ from ddtrace.internal.remoteconfig.utils import get_poll_interval_seconds from ddtrace.internal.service import ServiceStatus from ddtrace.internal.utils.time import StopWatch -from ddtrace.settings import _config as ddconfig +from ddtrace.settings import _global_config as ddconfig log = get_logger(__name__) diff --git a/ddtrace/internal/sampling.py b/ddtrace/internal/sampling.py index 997d3af77fd..e64c0e27bc5 100644 --- a/ddtrace/internal/sampling.py +++ b/ddtrace/internal/sampling.py @@ -15,19 +15,19 @@ from typing_extensions import TypedDict from ddtrace._trace.sampling_rule import SamplingRule # noqa:F401 +from ddtrace.constants import _SAMPLING_AGENT_DECISION +from ddtrace.constants import _SAMPLING_RULE_DECISION from ddtrace.constants import _SINGLE_SPAN_SAMPLING_MAX_PER_SEC from ddtrace.constants import _SINGLE_SPAN_SAMPLING_MAX_PER_SEC_NO_LIMIT from ddtrace.constants import _SINGLE_SPAN_SAMPLING_MECHANISM from ddtrace.constants import _SINGLE_SPAN_SAMPLING_RATE -from ddtrace.constants import SAMPLING_AGENT_DECISION -from ddtrace.constants import SAMPLING_RULE_DECISION from ddtrace.internal.constants import _CATEGORY_TO_PRIORITIES from ddtrace.internal.constants import _KEEP_PRIORITY_INDEX from ddtrace.internal.constants import _REJECT_PRIORITY_INDEX from ddtrace.internal.constants import SAMPLING_DECISION_TRACE_TAG_KEY from ddtrace.internal.glob_matching import GlobMatcher from ddtrace.internal.logger import get_logger -from ddtrace.settings import _config as config +from ddtrace.settings import _global_config as config from .rate_limiter import RateLimiter @@ -282,18 +282,18 @@ def _set_sampling_tags(span, sampled, sample_rate, priority_category): # type: (Span, bool, float, str) -> None mechanism = SamplingMechanism.TRACE_SAMPLING_RULE if priority_category == PriorityCategory.RULE_DEFAULT: - 
span.set_metric(SAMPLING_RULE_DECISION, sample_rate) + span.set_metric(_SAMPLING_RULE_DECISION, sample_rate) if priority_category == PriorityCategory.RULE_CUSTOMER: - span.set_metric(SAMPLING_RULE_DECISION, sample_rate) + span.set_metric(_SAMPLING_RULE_DECISION, sample_rate) mechanism = SamplingMechanism.REMOTE_USER_RULE if priority_category == PriorityCategory.RULE_DYNAMIC: - span.set_metric(SAMPLING_RULE_DECISION, sample_rate) + span.set_metric(_SAMPLING_RULE_DECISION, sample_rate) mechanism = SamplingMechanism.REMOTE_DYNAMIC_RULE elif priority_category == PriorityCategory.DEFAULT: mechanism = SamplingMechanism.DEFAULT elif priority_category == PriorityCategory.AUTO: mechanism = SamplingMechanism.AGENT_RATE - span.set_metric(SAMPLING_AGENT_DECISION, sample_rate) + span.set_metric(_SAMPLING_AGENT_DECISION, sample_rate) priorities = _CATEGORY_TO_PRIORITIES[priority_category] _set_priority(span, priorities[_KEEP_PRIORITY_INDEX] if sampled else priorities[_REJECT_PRIORITY_INDEX]) set_sampling_decision_maker(span.context, mechanism) diff --git a/ddtrace/internal/schema/processor.py b/ddtrace/internal/schema/processor.py index 33d9b431c1b..91b0feff5dc 100644 --- a/ddtrace/internal/schema/processor.py +++ b/ddtrace/internal/schema/processor.py @@ -1,6 +1,6 @@ from ddtrace import config from ddtrace._trace.processor import TraceProcessor -from ddtrace.constants import BASE_SERVICE_KEY +from ddtrace.constants import _BASE_SERVICE_KEY from . 
import schematize_service_name @@ -22,4 +22,4 @@ def process_trace(self, trace): return trace def _update_dd_base_service(self, span): - span.set_tag_str(key=BASE_SERVICE_KEY, value=self._global_service) + span.set_tag_str(key=_BASE_SERVICE_KEY, value=self._global_service) diff --git a/ddtrace/internal/utils/http.py b/ddtrace/internal/utils/http.py index 7e85ce01356..f9c13827d3d 100644 --- a/ddtrace/internal/utils/http.py +++ b/ddtrace/internal/utils/http.py @@ -16,7 +16,7 @@ from typing import Tuple # noqa:F401 from typing import Union # noqa:F401 -from ddtrace.constants import USER_ID_KEY +from ddtrace.constants import _USER_ID_KEY from ddtrace.internal import compat from ddtrace.internal._unpatched import unpatched_open as open # noqa: A001 from ddtrace.internal.compat import parse @@ -164,13 +164,13 @@ def w3c_get_dd_list_member(context): "t.dm:{}".format((w3c_encode_tag((_W3C_TRACESTATE_INVALID_CHARS_REGEX_VALUE, "_", sampling_decision)))) ) # since this can change, we need to grab the value off the current span - usr_id = context._meta.get(USER_ID_KEY) + usr_id = context._meta.get(_USER_ID_KEY) if usr_id: tags.append("t.usr.id:{}".format(w3c_encode_tag((_W3C_TRACESTATE_INVALID_CHARS_REGEX_VALUE, "_", usr_id)))) current_tags_len = sum(len(i) for i in tags) for k, v in _get_metas_to_propagate(context): - if k not in [SAMPLING_DECISION_TRACE_TAG_KEY, USER_ID_KEY]: + if k not in [SAMPLING_DECISION_TRACE_TAG_KEY, _USER_ID_KEY]: # for key replace ",", "=", and characters outside the ASCII range 0x20 to 0x7E # for value replace ",", ";", "~" and characters outside the ASCII range 0x20 to 0x7E k = k.replace("_dd.p.", "t.") diff --git a/ddtrace/internal/writer/writer.py b/ddtrace/internal/writer/writer.py index 01b05515984..357fcf3917f 100644 --- a/ddtrace/internal/writer/writer.py +++ b/ddtrace/internal/writer/writer.py @@ -14,11 +14,11 @@ import ddtrace from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter -from ddtrace.settings import _config as 
config +from ddtrace.settings import _global_config as config from ddtrace.settings.asm import config as asm_config from ddtrace.vendor.dogstatsd import DogStatsd -from ...constants import KEEP_SPANS_RATE_KEY +from ...constants import _KEEP_SPANS_RATE_KEY from ...internal.utils.formats import parse_tags_str from ...internal.utils.http import Response from ...internal.utils.time import StopWatch @@ -219,7 +219,7 @@ def _set_drop_rate(self) -> None: def _set_keep_rate(self, trace): if trace: - trace[0].set_metric(KEEP_SPANS_RATE_KEY, 1.0 - self._drop_sma.get()) + trace[0].set_metric(_KEEP_SPANS_RATE_KEY, 1.0 - self._drop_sma.get()) def _reset_connection(self) -> None: with self._conn_lck: diff --git a/ddtrace/llmobs/_integrations/base.py b/ddtrace/llmobs/_integrations/base.py index 4d2fbea9767..97be297b7a4 100644 --- a/ddtrace/llmobs/_integrations/base.py +++ b/ddtrace/llmobs/_integrations/base.py @@ -9,13 +9,15 @@ from ddtrace import config from ddtrace._trace.sampler import RateSampler from ddtrace._trace.span import Span -from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.contrib.internal.trace_utils import int_service from ddtrace.ext import SpanTypes from ddtrace.internal.agent import get_stats_url from ddtrace.internal.dogstatsd import get_dogstatsd_client from ddtrace.internal.hostname import get_hostname from ddtrace.internal.logger import get_logger +from ddtrace.internal.telemetry import telemetry_writer +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE from ddtrace.internal.utils.formats import asbool from ddtrace.llmobs._llmobs import LLMObs from ddtrace.llmobs._log_writer import V2LogWriter @@ -124,10 +126,19 @@ def trace(self, pin: Pin, operation_id: str, submit_to_llmobs: bool = False, **k span_type=SpanTypes.LLM if (submit_to_llmobs and self.llmobs_enabled) else None, ) # Enable trace metrics for these spans so users can see per-service openai usage in APM. 
- span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) self._set_base_span_tags(span, **kwargs) if submit_to_llmobs and self.llmobs_enabled: LLMObs._instance._activate_llmobs_span(span) + telemetry_writer.add_count_metric( + namespace=TELEMETRY_NAMESPACE.MLOBS, + name="span.start", + value=1, + tags=( + ("integration", self._integration_name), + ("autoinstrumented", "true"), + ), + ) return span @classmethod diff --git a/ddtrace/llmobs/_integrations/vertexai.py b/ddtrace/llmobs/_integrations/vertexai.py index 4019268e0c4..933cd685a1f 100644 --- a/ddtrace/llmobs/_integrations/vertexai.py +++ b/ddtrace/llmobs/_integrations/vertexai.py @@ -5,6 +5,7 @@ from typing import Optional from ddtrace import Span +from ddtrace.internal.utils import ArgumentError from ddtrace.internal.utils import get_argument_value from ddtrace.llmobs._constants import INPUT_MESSAGES from ddtrace.llmobs._constants import METADATA @@ -45,7 +46,11 @@ def _llmobs_set_tags( metadata = llmobs_get_metadata_google(kwargs, instance) system_instruction = get_system_instructions_from_google_model(instance) - input_contents = get_argument_value(args, kwargs, 0, "contents") + input_contents = None + try: + input_contents = get_argument_value(args, kwargs, 0, "content") + except ArgumentError: + input_contents = get_argument_value(args, kwargs, 0, "contents") input_messages = self._extract_input_message(input_contents, history, system_instruction) output_messages = [{"content": ""}] diff --git a/ddtrace/llmobs/_llmobs.py b/ddtrace/llmobs/_llmobs.py index 2eb9ea8bbc7..0c4c15a16ce 100644 --- a/ddtrace/llmobs/_llmobs.py +++ b/ddtrace/llmobs/_llmobs.py @@ -27,6 +27,7 @@ from ddtrace.internal.service import ServiceStatusError from ddtrace.internal.telemetry import telemetry_writer from ddtrace.internal.telemetry.constants import TELEMETRY_APM_PRODUCT +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE from ddtrace.internal.utils.formats import asbool from 
ddtrace.internal.utils.formats import parse_tags_str from ddtrace.llmobs import _constants as constants @@ -543,6 +544,15 @@ def _start_span( model_provider: Optional[str] = None, ml_app: Optional[str] = None, ) -> Span: + telemetry_writer.add_count_metric( + namespace=TELEMETRY_NAMESPACE.MLOBS, + name="span.start", + value=1, + tags=( + ("autoinstrumented", "false"), + ("kind", operation_kind), + ), + ) if name is None: name = operation_kind span = self.tracer.trace(name, resource=operation_kind, span_type=SpanTypes.LLM) diff --git a/ddtrace/propagation/_database_monitoring.py b/ddtrace/propagation/_database_monitoring.py index 5b585b13210..817d23c4ebf 100644 --- a/ddtrace/propagation/_database_monitoring.py +++ b/ddtrace/propagation/_database_monitoring.py @@ -10,7 +10,7 @@ from ..internal import compat from ..internal.utils import get_argument_value from ..internal.utils import set_argument_value -from ..settings import _config as dd_config +from ..settings import _global_config as dd_config from ..settings._database_monitoring import dbm_config diff --git a/ddtrace/settings/__init__.py b/ddtrace/settings/__init__.py index 2c3a0bf7807..ebbb0c31f7b 100644 --- a/ddtrace/settings/__init__.py +++ b/ddtrace/settings/__init__.py @@ -1,12 +1,12 @@ from .._hooks import Hooks -from .config import Config +from ._config import Config from .exceptions import ConfigException from .http import HttpConfig from .integration import IntegrationConfig # Default global config -_config = Config() +_global_config = Config() __all__ = [ "Config", diff --git a/ddtrace/settings/_config.py b/ddtrace/settings/_config.py new file mode 100644 index 00000000000..df3fe4177d1 --- /dev/null +++ b/ddtrace/settings/_config.py @@ -0,0 +1,1025 @@ +from copy import deepcopy +import json +import os +import re +import sys +from typing import Any # noqa:F401 +from typing import Callable # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # 
noqa:F401 +from typing import Tuple # noqa:F401 +from typing import Union # noqa:F401 + +from ddtrace.internal._file_queue import File_Queue +from ddtrace.internal.serverless import in_azure_function +from ddtrace.internal.serverless import in_gcp_function +from ddtrace.internal.telemetry import telemetry_writer +from ddtrace.internal.utils.cache import cachedmethod +from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning +from ddtrace.vendor.debtcollector import deprecate + +from .._trace.pin import Pin +from ..internal import gitmetadata +from ..internal.constants import _PROPAGATION_BEHAVIOR_DEFAULT +from ..internal.constants import _PROPAGATION_BEHAVIOR_IGNORE +from ..internal.constants import _PROPAGATION_STYLE_DEFAULT +from ..internal.constants import _PROPAGATION_STYLE_NONE +from ..internal.constants import DEFAULT_BUFFER_SIZE +from ..internal.constants import DEFAULT_MAX_PAYLOAD_SIZE +from ..internal.constants import DEFAULT_PROCESSING_INTERVAL +from ..internal.constants import DEFAULT_REUSE_CONNECTIONS +from ..internal.constants import DEFAULT_SAMPLING_RATE_LIMIT +from ..internal.constants import DEFAULT_TIMEOUT +from ..internal.constants import PROPAGATION_STYLE_ALL +from ..internal.constants import PROPAGATION_STYLE_B3_SINGLE +from ..internal.logger import get_logger +from ..internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ..internal.serverless import in_aws_lambda +from ..internal.utils.formats import asbool +from ..internal.utils.formats import parse_tags_str +from ._core import get_config as _get_config +from ._inferred_base_service import detect_service +from ._otel_remapper import otel_remapping as _otel_remapping +from .endpoint_config import fetch_config_from_endpoint +from .http import HttpConfig +from .integration import IntegrationConfig + + +if sys.version_info >= (3, 8): + from typing import Literal # noqa:F401 +else: + from typing_extensions import Literal + + +log = get_logger(__name__) + +ENDPOINT_FETCHED_CONFIG = 
fetch_config_from_endpoint() + +DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP_DEFAULT = ( + r"(?ix)" + r"(?:" # JSON-ish leading quote + r'(?:"|%22)?' + r")" + r"(?:" # common keys" + r"(?:old[-_]?|new[-_]?)?p(?:ass)?w(?:or)?d(?:1|2)?" # pw, password variants + r"|pass(?:[-_]?phrase)?" # pass, passphrase variants + r"|secret" + r"|(?:" # key, key_id variants + r"api[-_]?" + r"|private[-_]?" + r"|public[-_]?" + r"|access[-_]?" + r"|secret[-_]?" + r"|app(?:lica" + r"tion)?[-_]?" + r")key(?:[-_]?id)?" + r"|token" + r"|consumer[-_]?(?:id|key|secret)" + r"|sign(?:ed|ature)?" + r"|auth(?:entication|orization)?" + r")" + r"(?:" + # '=' query string separator, plus value til next '&' separator + r"(?:\s|%20)*(?:=|%3D)[^&]+" + # JSON-ish '": "somevalue"', key being handled with case above, without the opening '"' + r'|(?:"|%22)' # closing '"' at end of key + r"(?:\s|%20)*(?::|%3A)(?:\s|%20)*" # ':' key-value separator, with surrounding spaces + r'(?:"|%22)' # opening '"' at start of value + r'(?:%2[^2]|%[^2]|[^"%])+' # value + r'(?:"|%22)' # closing '"' at end of value + r")" + r"|(?:" # other common secret values + r" bearer(?:\s|%20)+[a-z0-9._\-]+" + r"|token(?::|%3A)[a-z0-9]{13}" + r"|gh[opsu]_[0-9a-zA-Z]{36}" + r"|ey[I-L](?:[\w=-]|%3D)+\.ey[I-L](?:[\w=-]|%3D)+(?:\.(?:[\w.+/=-]|%3D|%2F|%2B)+)?" + r"|-{5}BEGIN(?:[a-z\s]|%20)+PRIVATE(?:\s|%20)KEY-{5}[^\-]+-{5}END" + r"(?:[a-z\s]|%20)+PRIVATE(?:\s|%20)KEY(?:-{5})?(?:\n|%0A)?" + r"|(?:ssh-(?:rsa|dss)|ecdsa-[a-z0-9]+-[a-z0-9]+)(?:\s|%20|%09)+(?:[a-z0-9/.+]" + r"|%2F|%5C|%2B){100,}(?:=|%3D)*(?:(?:\s|%20|%09)+[a-z0-9._-]+)?" + r")" +) + + +def _parse_propagation_styles(styles_str): + # type: (str) -> Optional[List[str]] + """Helper to parse http propagation extract/inject styles via env variables. + + The expected format is:: + + + + + + + +
+

IndexError + at /

+
No exception message supplied
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Request Method:GET
Request URL:http://localhost:8000/
Django Version:5.1.5
Exception Type:IndexError
Exception Location:/home/foobaruser/sources/minimal-django-example/app.py, line 20, in index_view
Raised during:__main__.index_view
Python Executable:/home/foobaruser/.pyenv/versions/testsca/bin/python
Python Version:3.12.5
Python Path:
['/home/foobaruser/sources/minimal-django-example',
+ '/home/foobaruser/.pyenv/versions/3.12.5/lib/python312.zip',
+ '/home/foobaruser/.pyenv/versions/3.12.5/lib/python3.12',
+ '/home/foobaruser/.pyenv/versions/3.12.5/lib/python3.12/lib-dynload',
+ '/home/foobaruser/.pyenv/versions/testsca/lib/python3.12/site-packages']
Server time:Mon, 20 Jan 2025 04:38:00 -0600
+
+ +
+ + + + +
+

Traceback + Switch to copy-and-paste view +

+
+
    + + +
  • + + /home/foobaruser/.pyenv/versions/testsca/lib/python3.12/site-packages/django/core/handlers/exception.py, line 55, in inner + + + +
    + +
      + +
    1. + +
    2.         return inner
    3. + +
    4.     else:
    5. + +
    6. + +
    7.         @wraps(get_response)
    8. + +
    9.         def inner(request):
    10. + +
    11.             try:
    12. + +
    + +
      +
    1.                 response = get_response(request)
      +                               ^^^^^^^^^^^^^^^^^^^^^
      …
    2. +
    + +
      + +
    1.             except Exception as exc:
    2. + +
    3.                 response = response_for_exception(request, exc)
    4. + +
    5.             return response
    6. + +
    7. + +
    8.         return inner
    9. + +
    10. + +
    + +
    + + + + +
    + Local vars + + + + + + + + + + + + + + + + + + + + + + + + + + +
    VariableValue
    exc
    IndexError()
    get_response
    <bound method BaseHandler._get_response of <django.core.handlers.wsgi.WSGIHandler object at 0x739fa54fbf20>>
    request
    <WSGIRequest: GET '/'>
    +
    + +
  • + + +
  • + + /home/foobaruser/.pyenv/versions/testsca/lib/python3.12/site-packages/django/core/handlers/base.py, line 197, in _get_response + + + +
    + +
      + +
    1. + +
    2.         if response is None:
    3. + +
    4.             wrapped_callback = self.make_view_atomic(callback)
    5. + +
    6.             # If it is an asynchronous view, run it in a subthread.
    7. + +
    8.             if iscoroutinefunction(wrapped_callback):
    9. + +
    10.                 wrapped_callback = async_to_sync(wrapped_callback)
    11. + +
    12.             try:
    13. + +
    + +
      +
    1.                 response = wrapped_callback(request, *callback_args, **callback_kwargs)
      +                                ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      …
    2. +
    + +
      + +
    1.             except Exception as e:
    2. + +
    3.                 response = self.process_exception_by_middleware(e, request)
    4. + +
    5.                 if response is None:
    6. + +
    7.                     raise
    8. + +
    9. + +
    10.         # Complain if the view returned None (a common error).
    11. + +
    + +
    + + + + +
    + Local vars + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    VariableValue
    callback
    <function index_view at 0x739fa76b4d60>
    callback_args
    ()
    callback_kwargs
    {}
    request
    <WSGIRequest: GET '/'>
    response
    None
    self
    <django.core.handlers.wsgi.WSGIHandler object at 0x739fa54fbf20>
    wrapped_callback
    <function index_view at 0x739fa76b4d60>
    +
    + +
  • + + +
  • + + /home/foobaruser/sources/minimal-django-example/app.py, line 20, in index_view + + + +
    + +
      + +
    1.             "DIRS": ["templates"],
    2. + +
    3.         },
    4. + +
    5.     ],
    6. + +
    7. )
    8. + +
    9. + +
    10. + +
    11. def index_view(request):
    12. + +
    + +
      +
    1.     raise IndexError()
      +        ^^^^^^^^^^^^^^^^^^
      …
    2. +
    + +
      + +
    1.     return HttpResponse("<h1>Hello World From Django!</h1>")
    2. + +
    3. + +
    4. + +
    5. def hello_view(request, name):
    6. + +
    7.     return render(request, "template.html", {"name": name})
    8. + +
    9. + +
    + +
    + + + + +
    + Local vars + + + + + + + + + + + + + + + + +
    VariableValue
    request
    <WSGIRequest: GET '/'>
    +
    + +
  • + +
+
+ +
+
+ + + + + +

+ +
+
+ +
+ + +
+

Request information

+ + + +

USER

+

[unable to retrieve the current user]

+ + +

GET

+ +

No GET data

+ + +

POST

+ +

No POST data

+ + +

FILES

+ +

No FILES data

+ + + + + + + + + + + + + + + + + + + +
VariableValue
csrftoken
'********************'
+ + +

META

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
VariableValue
AWS_ASSUME_ROLE_TTL
'1h'
AWS_SESSION_TTL
'24h'
AWS_VAULT_BACKEND
'secret-service'
AWS_VAULT_KEYCHAIN_NAME
'********************'
COLORTERM
'truecolor'
CONTENT_LENGTH
''
CONTENT_TYPE
'text/plain'
DBUS_SESSION_BUS_ADDRESS
'unix:path=/run/user/1000/bus,guid=c3ac961ccc4c263877782e00678e0c9e'
DBUS_STARTER_ADDRESS
'unix:path=/run/user/1000/bus,guid=c3ac961ccc4c263877782e00678e0c9e'
DBUS_STARTER_BUS_TYPE
'session'
DESKTOP_SESSION
'ubuntu'
DISPLAY
':0'
EDITOR
'vim'
EMAIL
'Juanjo Alvarez <juanjo@juanjoalvarez.net>'
GATEWAY_INTERFACE
'CGI/1.1'
GDMSESSION
'ubuntu'
GITLAB_TOKEN
'********************'
GNOME_DESKTOP_SESSION_ID
'this-is-deprecated'
GNOME_SETUP_DISPLAY
':1'
GNOME_SHELL_SESSION_MODE
'ubuntu'
GOROOT
'/home/foobaruser/go'
GTK_MODULES
'gail:atk-bridge'
HDIV_PHP_EXTENSION_PATH
'/home/foobaruser/php/hdiv-php-extension/'
HOME
'/home/foobaruser'
HTTP_ACCEPT
'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7'
HTTP_ACCEPT_ENCODING
'gzip, deflate, br, zstd'
HTTP_ACCEPT_LANGUAGE
'en-US,en;q=0.9'
HTTP_CACHE_CONTROL
'max-age=0'
HTTP_CONNECTION
'keep-alive'
HTTP_COOKIE
'********************'
HTTP_DNT
'1'
HTTP_HOST
'localhost:8000'
HTTP_SEC_CH_UA
'"Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24"'
HTTP_SEC_CH_UA_MOBILE
'?0'
HTTP_SEC_CH_UA_PLATFORM
'"Linux"'
HTTP_SEC_FETCH_DEST
'document'
HTTP_SEC_FETCH_MODE
'navigate'
HTTP_SEC_FETCH_SITE
'none'
HTTP_SEC_FETCH_USER
'?1'
HTTP_UPGRADE_INSECURE_REQUESTS
'1'
HTTP_USER_AGENT
('Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) '
+ 'Chrome/131.0.0.0 Safari/537.36')
IM_CONFIG_CHECK_ENV
'1'
IM_CONFIG_PHASE
'1'
INVOCATION_ID
'161a1822b4c24d129b1f0e3ef745a30f'
JOURNAL_STREAM
'8:61973'
LANG
'en_US.UTF-8'
LC_ADDRESS
'es_ES.UTF-8'
LC_IDENTIFICATION
'es_ES.UTF-8'
LC_MEASUREMENT
'es_ES.UTF-8'
LC_MONETARY
'es_ES.UTF-8'
LC_NAME
'es_ES.UTF-8'
LC_NUMERIC
'es_ES.UTF-8'
LC_PAPER
'es_ES.UTF-8'
LC_TELEPHONE
'es_ES.UTF-8'
LC_TIME
'es_ES.UTF-8'
LOGNAME
'foobaruser'
LS_COLORS
''
LS_OPTIONS
'-N --color=auto -h'
MANAGERPID
'4414'
MANPATH
':/opt/puppetlabs/puppet/share/man'
OMF_CONFIG
'/home/foobaruser/.config/omf'
OMF_PATH
'/home/foobaruser/.local/share/omf'
PATH
'/home/foobaruser/.pyenv/versions/testsca/bin:/home/foobaruser/.pyenv/libexec:/home/foobaruser/.pyenv/plugins/python-build/bin:/home/foobaruser/.pyenv/plugins/pyenv-virtualenv/bin:/home/foobaruser/.pyenv/plugins/pyenv-update/bin:/home/foobaruser/.pyenv/plugins/pyenv-installer/bin:/home/foobaruser/.pyenv/plugins/pyenv-doctor/bin:/home/foobaruser/.pyenv/plugins/pyenv-virtualenv/shims:/home/foobaruser/.pyenv/shims:/home/foobaruser/.pyenv/bin:/home/foobaruser/pyenv/bin:/home/foobaruser/.local/bin:/home/foobaruser/sources/graalvm-ce-java11-20.1.0/bin:/home/foobaruser/.fzf/bin:/home/foobaruser/.cargo/bin:/home/foobaruser/.krew/bin:/home/foobaruser/.tfenv/bin:/home/foobaruser/dd/devtools/bin:/usr/local/bin:/home/foobaruser/.local/bin:/home/foobaruser/.krew/bin:/home/foobaruser/.pyenv/bin:/home/foobaruser/.tfenv/bin:/home/foobaruser/dd/devtools/bin:/usr/local/bin:/home/foobaruser/.yarn/bin:/home/foobaruser/.config/yarn/global/node_modules/.bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin:/snap/bin:/opt/puppetlabs/bin:/home/foobaruser/bin:/home/foobaruser/sync/work/d/dmd2:/home/foobaruser/sources/nim/bin:/home/foobaruser/.nimble/bin'
PATH_INFO
'/'
PWD
'/home/foobaruser/sources/minimal-django-example'
PYENV_DIR
'/home/foobaruser/sources/minimal-django-example'
PYENV_HOOK_PATH
'/home/foobaruser/.pyenv/pyenv.d:/usr/etc/pyenv.d:/usr/local/etc/pyenv.d:/etc/pyenv.d:/usr/lib/pyenv/hooks:/home/foobaruser/.pyenv/plugins/pyenv-virtualenv/etc/pyenv.d:/home/foobaruser/.pyenv/plugins/pyenv-which-ext/etc/pyenv.d'
PYENV_ROOT
'/home/foobaruser/.pyenv'
PYENV_SHELL
'fish'
PYENV_VERSION
'testsca'
PYENV_VIRTUALENV_INIT
'1'
PYENV_VIRTUAL_ENV
'/home/foobaruser/.pyenv/versions/3.12.5/envs/testsca'
QT_ACCESSIBILITY
'1'
QT_IM_MODULE
'ibus'
QUERY_STRING
''
REMOTE_ADDR
'127.0.0.1'
REMOTE_HOST
''
REQUEST_METHOD
'GET'
RSYNC_PASSWORD
'********************'
RUN_MAIN
'true'
SCRIPT_NAME
''
SERVER_NAME
'localhost'
SERVER_PORT
'8000'
SERVER_PROTOCOL
'HTTP/1.1'
SERVER_SOFTWARE
'WSGIServer/0.2'
SESSION_MANAGER
'local/foobaruser-ThinkPad-P15v-Gen-2i:@/tmp/.ICE-unix/5619,unix/foobaruser-ThinkPad-P15v-Gen-2i:/tmp/.ICE-unix/5619'
SHELL
'/bin/bash'
SHLVL
'1'
SSH_AGENT_LAUNCHER
'gnome-keyring'
SSH_AUTH_SOCK
'/run/user/1000/keyring/ssh'
SYSTEMD_EXEC_PID
'5619'
TERM
'xterm-256color'
TILIX_ID
'62221076-149e-426c-bba3-cf14d3a9099a'
USER
'foobaruser'
USERNAME
'foobaruser'
VIRTUAL_ENV
'/home/foobaruser/.pyenv/versions/3.12.5/envs/testsca'
VTE_VERSION
'6800'
WAYLAND_DISPLAY
'wayland-0'
XAUTHORITY
'/run/user/1000/.mutter-Xwaylandauth.50H8Z2'
XDG_CONFIG_DIRS
'/etc/xdg/xdg-ubuntu:/etc/xdg'
XDG_CURRENT_DESKTOP
'ubuntu:GNOME'
XDG_DATA_DIRS
'/usr/share/ubuntu:/home/foobaruser/.local/share/flatpak/exports/share:/var/lib/flatpak/exports/share:/usr/local/share/:/usr/share/:/var/lib/snapd/desktop'
XDG_MENU_PREFIX
'gnome-'
XDG_RUNTIME_DIR
'/run/user/1000'
XDG_SESSION_CLASS
'user'
XDG_SESSION_DESKTOP
'ubuntu'
XDG_SESSION_TYPE
'wayland'
XMODIFIERS
'@im=ibus'
wsgi.errors
<_io.TextIOWrapper name='<stderr>' mode='w' encoding='utf-8'>
wsgi.file_wrapper
<class 'wsgiref.util.FileWrapper'>
wsgi.input
<django.core.handlers.wsgi.LimitedStream object at 0x739fa53ad4b0>
wsgi.multiprocess
False
wsgi.multithread
True
wsgi.run_once
False
wsgi.url_scheme
'http'
wsgi.version
(1, 0)
+ + +

Settings

+

Using settings module

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
SettingValue
ABSOLUTE_URL_OVERRIDES
{}
ADMINS
[]
ALLOWED_HOSTS
[]
APPEND_SLASH
True
AUTHENTICATION_BACKENDS
['django.contrib.auth.backends.ModelBackend']
AUTH_PASSWORD_VALIDATORS
'********************'
AUTH_USER_MODEL
'auth.User'
CACHES
{'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}}
CACHE_MIDDLEWARE_ALIAS
'default'
CACHE_MIDDLEWARE_KEY_PREFIX
'********************'
CACHE_MIDDLEWARE_SECONDS
600
CSRF_COOKIE_AGE
31449600
CSRF_COOKIE_DOMAIN
None
CSRF_COOKIE_HTTPONLY
False
CSRF_COOKIE_NAME
'csrftoken'
CSRF_COOKIE_PATH
'/'
CSRF_COOKIE_SAMESITE
'Lax'
CSRF_COOKIE_SECURE
False
CSRF_FAILURE_VIEW
'django.views.csrf.csrf_failure'
CSRF_HEADER_NAME
'HTTP_X_CSRFTOKEN'
CSRF_TRUSTED_ORIGINS
[]
CSRF_USE_SESSIONS
False
DATABASES
{'default': {'ATOMIC_REQUESTS': False,
+             'AUTOCOMMIT': True,
+             'CONN_HEALTH_CHECKS': False,
+             'CONN_MAX_AGE': 0,
+             'ENGINE': 'django.db.backends.dummy',
+             'HOST': '',
+             'NAME': '',
+             'OPTIONS': {},
+             'PASSWORD': '********************',
+             'PORT': '',
+             'TEST': {'CHARSET': None,
+                      'COLLATION': None,
+                      'MIGRATE': True,
+                      'MIRROR': None,
+                      'NAME': None},
+             'TIME_ZONE': None,
+             'USER': ''}}
DATABASE_ROUTERS
[]
DATA_UPLOAD_MAX_MEMORY_SIZE
2621440
DATA_UPLOAD_MAX_NUMBER_FIELDS
1000
DATA_UPLOAD_MAX_NUMBER_FILES
100
DATETIME_FORMAT
'N j, Y, P'
DATETIME_INPUT_FORMATS
['%Y-%m-%d %H:%M:%S',
+ '%Y-%m-%d %H:%M:%S.%f',
+ '%Y-%m-%d %H:%M',
+ '%m/%d/%Y %H:%M:%S',
+ '%m/%d/%Y %H:%M:%S.%f',
+ '%m/%d/%Y %H:%M',
+ '%m/%d/%y %H:%M:%S',
+ '%m/%d/%y %H:%M:%S.%f',
+ '%m/%d/%y %H:%M']
DATE_FORMAT
'N j, Y'
DATE_INPUT_FORMATS
['%Y-%m-%d',
+ '%m/%d/%Y',
+ '%m/%d/%y',
+ '%b %d %Y',
+ '%b %d, %Y',
+ '%d %b %Y',
+ '%d %b, %Y',
+ '%B %d %Y',
+ '%B %d, %Y',
+ '%d %B %Y',
+ '%d %B, %Y']
DEBUG
True
DEBUG_PROPAGATE_EXCEPTIONS
False
DECIMAL_SEPARATOR
'.'
DEFAULT_AUTO_FIELD
'django.db.models.AutoField'
DEFAULT_CHARSET
'utf-8'
DEFAULT_EXCEPTION_REPORTER
'django.views.debug.ExceptionReporter'
DEFAULT_EXCEPTION_REPORTER_FILTER
'django.views.debug.SafeExceptionReporterFilter'
DEFAULT_FROM_EMAIL
'webmaster@localhost'
DEFAULT_INDEX_TABLESPACE
''
DEFAULT_TABLESPACE
''
DISALLOWED_USER_AGENTS
[]
EMAIL_BACKEND
'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST
'localhost'
EMAIL_HOST_PASSWORD
'********************'
EMAIL_HOST_USER
''
EMAIL_PORT
25
EMAIL_SSL_CERTFILE
None
EMAIL_SSL_KEYFILE
'********************'
EMAIL_SUBJECT_PREFIX
'[Django] '
EMAIL_TIMEOUT
None
EMAIL_USE_LOCALTIME
False
EMAIL_USE_SSL
False
EMAIL_USE_TLS
False
FILE_UPLOAD_DIRECTORY_PERMISSIONS
None
FILE_UPLOAD_HANDLERS
['django.core.files.uploadhandler.MemoryFileUploadHandler',
+ 'django.core.files.uploadhandler.TemporaryFileUploadHandler']
FILE_UPLOAD_MAX_MEMORY_SIZE
2621440
FILE_UPLOAD_PERMISSIONS
420
FILE_UPLOAD_TEMP_DIR
None
FIRST_DAY_OF_WEEK
0
FIXTURE_DIRS
[]
FORCE_SCRIPT_NAME
None
FORMAT_MODULE_PATH
None
FORMS_URLFIELD_ASSUME_HTTPS
False
FORM_RENDERER
'django.forms.renderers.DjangoTemplates'
IGNORABLE_404_URLS
[]
INSTALLED_APPS
[]
INTERNAL_IPS
[]
LANGUAGES
[('af', 'Afrikaans'),
+ ('ar', 'Arabic'),
+ ('ar-dz', 'Algerian Arabic'),
+ ('ast', 'Asturian'),
+ ('az', 'Azerbaijani'),
+ ('bg', 'Bulgarian'),
+ ('be', 'Belarusian'),
+ ('bn', 'Bengali'),
+ ('br', 'Breton'),
+ ('bs', 'Bosnian'),
+ ('ca', 'Catalan'),
+ ('ckb', 'Central Kurdish (Sorani)'),
+ ('cs', 'Czech'),
+ ('cy', 'Welsh'),
+ ('da', 'Danish'),
+ ('de', 'German'),
+ ('dsb', 'Lower Sorbian'),
+ ('el', 'Greek'),
+ ('en', 'English'),
+ ('en-au', 'Australian English'),
+ ('en-gb', 'British English'),
+ ('eo', 'Esperanto'),
+ ('es', 'Spanish'),
+ ('es-ar', 'Argentinian Spanish'),
+ ('es-co', 'Colombian Spanish'),
+ ('es-mx', 'Mexican Spanish'),
+ ('es-ni', 'Nicaraguan Spanish'),
+ ('es-ve', 'Venezuelan Spanish'),
+ ('et', 'Estonian'),
+ ('eu', 'Basque'),
+ ('fa', 'Persian'),
+ ('fi', 'Finnish'),
+ ('fr', 'French'),
+ ('fy', 'Frisian'),
+ ('ga', 'Irish'),
+ ('gd', 'Scottish Gaelic'),
+ ('gl', 'Galician'),
+ ('he', 'Hebrew'),
+ ('hi', 'Hindi'),
+ ('hr', 'Croatian'),
+ ('hsb', 'Upper Sorbian'),
+ ('hu', 'Hungarian'),
+ ('hy', 'Armenian'),
+ ('ia', 'Interlingua'),
+ ('id', 'Indonesian'),
+ ('ig', 'Igbo'),
+ ('io', 'Ido'),
+ ('is', 'Icelandic'),
+ ('it', 'Italian'),
+ ('ja', 'Japanese'),
+ ('ka', 'Georgian'),
+ ('kab', 'Kabyle'),
+ ('kk', 'Kazakh'),
+ ('km', 'Khmer'),
+ ('kn', 'Kannada'),
+ ('ko', 'Korean'),
+ ('ky', 'Kyrgyz'),
+ ('lb', 'Luxembourgish'),
+ ('lt', 'Lithuanian'),
+ ('lv', 'Latvian'),
+ ('mk', 'Macedonian'),
+ ('ml', 'Malayalam'),
+ ('mn', 'Mongolian'),
+ ('mr', 'Marathi'),
+ ('ms', 'Malay'),
+ ('my', 'Burmese'),
+ ('nb', 'Norwegian Bokmål'),
+ ('ne', 'Nepali'),
+ ('nl', 'Dutch'),
+ ('nn', 'Norwegian Nynorsk'),
+ ('os', 'Ossetic'),
+ ('pa', 'Punjabi'),
+ ('pl', 'Polish'),
+ ('pt', 'Portuguese'),
+ ('pt-br', 'Brazilian Portuguese'),
+ ('ro', 'Romanian'),
+ ('ru', 'Russian'),
+ ('sk', 'Slovak'),
+ ('sl', 'Slovenian'),
+ ('sq', 'Albanian'),
+ ('sr', 'Serbian'),
+ ('sr-latn', 'Serbian Latin'),
+ ('sv', 'Swedish'),
+ ('sw', 'Swahili'),
+ ('ta', 'Tamil'),
+ ('te', 'Telugu'),
+ ('tg', 'Tajik'),
+ ('th', 'Thai'),
+ ('tk', 'Turkmen'),
+ ('tr', 'Turkish'),
+ ('tt', 'Tatar'),
+ ('udm', 'Udmurt'),
+ ('ug', 'Uyghur'),
+ ('uk', 'Ukrainian'),
+ ('ur', 'Urdu'),
+ ('uz', 'Uzbek'),
+ ('vi', 'Vietnamese'),
+ ('zh-hans', 'Simplified Chinese'),
+ ('zh-hant', 'Traditional Chinese')]
LANGUAGES_BIDI
['he', 'ar', 'ar-dz', 'ckb', 'fa', 'ug', 'ur']
LANGUAGE_CODE
'en-us'
LANGUAGE_COOKIE_AGE
None
LANGUAGE_COOKIE_DOMAIN
None
LANGUAGE_COOKIE_HTTPONLY
False
LANGUAGE_COOKIE_NAME
'django_language'
LANGUAGE_COOKIE_PATH
'/'
LANGUAGE_COOKIE_SAMESITE
None
LANGUAGE_COOKIE_SECURE
False
LOCALE_PATHS
[]
LOGGING
{}
LOGGING_CONFIG
'logging.config.dictConfig'
LOGIN_REDIRECT_URL
'/accounts/profile/'
LOGIN_URL
'/accounts/login/'
LOGOUT_REDIRECT_URL
None
MANAGERS
[]
MEDIA_ROOT
''
MEDIA_URL
'/'
MESSAGE_STORAGE
'django.contrib.messages.storage.fallback.FallbackStorage'
MIDDLEWARE
[]
MIGRATION_MODULES
{}
MONTH_DAY_FORMAT
'F j'
NUMBER_GROUPING
0
PASSWORD_HASHERS
'********************'
PASSWORD_RESET_TIMEOUT
'********************'
PREPEND_WWW
False
ROOT_URLCONF
'__main__'
SECRET_KEY
'********************'
SECRET_KEY_FALLBACKS
'********************'
SECURE_CONTENT_TYPE_NOSNIFF
True
SECURE_CROSS_ORIGIN_OPENER_POLICY
'same-origin'
SECURE_HSTS_INCLUDE_SUBDOMAINS
False
SECURE_HSTS_PRELOAD
False
SECURE_HSTS_SECONDS
0
SECURE_PROXY_SSL_HEADER
None
SECURE_REDIRECT_EXEMPT
[]
SECURE_REFERRER_POLICY
'same-origin'
SECURE_SSL_HOST
None
SECURE_SSL_REDIRECT
False
SERVER_EMAIL
'root@localhost'
SESSION_CACHE_ALIAS
'default'
SESSION_COOKIE_AGE
1209600
SESSION_COOKIE_DOMAIN
None
SESSION_COOKIE_HTTPONLY
True
SESSION_COOKIE_NAME
'sessionid'
SESSION_COOKIE_PATH
'/'
SESSION_COOKIE_SAMESITE
'Lax'
SESSION_COOKIE_SECURE
False
SESSION_ENGINE
'django.contrib.sessions.backends.db'
SESSION_EXPIRE_AT_BROWSER_CLOSE
False
SESSION_FILE_PATH
None
SESSION_SAVE_EVERY_REQUEST
False
SESSION_SERIALIZER
'django.contrib.sessions.serializers.JSONSerializer'
SHORT_DATETIME_FORMAT
'm/d/Y P'
SHORT_DATE_FORMAT
'm/d/Y'
SIGNING_BACKEND
'django.core.signing.TimestampSigner'
SILENCED_SYSTEM_CHECKS
[]
STATICFILES_DIRS
[]
STATICFILES_FINDERS
['django.contrib.staticfiles.finders.FileSystemFinder',
+ 'django.contrib.staticfiles.finders.AppDirectoriesFinder']
STATIC_ROOT
None
STATIC_URL
None
STORAGES
{'default': {'BACKEND': 'django.core.files.storage.FileSystemStorage'},
+ 'staticfiles': {'BACKEND': 'django.contrib.staticfiles.storage.StaticFilesStorage'}}
TEMPLATES
[{'BACKEND': 'django.template.backends.django.DjangoTemplates',
+  'DIRS': ['templates']}]
TEST_NON_SERIALIZED_APPS
[]
TEST_RUNNER
'django.test.runner.DiscoverRunner'
THOUSAND_SEPARATOR
','
TIME_FORMAT
'P'
TIME_INPUT_FORMATS
['%H:%M:%S', '%H:%M:%S.%f', '%H:%M']
TIME_ZONE
'America/Chicago'
USE_I18N
True
USE_THOUSAND_SEPARATOR
False
USE_TZ
True
USE_X_FORWARDED_HOST
False
USE_X_FORWARDED_PORT
False
WSGI_APPLICATION
None
X_FRAME_OPTIONS
'DENY'
YEAR_MONTH_FORMAT
'F Y'
+ +
+
+ + +
+

+ You’re seeing this error because you have DEBUG = True in your + Django settings file. Change that to False, and Django will + display a standard page generated by the handler for this status code. +

+
+ + + diff --git a/tests/appsec/iast/fixtures/plain_stacktrace.txt b/tests/appsec/iast/fixtures/plain_stacktrace.txt new file mode 100644 index 00000000000..be648d208ae --- /dev/null +++ b/tests/appsec/iast/fixtures/plain_stacktrace.txt @@ -0,0 +1,35 @@ +Environment: + + +Request Method: GET +Request URL: http://localhost:8000/ + +Django Version: 5.1.5 +Python Version: 3.12.5 +Installed Applications: +[] +Installed Middleware: +[] + +Traceback (most recent call last): + File "/usr/local/lib/python3.9/site-packages/some_module.py", line 42, in process_data + result = complex_calculation(data) + File "/usr/local/lib/python3.9/site-packages/another_module.py", line 158, in complex_calculation + intermediate = perform_subtask(data_slice) + File "/usr/local/lib/python3.9/site-packages/subtask_module.py", line 27, in perform_subtask + processed = handle_special_case(data_slice) + File "/usr/local/lib/python3.9/site-packages/special_cases.py", line 84, in handle_special_case + return apply_algorithm(data_slice, params) + File "/usr/local/lib/python3.9/site-packages/algorithm_module.py", line 112, in apply_algorithm + step_result = execute_step(data, params) + File "/usr/local/lib/python3.9/site-packages/step_execution.py", line 55, in execute_step + temp = pre_process(data) + File "/usr/local/lib/python3.9/site-packages/pre_processing.py", line 33, in pre_process + validated_data = validate_input(data) + File "/usr/local/lib/python3.9/site-packages/validation.py", line 66, in validate_input + check_constraints(data) + File "/usr/local/lib/python3.9/site-packages/constraints.py", line 19, in check_constraints + raise ValueError("Constraint violation at step 9") +ValueError: Constraint violation at step 9 + +Lorem Ipsum Foobar diff --git a/tests/appsec/iast/taint_sinks/test_sql_injection_redacted.py b/tests/appsec/iast/taint_sinks/test_sql_injection_redacted.py index 01645cf1d39..e00af701427 100644 --- a/tests/appsec/iast/taint_sinks/test_sql_injection_redacted.py +++ 
b/tests/appsec/iast/taint_sinks/test_sql_injection_redacted.py @@ -29,7 +29,7 @@ list(get_parametrize(VULN_SQL_INJECTION, ignore_list=_ignore_list)), ) def test_sqli_redaction_suite(evidence_input, sources_expected, vulnerabilities_expected, iast_context_defaults): - with override_global_config(dict(_deduplication_enabled=False)): + with override_global_config(dict(_iast_deduplication_enabled=False)): tainted_object = _taint_pyobject_multiranges( evidence_input["value"], [ diff --git a/tests/appsec/iast/taint_sinks/test_stacktrace_leak.py b/tests/appsec/iast/taint_sinks/test_stacktrace_leak.py new file mode 100644 index 00000000000..45c40f43df7 --- /dev/null +++ b/tests/appsec/iast/taint_sinks/test_stacktrace_leak.py @@ -0,0 +1,39 @@ +import os + +from ddtrace.appsec._iast.constants import VULN_STACKTRACE_LEAK +from ddtrace.appsec._iast.taint_sinks.stacktrace_leak import asm_check_stacktrace_leak +from tests.appsec.iast.taint_sinks.conftest import _get_span_report + + +def _load_html_django_stacktrace(): + return open(os.path.join(os.path.dirname(__file__), "../fixtures/django_debug_page.html")).read() + + +def _load_text_stacktrace(): + return open(os.path.join(os.path.dirname(__file__), "../fixtures/plain_stacktrace.txt")).read() + + +def test_asm_check_stacktrace_leak_html(iast_context_defaults): + asm_check_stacktrace_leak(_load_html_django_stacktrace()) + span_report = _get_span_report() + vulnerabilities = list(span_report.vulnerabilities) + vulnerabilities_types = [vuln.type for vuln in vulnerabilities] + assert len(vulnerabilities) == 1 + assert VULN_STACKTRACE_LEAK in vulnerabilities_types + assert ( + vulnerabilities[0].evidence.value + == 'Module: ".home.foobaruser.sources.minimal-django-example.app.py"\nException: IndexError' + ) + + +def test_asm_check_stacktrace_leak_text(iast_context_defaults): + asm_check_stacktrace_leak(_load_text_stacktrace()) + span_report = _get_span_report() + vulnerabilities = list(span_report.vulnerabilities) + 
vulnerabilities_types = [vuln.type for vuln in vulnerabilities] + assert len(vulnerabilities) == 1 + assert VULN_STACKTRACE_LEAK in vulnerabilities_types + assert ( + vulnerabilities[0].evidence.value + == 'Module: ".usr.local.lib.python3.9.site-packages.constraints.py"\nException: ValueError' + ) diff --git a/tests/appsec/iast/taint_tracking/conftest.py b/tests/appsec/iast/taint_tracking/conftest.py index b08bb398a27..831506e8137 100644 --- a/tests/appsec/iast/taint_tracking/conftest.py +++ b/tests/appsec/iast/taint_tracking/conftest.py @@ -7,7 +7,7 @@ @pytest.fixture(autouse=True) def iast_create_context(): - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False, request_sampling=100.0)): + with override_global_config(dict(_iast_enabled=True, _iast_deduplication_enabled=False, request_sampling=100.0)): _start_iast_context_and_oce() yield _end_iast_context_and_oce() diff --git a/tests/appsec/iast/test_processor.py b/tests/appsec/iast/test_processor.py index 3deaa60a530..3bb5eaa5015 100644 --- a/tests/appsec/iast/test_processor.py +++ b/tests/appsec/iast/test_processor.py @@ -5,8 +5,8 @@ from ddtrace.appsec._constants import IAST from ddtrace.appsec._iast import oce from ddtrace.appsec._iast._iast_request_context import get_iast_reporter +from ddtrace.constants import _SAMPLING_PRIORITY_KEY from ddtrace.constants import AUTO_KEEP -from ddtrace.constants import SAMPLING_PRIORITY_KEY from ddtrace.constants import USER_KEEP from ddtrace.ext import SpanTypes from tests.utils import DummyTracer @@ -61,7 +61,7 @@ def test_appsec_iast_processor_ensure_span_is_manual_keep(iast_context_defaults, result = span.get_tag(IAST.JSON) assert len(json.loads(result)["vulnerabilities"]) == 1 - assert span.get_metric(SAMPLING_PRIORITY_KEY) is USER_KEEP + assert span.get_metric(_SAMPLING_PRIORITY_KEY) is USER_KEEP @pytest.mark.skip_iast_check_logs @@ -74,7 +74,7 @@ def test_appsec_iast_processor_ensure_span_is_sampled(iast_context_defaults, sam with 
override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, _iast_request_sampling=sampling_rate, ) ): @@ -87,9 +87,9 @@ def test_appsec_iast_processor_ensure_span_is_sampled(iast_context_defaults, sam result = span.get_tag(IAST.JSON) if sampling_rate == 0.0: assert result is None - assert span.get_metric(SAMPLING_PRIORITY_KEY) is AUTO_KEEP + assert span.get_metric(_SAMPLING_PRIORITY_KEY) is AUTO_KEEP assert span.get_metric(IAST.ENABLED) == 0.0 else: assert len(json.loads(result)["vulnerabilities"]) == 1 - assert span.get_metric(SAMPLING_PRIORITY_KEY) is USER_KEEP + assert span.get_metric(_SAMPLING_PRIORITY_KEY) is USER_KEEP assert span.get_metric(IAST.ENABLED) == 1.0 diff --git a/tests/appsec/iast_aggregated_memcheck/test_aggregated_memleaks.py b/tests/appsec/iast_aggregated_memcheck/test_aggregated_memleaks.py index 4980259a3a6..2430c872658 100644 --- a/tests/appsec/iast_aggregated_memcheck/test_aggregated_memleaks.py +++ b/tests/appsec/iast_aggregated_memcheck/test_aggregated_memleaks.py @@ -5,7 +5,9 @@ @pytest.mark.asyncio async def test_aggregated_leaks(): - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False, _iast_request_sampling=100.0)): + with override_global_config( + dict(_iast_enabled=True, _iast_deduplication_enabled=False, _iast_request_sampling=100.0) + ): from scripts.iast.leak_functions import iast_leaks result = await iast_leaks(60000, 0.2, 500) == 0 diff --git a/tests/appsec/integrations/django_tests/conftest.py b/tests/appsec/integrations/django_tests/conftest.py index 76ffa4a3763..e24eb07081e 100644 --- a/tests/appsec/integrations/django_tests/conftest.py +++ b/tests/appsec/integrations/django_tests/conftest.py @@ -22,7 +22,7 @@ def pytest_configure(): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, _iast_request_sampling=100.0, ) ): @@ -55,7 +55,7 @@ def test_spans(tracer): with 
override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, _iast_request_sampling=100.0, ) ): diff --git a/tests/appsec/integrations/django_tests/django_app/urls.py b/tests/appsec/integrations/django_tests/django_app/urls.py index be2d142baa2..c9dffbde8d8 100644 --- a/tests/appsec/integrations/django_tests/django_app/urls.py +++ b/tests/appsec/integrations/django_tests/django_app/urls.py @@ -81,4 +81,6 @@ def shutdown(request): handler("appsec/validate_querydict/$", views.validate_querydict, name="validate_querydict"), path("appsec/path-params///", views.path_params_view, name="path-params-view"), path("appsec/checkuser//", views.checkuser_view, name="checkuser"), + path("appsec/stacktrace_leak/", views.stacktrace_leak_view), + path("appsec/stacktrace_leak_500/", views.stacktrace_leak_500_view), ] diff --git a/tests/appsec/integrations/django_tests/django_app/views.py b/tests/appsec/integrations/django_tests/django_app/views.py index ef4fd78b138..74cc239cf34 100644 --- a/tests/appsec/integrations/django_tests/django_app/views.py +++ b/tests/appsec/integrations/django_tests/django_app/views.py @@ -273,3 +273,20 @@ def validate_querydict(request): return HttpResponse( "x=%s, all=%s, keys=%s, urlencode=%s" % (str(res), str(lres), str(keys), qd.urlencode()), status=200 ) + + +def stacktrace_leak_view(request): + from tests.appsec.iast.taint_sinks.test_stacktrace_leak import _load_html_django_stacktrace + + return HttpResponse(_load_html_django_stacktrace()) + + +def stacktrace_leak_500_view(request): + try: + raise Exception("FooBar Exception") + except Exception: + import sys + + from django.views.debug import technical_500_response + + return technical_500_response(request, *sys.exc_info()) diff --git a/tests/appsec/integrations/django_tests/test_django_appsec_iast.py b/tests/appsec/integrations/django_tests/test_django_appsec_iast.py index d2c52337482..8f4768d8a8c 100644 --- 
a/tests/appsec/integrations/django_tests/test_django_appsec_iast.py +++ b/tests/appsec/integrations/django_tests/test_django_appsec_iast.py @@ -3,6 +3,7 @@ import pytest +from ddtrace.appsec._asm_request_context import start_context from ddtrace.appsec._constants import IAST from ddtrace.appsec._iast import oce from ddtrace.appsec._iast._patch_modules import patch_iast @@ -11,6 +12,8 @@ from ddtrace.appsec._iast.constants import VULN_HEADER_INJECTION from ddtrace.appsec._iast.constants import VULN_INSECURE_COOKIE from ddtrace.appsec._iast.constants import VULN_SQL_INJECTION +from ddtrace.appsec._iast.constants import VULN_STACKTRACE_LEAK +from ddtrace.ext import SpanTypes from ddtrace.internal.compat import urlencode from tests.appsec.iast.iast_utils import get_line_and_hash from tests.utils import override_env @@ -22,9 +25,7 @@ @pytest.fixture(autouse=True) def iast_context(): - with override_env( - {IAST.ENV: "True", IAST.ENV_REQUEST_SAMPLING: "100", "_DD_APPSEC_DEDUPLICATION_ENABLED": "false"} - ): + with override_env({IAST.ENV: "True", IAST.ENV_REQUEST_SAMPLING: "100", "DD_IAST_DEDUPLICATION_ENABLED": "false"}): yield @@ -84,7 +85,7 @@ def _aux_appsec_get_root_span_with_exception( @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") def test_django_weak_hash(client, test_spans, tracer): - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False)): + with override_global_config(dict(_iast_enabled=True, _iast_deduplication_enabled=False)): oce.reconfigure() patch_iast({"weak_hash": True}) root_span, _ = _aux_appsec_get_root_span(client, test_spans, tracer, url="/appsec/weak-hash/") @@ -97,7 +98,7 @@ def test_django_weak_hash(client, test_spans, tracer): @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") def test_django_tainted_user_agent_iast_enabled(client, test_spans, tracer): - with override_global_config(dict(_iast_enabled=True, 
_deduplication_enabled=False)): + with override_global_config(dict(_iast_enabled=True, _iast_deduplication_enabled=False)): root_span, response = _aux_appsec_get_root_span( client, test_spans, @@ -154,7 +155,7 @@ def test_django_view_with_exception(client, test_spans, tracer, payload, content @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") def test_django_tainted_user_agent_iast_disabled(client, test_spans, tracer): - with override_global_config(dict(_iast_enabled=False, _deduplication_enabled=False)): + with override_global_config(dict(_iast_enabled=False, _iast_deduplication_enabled=False)): oce.reconfigure() root_span, response = _aux_appsec_get_root_span( @@ -176,7 +177,9 @@ def test_django_tainted_user_agent_iast_disabled(client, test_spans, tracer): @pytest.mark.django_db() @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") def test_django_tainted_user_agent_iast_enabled_sqli_http_request_parameter(client, test_spans, tracer): - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False, _iast_request_sampling=100.0)): + with override_global_config( + dict(_iast_enabled=True, _iast_deduplication_enabled=False, _iast_request_sampling=100.0) + ): root_span, response = _aux_appsec_get_root_span( client, test_spans, @@ -222,8 +225,10 @@ def test_django_tainted_user_agent_iast_enabled_sqli_http_request_parameter(clie @pytest.mark.django_db() @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") -def test_django_tainted_user_agent_iast_enabled_sqli_http_request_parameter_name_get(client, test_spans, tracer): - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False, _iast_request_sampling=100.0)): +def test_django_sqli_http_request_parameter_name_get(client, test_spans, tracer): + with override_global_config( + dict(_iast_enabled=True, _iast_deduplication_enabled=False, 
_iast_request_sampling=100.0) + ): root_span, response = _aux_appsec_get_root_span( client, test_spans, @@ -271,8 +276,10 @@ def test_django_tainted_user_agent_iast_enabled_sqli_http_request_parameter_name @pytest.mark.django_db() @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") -def test_django_tainted_user_agent_iast_enabled_sqli_http_request_parameter_name_post(client, test_spans, tracer): - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False, _iast_request_sampling=100.0)): +def test_django_sqli_http_request_parameter_name_post(client, test_spans, tracer): + with override_global_config( + dict(_iast_enabled=True, _iast_deduplication_enabled=False, _iast_request_sampling=100.0) + ): root_span, response = _aux_appsec_get_root_span( client, test_spans, @@ -321,8 +328,8 @@ def test_django_tainted_user_agent_iast_enabled_sqli_http_request_parameter_name @pytest.mark.django_db() @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") -def test_django_tainted_user_agent_iast_enabled_sqli_http_request_header_value(client, test_spans, tracer): - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False)): +def test_django_sqli_http_request_header_value(client, test_spans, tracer): + with override_global_config(dict(_iast_enabled=True, _iast_deduplication_enabled=False)): root_span, response = _aux_appsec_get_root_span( client, test_spans, @@ -359,7 +366,7 @@ def test_django_tainted_user_agent_iast_enabled_sqli_http_request_header_value(c @pytest.mark.django_db() @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") -def test_django_tainted_user_agent_iast_disabled_sqli_http_request_header_value(client, test_spans, tracer): +def test_django_iast_disabled_sqli_http_request_header_value(client, test_spans, tracer): with override_global_config(dict(_iast_enabled=False)): root_span, 
response = _aux_appsec_get_root_span( client, @@ -379,8 +386,8 @@ def test_django_tainted_user_agent_iast_disabled_sqli_http_request_header_value( @pytest.mark.django_db() @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") -def test_django_tainted_user_agent_iast_enabled_sqli_http_request_header_name(client, test_spans, tracer): - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False)): +def test_django_sqli_http_request_header_name(client, test_spans, tracer): + with override_global_config(dict(_iast_enabled=True, _iast_deduplication_enabled=False)): root_span, response = _aux_appsec_get_root_span( client, test_spans, @@ -417,7 +424,7 @@ def test_django_tainted_user_agent_iast_enabled_sqli_http_request_header_name(cl @pytest.mark.django_db() @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") -def test_django_tainted_user_agent_iast_disabled_sqli_http_request_header_name(client, test_spans, tracer): +def test_django_iast_disabled_sqli_http_request_header_name(client, test_spans, tracer): with override_global_config(dict(_iast_enabled=False)): root_span, response = _aux_appsec_get_root_span( client, @@ -437,7 +444,7 @@ def test_django_tainted_user_agent_iast_disabled_sqli_http_request_header_name(c @pytest.mark.django_db() @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") -def test_django_iast_enabled_full_sqli_http_path_parameter(client, test_spans, tracer): +def test_django_sqli_http_path_parameter(client, test_spans, tracer): root_span, response = _aux_appsec_get_root_span( client, test_spans, @@ -472,7 +479,7 @@ def test_django_iast_enabled_full_sqli_http_path_parameter(client, test_spans, t @pytest.mark.django_db() @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") -def test_django_iast_disabled_full_sqli_http_path_parameter(client, test_spans, 
tracer): +def test_django_iast_disabled_sqli_http_path_parameter(client, test_spans, tracer): with override_global_config(dict(_iast_enabled=False)): root_span, response = _aux_appsec_get_root_span( client, @@ -490,8 +497,8 @@ def test_django_iast_disabled_full_sqli_http_path_parameter(client, test_spans, @pytest.mark.django_db() @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") -def test_django_tainted_user_agent_iast_enabled_sqli_http_cookies_name(client, test_spans, tracer): - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False)): +def test_django_sqli_http_cookies_name(client, test_spans, tracer): + with override_global_config(dict(_iast_enabled=True, _iast_deduplication_enabled=False)): root_span, response = _aux_appsec_get_root_span( client, test_spans, @@ -530,7 +537,7 @@ def test_django_tainted_user_agent_iast_enabled_sqli_http_cookies_name(client, t @pytest.mark.django_db() @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") -def test_django_tainted_iast_disabled_sqli_http_cookies_name(client, test_spans, tracer): +def test_django_iast_disabled_sqli_http_cookies_name(client, test_spans, tracer): with override_global_config(dict(_iast_enabled=False)): root_span, response = _aux_appsec_get_root_span( client, @@ -548,8 +555,8 @@ def test_django_tainted_iast_disabled_sqli_http_cookies_name(client, test_spans, @pytest.mark.django_db() @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") -def test_django_tainted_user_agent_iast_enabled_sqli_http_cookies_value(client, test_spans, tracer): - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False)): +def test_django_sqli_http_cookies_value(client, test_spans, tracer): + with override_global_config(dict(_iast_enabled=True, _iast_deduplication_enabled=False)): root_span, response = _aux_appsec_get_root_span( client, 
test_spans, @@ -590,7 +597,7 @@ def test_django_tainted_user_agent_iast_enabled_sqli_http_cookies_value(client, @pytest.mark.django_db() @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") -def test_django_tainted_iast_disabled_sqli_http_cookies_value(client, test_spans, tracer): +def test_django_iast_disabled_sqli_http_cookies_value(client, test_spans, tracer): with override_global_config(dict(_iast_enabled=False)): root_span, response = _aux_appsec_get_root_span( client, @@ -615,8 +622,8 @@ def test_django_tainted_iast_disabled_sqli_http_cookies_value(client, test_spans ) @pytest.mark.django_db() @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") -def test_django_tainted_user_agent_iast_enabled_sqli_http_body(client, test_spans, tracer, payload, content_type): - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False)): +def test_django_sqli_http_body(client, test_spans, tracer, payload, content_type): + with override_global_config(dict(_iast_enabled=True, _iast_deduplication_enabled=False)): root_span, response = _aux_appsec_get_root_span( client, test_spans, @@ -692,7 +699,7 @@ def test_django_tainted_http_body_empty(client, test_spans, tracer, payload, con @pytest.mark.django_db() @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") -def test_django_tainted_iast_disabled_sqli_http_body(client, test_spans, tracer): +def test_django_iast_disabled_sqli_http_body(client, test_spans, tracer): with override_global_config(dict(_iast_enabled=False)): root_span, response = _aux_appsec_get_root_span( client, @@ -710,7 +717,7 @@ def test_django_tainted_iast_disabled_sqli_http_body(client, test_spans, tracer) @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") -def test_querydict_django_with_iast(client, test_spans, tracer): +def test_django_querydict(client, 
test_spans, tracer): with override_global_config(dict(_iast_enabled=True)): root_span, response = _aux_appsec_get_root_span( client, @@ -729,7 +736,7 @@ def test_querydict_django_with_iast(client, test_spans, tracer): @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") def test_django_command_injection(client, test_spans, tracer): - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False)): + with override_global_config(dict(_iast_enabled=True, _iast_deduplication_enabled=False)): oce.reconfigure() patch_iast({"command_injection": True}) from ddtrace.appsec._common_module_patches import patch_common_modules @@ -762,7 +769,7 @@ def test_django_command_injection(client, test_spans, tracer): @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") def test_django_header_injection(client, test_spans, tracer): - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False)): + with override_global_config(dict(_iast_enabled=True, _iast_deduplication_enabled=False)): oce.reconfigure() patch_iast({"header_injection": True}) root_span, _ = _aux_appsec_get_root_span( @@ -790,7 +797,7 @@ def test_django_header_injection(client, test_spans, tracer): @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") def test_django_insecure_cookie(client, test_spans, tracer): - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False)): + with override_global_config(dict(_iast_enabled=True, _iast_deduplication_enabled=False)): oce.reconfigure() root_span, _ = _aux_appsec_get_root_span( client, @@ -815,7 +822,7 @@ def test_django_insecure_cookie(client, test_spans, tracer): @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") def test_django_insecure_cookie_secure(client, test_spans, tracer): - with 
override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False)): + with override_global_config(dict(_iast_enabled=True, _iast_deduplication_enabled=False)): oce.reconfigure() root_span, _ = _aux_appsec_get_root_span( client, @@ -831,7 +838,7 @@ def test_django_insecure_cookie_secure(client, test_spans, tracer): @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") def test_django_insecure_cookie_empty_cookie(client, test_spans, tracer): - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False)): + with override_global_config(dict(_iast_enabled=True, _iast_deduplication_enabled=False)): oce.reconfigure() root_span, _ = _aux_appsec_get_root_span( client, @@ -847,7 +854,7 @@ def test_django_insecure_cookie_empty_cookie(client, test_spans, tracer): @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") def test_django_insecure_cookie_2_insecure_1_secure(client, test_spans, tracer): - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False)): + with override_global_config(dict(_iast_enabled=True, _iast_deduplication_enabled=False)): oce.reconfigure() root_span, _ = _aux_appsec_get_root_span( client, @@ -865,7 +872,7 @@ def test_django_insecure_cookie_2_insecure_1_secure(client, test_spans, tracer): @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") def test_django_insecure_cookie_special_characters(client, test_spans, tracer): - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False)): + with override_global_config(dict(_iast_enabled=True, _iast_deduplication_enabled=False)): oce.reconfigure() root_span, _ = _aux_appsec_get_root_span( client, @@ -886,3 +893,69 @@ def test_django_insecure_cookie_special_characters(client, test_spans, tracer): assert "line" not in vulnerability["location"].keys() assert vulnerability["location"]["spanId"] 
assert vulnerability["hash"] + + +@pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") +def test_django_stacktrace_leak(client, test_spans, tracer): + with override_global_config(dict(_iast_enabled=True, _iast_deduplication_enabled=False)): + oce.reconfigure() + root_span, _ = _aux_appsec_get_root_span( + client, + test_spans, + tracer, + url="/appsec/stacktrace_leak/", + ) + + assert root_span.get_metric(IAST.ENABLED) == 1.0 + + loaded = json.loads(root_span.get_tag(IAST.JSON)) + assert loaded["sources"] == [] + assert len(loaded["vulnerabilities"]) == 1 + vulnerability = loaded["vulnerabilities"][0] + assert vulnerability["type"] == VULN_STACKTRACE_LEAK + assert vulnerability["evidence"] == { + "valueParts": [ + {"value": 'Module: ".home.foobaruser.sources.minimal-django-example.app.py"\nException: IndexError'} + ] + } + assert vulnerability["hash"] + + +@pytest.fixture +def debug_mode(): + from django.conf import settings + + original_debug = settings.DEBUG + settings.DEBUG = True + yield + settings.DEBUG = original_debug + + +@pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") +def test_django_stacktrace_from_technical_500_response(client, test_spans, tracer, debug_mode): + with override_global_config(dict(_iast_enabled=True, _iast_deduplication_enabled=False)): + with tracer.trace("test", span_type=SpanTypes.WEB, service="test") as span: + start_context(span) + oce.reconfigure() + root_span, response = _aux_appsec_get_root_span( + client, + test_spans, + tracer, + url="/appsec/stacktrace_leak_500/", + content_type="text/html", + ) + + assert response.status_code == 500, "Expected a 500 status code" + assert root_span.get_metric(IAST.ENABLED) == 1.0 + + loaded = json.loads(root_span.get_tag(IAST.JSON)) + assert loaded["sources"] == [] + assert len(loaded["vulnerabilities"]) == 1 + vulnerability = loaded["vulnerabilities"][0] + assert vulnerability["type"] == VULN_STACKTRACE_LEAK 
+ assert vulnerability["evidence"] == { + "valueParts": [ + {"value": "Module: tests.appsec.integrations.django_tests.django_app.views\nException: Exception"} + ] + } + assert vulnerability["hash"] diff --git a/tests/appsec/integrations/flask_tests/test_iast_flask_telemetry.py b/tests/appsec/integrations/flask_tests/test_iast_flask_telemetry.py index 99e00112e6f..0d5899d0a7f 100644 --- a/tests/appsec/integrations/flask_tests/test_iast_flask_telemetry.py +++ b/tests/appsec/integrations/flask_tests/test_iast_flask_telemetry.py @@ -25,15 +25,18 @@ def test_flask_instrumented_metrics(telemetry_writer): metrics_result = telemetry_writer._namespace._metrics_data metrics_source_tags_result = [metric._tags[0][1] for metric in metrics_result["generate-metrics"]["iast"].values()] - assert len(metrics_source_tags_result) == 8 + assert len(metrics_source_tags_result) == 11 assert VULN_PATH_TRAVERSAL in metrics_source_tags_result assert origin_to_str(OriginType.HEADER_NAME) in metrics_source_tags_result assert origin_to_str(OriginType.HEADER) in metrics_source_tags_result + assert origin_to_str(OriginType.PARAMETER_NAME) in metrics_source_tags_result assert origin_to_str(OriginType.PARAMETER) in metrics_source_tags_result assert origin_to_str(OriginType.PATH) in metrics_source_tags_result assert origin_to_str(OriginType.PATH_PARAMETER) in metrics_source_tags_result assert origin_to_str(OriginType.QUERY) in metrics_source_tags_result assert origin_to_str(OriginType.BODY) in metrics_source_tags_result + assert origin_to_str(OriginType.COOKIE_NAME) in metrics_source_tags_result + assert origin_to_str(OriginType.COOKIE) in metrics_source_tags_result def test_flask_instrumented_metrics_iast_disabled(telemetry_writer): diff --git a/tests/appsec/integrations/flask_tests/test_iast_psycopg2.py b/tests/appsec/integrations/flask_tests/test_iast_psycopg2.py index d6d25f7ffc2..1522e1136ca 100644 --- a/tests/appsec/integrations/flask_tests/test_iast_psycopg2.py +++ 
b/tests/appsec/integrations/flask_tests/test_iast_psycopg2.py @@ -11,7 +11,9 @@ @pytest.fixture(autouse=True) def iast_create_context(): - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False, _iast_request_sampling=100.0)): + with override_global_config( + dict(_iast_enabled=True, _iast_deduplication_enabled=False, _iast_request_sampling=100.0) + ): _start_iast_context_and_oce() yield _end_iast_context_and_oce() diff --git a/tests/ci_visibility/test_ci_visibility.py b/tests/ci_visibility/test_ci_visibility.py index 9eee9eb43da..525d9b014fa 100644 --- a/tests/ci_visibility/test_ci_visibility.py +++ b/tests/ci_visibility/test_ci_visibility.py @@ -684,7 +684,7 @@ def test_civisibilitywriter_evp_proxy_url(self): ) ), mock.patch( "ddtrace.internal.agent.get_trace_url", return_value="http://evpproxy.bar:1234" - ), mock.patch("ddtrace.settings.config.Config", _get_default_civisibility_ddconfig()), mock.patch( + ), mock.patch("ddtrace.settings._config.Config", _get_default_civisibility_ddconfig()), mock.patch( "ddtrace.tracer", ddtrace.Tracer() ), mock.patch( "ddtrace.internal.ci_visibility.recorder.CIVisibility._agent_evp_proxy_is_available", return_value=True diff --git a/tests/conftest.py b/tests/conftest.py index 5ee2933f187..abd0f0dc25e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -131,7 +131,8 @@ def auto_enable_crashtracking(): def enable_crashtracking(auto_enable_crashtracking): if auto_enable_crashtracking: crashtracking.start() - assert crashtracking.is_started() + # TODO: re-enable once crashtracking startup is reliable in CI + # assert crashtracking.is_started() yield diff --git a/tests/contrib/aiohttp/test_middleware.py b/tests/contrib/aiohttp/test_middleware.py index 30d40654314..097548d7b2b 100644 --- a/tests/contrib/aiohttp/test_middleware.py +++ b/tests/contrib/aiohttp/test_middleware.py @@ -4,8 +4,8 @@ import pytest from ddtrace._trace.sampler import RateSampler +from ddtrace.constants import _SAMPLING_PRIORITY_KEY from ddtrace.constants import ERROR_MSG -from 
ddtrace.constants import SAMPLING_PRIORITY_KEY from ddtrace.constants import USER_KEEP from ddtrace.contrib.internal.aiohttp.middlewares import CONFIG_KEY from ddtrace.contrib.internal.aiohttp.middlewares import trace_app @@ -381,7 +381,7 @@ async def test_distributed_tracing(app_tracer, aiohttp_client): # with the right trace_id and parent_id assert span.trace_id == 100 assert span.parent_id == 42 - assert span.get_metric(SAMPLING_PRIORITY_KEY) is USER_KEEP + assert span.get_metric(_SAMPLING_PRIORITY_KEY) is USER_KEEP @flaky(1735812000) @@ -408,7 +408,7 @@ async def test_distributed_tracing_with_sampling_true(app_tracer, aiohttp_client # with the right trace_id and parent_id assert 100 == span.trace_id assert 42 == span.parent_id - assert 1 == span.get_metric(SAMPLING_PRIORITY_KEY) + assert 1 == span.get_metric(_SAMPLING_PRIORITY_KEY) @flaky(1735812000) @@ -435,7 +435,7 @@ async def test_distributed_tracing_with_sampling_false(app_tracer, aiohttp_clien # with the right trace_id and parent_id assert 100 == span.trace_id assert 42 == span.parent_id - assert 0 == span.get_metric(SAMPLING_PRIORITY_KEY) + assert 0 == span.get_metric(_SAMPLING_PRIORITY_KEY) async def test_distributed_tracing_disabled(app_tracer, aiohttp_client): @@ -487,11 +487,11 @@ async def test_distributed_tracing_sub_span(app_tracer, aiohttp_client): # with the right trace_id and parent_id assert 100 == span.trace_id assert 42 == span.parent_id - assert 0 == span.get_metric(SAMPLING_PRIORITY_KEY) + assert 0 == span.get_metric(_SAMPLING_PRIORITY_KEY) # check parenting is OK with custom sub-span created within server code assert 100 == sub_span.trace_id assert span.span_id == sub_span.parent_id - assert sub_span.get_metric(SAMPLING_PRIORITY_KEY) is None + assert sub_span.get_metric(_SAMPLING_PRIORITY_KEY) is None def _assert_200_parenting(client, traces): diff --git a/tests/contrib/cherrypy/test_middleware.py b/tests/contrib/cherrypy/test_middleware.py index 000f15610a0..9bc4a600136 100644 --- 
a/tests/contrib/cherrypy/test_middleware.py +++ b/tests/contrib/cherrypy/test_middleware.py @@ -11,10 +11,10 @@ import ddtrace from ddtrace import config +from ddtrace.constants import _SAMPLING_PRIORITY_KEY from ddtrace.constants import ERROR_MSG from ddtrace.constants import ERROR_STACK from ddtrace.constants import ERROR_TYPE -from ddtrace.constants import SAMPLING_PRIORITY_KEY from ddtrace.contrib.internal.cherrypy.middleware import TraceMiddleware from ddtrace.ext import http from tests.contrib.patch import emit_integration_and_version_to_test_agent @@ -286,7 +286,7 @@ def test_propagation(self): # ensure the propagation worked well assert s.trace_id == 1234 assert s.parent_id == 4567 - assert s.get_metric(SAMPLING_PRIORITY_KEY) == 2 + assert s.get_metric(_SAMPLING_PRIORITY_KEY) == 2 def test_disabled_distributed_tracing_config(self): previous_distributed_tracing = config.cherrypy["distributed_tracing"] @@ -313,7 +313,7 @@ def test_disabled_distributed_tracing_config(self): # ensure the propagation worked well assert s.trace_id != 1234 assert s.parent_id != 4567 - assert s.get_metric(SAMPLING_PRIORITY_KEY) != 2 + assert s.get_metric(_SAMPLING_PRIORITY_KEY) != 2 config.cherrypy["distributed_tracing"] = previous_distributed_tracing @@ -342,7 +342,7 @@ def test_disabled_distributed_tracing_middleware(self): # ensure the propagation worked well assert s.trace_id != 1234 assert s.parent_id != 4567 - assert s.get_metric(SAMPLING_PRIORITY_KEY) != 2 + assert s.get_metric(_SAMPLING_PRIORITY_KEY) != 2 cherrypy.tools.tracer.use_distributed_tracing = previous_distributed_tracing diff --git a/tests/contrib/dbapi/test_dbapi_appsec.py b/tests/contrib/dbapi/test_dbapi_appsec.py index d43d9c37e3c..085166df575 100644 --- a/tests/contrib/dbapi/test_dbapi_appsec.py +++ b/tests/contrib/dbapi/test_dbapi_appsec.py @@ -19,7 +19,7 @@ def setUp(self): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, 
_iast_request_sampling=100.0, ) ): @@ -29,7 +29,7 @@ def setUp(self): def tearDown(self): with override_global_config( - dict(_iast_enabled=True, _deduplication_enabled=False, _iast_request_sampling=100.0) + dict(_iast_enabled=True, _iast_deduplication_enabled=False, _iast_request_sampling=100.0) ): _end_iast_context_and_oce() diff --git a/tests/contrib/django/test_django.py b/tests/contrib/django/test_django.py index 7b8a0e18ef7..79baceb1652 100644 --- a/tests/contrib/django/test_django.py +++ b/tests/contrib/django/test_django.py @@ -18,10 +18,10 @@ import wrapt from ddtrace import config +from ddtrace.constants import _SAMPLING_PRIORITY_KEY from ddtrace.constants import ERROR_MSG from ddtrace.constants import ERROR_STACK from ddtrace.constants import ERROR_TYPE -from ddtrace.constants import SAMPLING_PRIORITY_KEY from ddtrace.constants import USER_KEEP from ddtrace.contrib import trace_utils from ddtrace.contrib.internal.django.patch import instrument_view @@ -1729,7 +1729,7 @@ def test_django_request_distributed(client, test_spans): trace_id=12345, parent_id=78910, metrics={ - SAMPLING_PRIORITY_KEY: USER_KEEP, + _SAMPLING_PRIORITY_KEY: USER_KEEP, }, ) assert root.get_tag("span.kind") == "server" diff --git a/tests/contrib/fastapi/test_fastapi_appsec_iast.py b/tests/contrib/fastapi/test_fastapi_appsec_iast.py index b9d663f1d9a..23174d81abf 100644 --- a/tests/contrib/fastapi/test_fastapi_appsec_iast.py +++ b/tests/contrib/fastapi/test_fastapi_appsec_iast.py @@ -26,9 +26,11 @@ from ddtrace.appsec._iast.constants import VULN_NO_HTTPONLY_COOKIE from ddtrace.appsec._iast.constants import VULN_NO_SAMESITE_COOKIE from ddtrace.appsec._iast.constants import VULN_SQL_INJECTION +from ddtrace.appsec._iast.constants import VULN_STACKTRACE_LEAK from ddtrace.contrib.internal.fastapi.patch import patch as patch_fastapi from ddtrace.contrib.internal.sqlite3.patch import patch as patch_sqlite_sqli from tests.appsec.iast.iast_utils import get_line_and_hash +from 
tests.appsec.iast.taint_sinks.test_stacktrace_leak import _load_text_stacktrace from tests.utils import override_env from tests.utils import override_global_config @@ -107,7 +109,7 @@ async def test_route(request: Request): assert result["ranges_origin"] == "http.request.parameter" -def test_query_param_name_source(fastapi_application, client, tracer, test_spans): +def test_query_param_name_source_get(fastapi_application, client, tracer, test_spans): @fastapi_application.get("/index.html") async def test_route(request: Request): query_params = [k for k in request.query_params.keys() if k == "iast_queryparam"][0] @@ -120,6 +122,8 @@ async def test_route(request: Request): "ranges_start": ranges_result[0].start, "ranges_length": ranges_result[0].length, "ranges_origin": origin_to_str(ranges_result[0].source.origin), + "ranges_origin_name": ranges_result[0].source.name, + "ranges_origin_value": ranges_result[0].source.value, } ) @@ -137,6 +141,45 @@ async def test_route(request: Request): assert result["ranges_start"] == 0 assert result["ranges_length"] == 15 assert result["ranges_origin"] == "http.request.parameter.name" + assert result["ranges_origin_name"] == "iast_queryparam" + assert result["ranges_origin_value"] == "iast_queryparam" + + +def test_query_param_name_source_post(fastapi_application, client, tracer, test_spans): + @fastapi_application.post("/index.html") + async def test_route(request: Request): + form_data = await request.form() + query_params = [k for k in form_data.keys() if k == "iast_queryparam"][0] + ranges_result = get_tainted_ranges(query_params) + + return JSONResponse( + { + "result": query_params, + "is_tainted": len(ranges_result), + "ranges_start": ranges_result[0].start, + "ranges_length": ranges_result[0].length, + "ranges_origin": origin_to_str(ranges_result[0].source.origin), + "ranges_origin_name": ranges_result[0].source.name, + "ranges_origin_value": ranges_result[0].source.value, + } + ) + + with 
override_global_config(dict(_iast_enabled=True, _iast_request_sampling=100.0)): + # disable callback + _aux_appsec_prepare_tracer(tracer) + resp = client.post( + "/index.html", + data={"iast_queryparam": "test1234"}, + ) + assert resp.status_code == 200 + result = json.loads(get_response_body(resp)) + assert result["result"] == "iast_queryparam" + assert result["is_tainted"] == 1 + assert result["ranges_start"] == 0 + assert result["ranges_length"] == 15 + assert result["ranges_origin"] == "http.request.parameter.name" + assert result["ranges_origin_name"] == "iast_queryparam" + assert result["ranges_origin_value"] == "iast_queryparam" def test_header_value_source(fastapi_application, client, tracer, test_spans): @@ -184,6 +227,8 @@ async def test_route(request: Request): "ranges_start": ranges_result[0].start, "ranges_length": ranges_result[0].length, "ranges_origin": origin_to_str(ranges_result[0].source.origin), + "ranges_origin_name": ranges_result[0].source.name, + "ranges_origin_value": ranges_result[0].source.value, } ) @@ -201,6 +246,8 @@ async def test_route(request: Request): assert result["ranges_start"] == 0 assert result["ranges_length"] == 11 assert result["ranges_origin"] == "http.request.header.name" + assert result["ranges_origin_name"] == "iast_header" + assert result["ranges_origin_value"] == "iast_header" @pytest.mark.skipif(sys.version_info < (3, 9), reason="typing.Annotated was introduced on 3.9") @@ -601,7 +648,9 @@ async def test_route(param_str): # label test_fastapi_sqli_path_parameter cur.execute(add_aspect("SELECT 1 FROM ", param_str)) - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False, _iast_request_sampling=100.0)): + with override_global_config( + dict(_iast_enabled=True, _iast_deduplication_enabled=False, _iast_request_sampling=100.0) + ): # disable callback _aux_appsec_prepare_tracer(tracer) resp = client.get( @@ -657,7 +706,9 @@ def insecure_cookie(request: Request): return response - with 
override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False, _iast_request_sampling=100.0)): + with override_global_config( + dict(_iast_enabled=True, _iast_deduplication_enabled=False, _iast_request_sampling=100.0) + ): _aux_appsec_prepare_tracer(tracer) resp = client.get( "/insecure_cookie/?iast_queryparam=insecure", @@ -698,7 +749,9 @@ def insecure_cookie(request: Request): return response - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False, _iast_request_sampling=100.0)): + with override_global_config( + dict(_iast_enabled=True, _iast_deduplication_enabled=False, _iast_request_sampling=100.0) + ): _aux_appsec_prepare_tracer(tracer) resp = client.get( "/insecure_cookie/?iast_queryparam=insecure", @@ -733,7 +786,9 @@ def insecure_cookie(request: Request): return response - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False, _iast_request_sampling=100.0)): + with override_global_config( + dict(_iast_enabled=True, _iast_deduplication_enabled=False, _iast_request_sampling=100.0) + ): _aux_appsec_prepare_tracer(tracer) resp = client.get( "/insecure_cookie/?iast_queryparam=insecure", @@ -809,7 +864,9 @@ def insecure_cookie(request: Request): return response - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False, _iast_request_sampling=100.0)): + with override_global_config( + dict(_iast_enabled=True, _iast_deduplication_enabled=False, _iast_request_sampling=100.0) + ): _aux_appsec_prepare_tracer(tracer) resp = client.get( "/insecure_cookie/?iast_queryparam=insecure", @@ -844,7 +901,9 @@ async def header_injection(request: Request): return result_response - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False, _iast_request_sampling=100.0)): + with override_global_config( + dict(_iast_enabled=True, _iast_deduplication_enabled=False, _iast_request_sampling=100.0) + ): _aux_appsec_prepare_tracer(tracer) patch_iast({"header_injection": 
True}) resp = client.get( @@ -883,7 +942,9 @@ async def header_injection_inline_response(request: Request): headers={"Header-Injection": tainted_string, "Vary": tainted_string, "Foo": "bar"}, ) - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False, _iast_request_sampling=100.0)): + with override_global_config( + dict(_iast_enabled=True, _iast_deduplication_enabled=False, _iast_request_sampling=100.0) + ): _aux_appsec_prepare_tracer(tracer) patch_iast({"header_injection": True}) resp = client.get( @@ -901,3 +962,28 @@ async def header_injection_inline_response(request: Request): assert len(loaded["vulnerabilities"]) == 1 vulnerability = loaded["vulnerabilities"][0] assert vulnerability["type"] == VULN_HEADER_INJECTION + + +def test_fastapi_stacktrace_leak(fastapi_application, client, tracer, test_spans): + @fastapi_application.get("/stacktrace_leak/", response_class=PlainTextResponse) + async def stacktrace_leak_inline_response(request: Request): + return PlainTextResponse( + content=_load_text_stacktrace(), + ) + + with override_global_config(dict(_iast_enabled=True, _iast_deduplication_enabled=False, _iast_request_sampling=100.0)): + _aux_appsec_prepare_tracer(tracer) + resp = client.get( + "/stacktrace_leak/", + ) + assert resp.status_code == 200 + + span = test_spans.pop_traces()[0][0] + assert span.get_metric(IAST.ENABLED) == 1.0 + + iast_tag = span.get_tag(IAST.JSON) + assert iast_tag is not None + loaded = json.loads(iast_tag) + assert len(loaded["vulnerabilities"]) == 1 + vulnerability = loaded["vulnerabilities"][0] + assert vulnerability["type"] == VULN_STACKTRACE_LEAK diff --git a/tests/contrib/flask/test_flask_appsec_iast.py b/tests/contrib/flask/test_flask_appsec_iast.py index bedf2b58bdc..ceeb7ecadc8 100644 --- a/tests/contrib/flask/test_flask_appsec_iast.py +++ b/tests/contrib/flask/test_flask_appsec_iast.py @@ -1,5 +1,6 @@ import json import sys +import traceback from flask import request from importlib_metadata import version 
@@ -15,6 +16,7 @@ from ddtrace.appsec._iast.constants import VULN_NO_HTTPONLY_COOKIE from ddtrace.appsec._iast.constants import VULN_NO_SAMESITE_COOKIE from ddtrace.appsec._iast.constants import VULN_SQL_INJECTION +from ddtrace.appsec._iast.constants import VULN_STACKTRACE_LEAK from ddtrace.appsec._iast.taint_sinks.header_injection import patch as patch_header_injection from ddtrace.contrib.internal.sqlite3.patch import patch as patch_sqlite_sqli from tests.appsec.iast.iast_utils import get_line_and_hash @@ -39,7 +41,7 @@ def setUp(self): with override_env({"_DD_IAST_USE_ROOT_SPAN": "false"}), override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, _iast_request_sampling=100.0, ) ): @@ -71,7 +73,7 @@ def sqli_1(param_str): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, _iast_request_sampling=100.0, ) ): @@ -123,7 +125,7 @@ def sqli_2(param_str): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, ) ): resp = self.client.post( @@ -304,7 +306,7 @@ def sqli_5(param_str, param_int): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, _iast_request_sampling=100.0, ) ): @@ -337,7 +339,9 @@ def sqli_6(param_str): class MockSpan: _trace_id_64bits = 17577308072598193742 - with override_global_config(dict(_iast_enabled=True, _deduplication_enabled=False, _iast_request_sampling=0.0)): + with override_global_config( + dict(_iast_enabled=True, _iast_deduplication_enabled=False, _iast_request_sampling=0.0) + ): oce.reconfigure() _iast_start_request(MockSpan()) resp = self.client.post("/sqli/hello/?select%20from%20table", data={"name": "test"}) @@ -367,7 +371,7 @@ def sqli_7(): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, 
_iast_request_sampling=100.0, ) ): @@ -434,7 +438,7 @@ def sqli_8(): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, ) ): if tuple(map(int, werkzeug_version.split("."))) >= (2, 3): @@ -495,7 +499,7 @@ def sqli_9(): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, ) ): resp = self.client.get("/sqli/parameter/?table=sqlite_master") @@ -550,7 +554,7 @@ def sqli_13(): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, _iast_request_sampling=100.0, ) ): @@ -608,7 +612,7 @@ def sqli_14(): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, _iast_request_sampling=100.0, ) ): @@ -673,7 +677,7 @@ def sqli_10(): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, _iast_request_sampling=100.0, ) ): @@ -1078,7 +1082,7 @@ def sqli_10(): _iast_enabled=True, _asm_enabled=True, _api_security_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, _iast_request_sampling=100.0, ) ): @@ -1171,7 +1175,7 @@ def header_injection(): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, ) ): resp = self.client.post("/header_injection/", data={"name": "test"}) @@ -1208,7 +1212,7 @@ def header_injection(): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, ) ): resp = self.client.post("/header_injection/", data={"name": "test"}) @@ -1237,7 +1241,7 @@ def header_injection(): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, ) ): resp = self.client.post("/header_injection/", data={"name": 
"test"}) @@ -1266,7 +1270,7 @@ def insecure_cookie(): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, ) ): resp = self.client.post("/insecure_cookie/", data={"name": "test"}) @@ -1304,7 +1308,7 @@ def insecure_cookie_empty(): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, ) ): resp = self.client.post("/insecure_cookie_empty/", data={"name": "test"}) @@ -1334,7 +1338,7 @@ def no_http_only_cookie(): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, ) ): resp = self.client.post("/no_http_only_cookie/", data={"name": "test"}) @@ -1372,7 +1376,7 @@ def no_http_only_cookie_empty(): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, _iast_request_sampling=100.0, ) ): @@ -1403,7 +1407,7 @@ def no_samesite_cookie(): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, ) ): resp = self.client.post("/no_samesite_cookie/", data={"name": "test"}) @@ -1441,7 +1445,7 @@ def no_samesite_cookie_empty(): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, ) ): resp = self.client.post("/no_samesite_cookie_empty/", data={"name": "test"}) @@ -1469,7 +1473,7 @@ def cookie_secure(): with override_global_config( dict( _iast_enabled=True, - _deduplication_enabled=False, + _iast_deduplication_enabled=False, _iast_request_sampling=100.0, ) ): @@ -1482,6 +1486,105 @@ def cookie_secure(): loaded = root_span.get_tag(IAST.JSON) assert loaded is None + @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") + def test_flask_stacktrace_leak(self): + @self.app.route("/stacktrace_leak/") + def stacktrace_leak(): + from 
flask import Response + + return Response( + """Traceback (most recent call last): + File "/usr/local/lib/python3.9/site-packages/some_module.py", line 42, in process_data + result = complex_calculation(data) + File "/usr/local/lib/python3.9/site-packages/another_module.py", line 158, in complex_calculation + intermediate = perform_subtask(data_slice) + File "/usr/local/lib/python3.9/site-packages/subtask_module.py", line 27, in perform_subtask + processed = handle_special_case(data_slice) + File "/usr/local/lib/python3.9/site-packages/special_cases.py", line 84, in handle_special_case + return apply_algorithm(data_slice, params) + File "/usr/local/lib/python3.9/site-packages/algorithm_module.py", line 112, in apply_algorithm + step_result = execute_step(data, params) + File "/usr/local/lib/python3.9/site-packages/step_execution.py", line 55, in execute_step + temp = pre_process(data) + File "/usr/local/lib/python3.9/site-packages/pre_processing.py", line 33, in pre_process + validated_data = validate_input(data) + File "/usr/local/lib/python3.9/site-packages/validation.py", line 66, in validate_input + check_constraints(data) + File "/usr/local/lib/python3.9/site-packages/constraints.py", line 19, in check_constraints + raise ValueError("Constraint violation at step 9") +ValueError: Constraint violation at step 9 + +Lorem Ipsum Foobar +""" + ) + + with override_global_config( + dict( + _iast_enabled=True, + _deduplication_enabled=False, + ) + ): + resp = self.client.get("/stacktrace_leak/") + assert resp.status_code == 200 + + root_span = self.pop_spans()[0] + assert root_span.get_metric(IAST.ENABLED) == 1.0 + + loaded = json.loads(root_span.get_tag(IAST.JSON)) + assert loaded["sources"] == [] + assert len(loaded["vulnerabilities"]) == 1 + vulnerability = loaded["vulnerabilities"][0] + assert vulnerability["type"] == VULN_STACKTRACE_LEAK + assert vulnerability["evidence"] == { + "valueParts": [ + {"value": 'Module: 
".usr.local.lib.python3.9.site-packages.constraints.py"\nException: ValueError'} + ] + } + + @pytest.mark.skipif(not python_supported_by_iast(), reason="Python version not supported by IAST") + def test_flask_stacktrace_leak_from_debug_page(self): + try: + from werkzeug.debug.tbtools import DebugTraceback + except ImportError: + return # this version of werkzeug does not have the DebugTraceback + + @self.app.route("/stacktrace_leak_debug/") + def stacktrace_leak(): + from flask import Response + + try: + raise ValueError() + except ValueError as exc: + dt = DebugTraceback( + exc, + traceback.TracebackException.from_exception(exc), + ) + + # Render the debugger HTML + html = dt.render_debugger_html(evalex=False, secret="test_secret", evalex_trusted=False) + return Response(html, mimetype="text/html") + + with override_global_config( + dict( + _iast_enabled=True, + _deduplication_enabled=False, + ) + ): + resp = self.client.get("/stacktrace_leak_debug/") + assert resp.status_code == 200 + + root_span = self.pop_spans()[0] + assert root_span.get_metric(IAST.ENABLED) == 1.0 + + loaded = json.loads(root_span.get_tag(IAST.JSON)) + assert loaded["sources"] == [] + assert len(loaded["vulnerabilities"]) == 1 + vulnerability = loaded["vulnerabilities"][0] + assert vulnerability["type"] == VULN_STACKTRACE_LEAK + assert "valueParts" in vulnerability["evidence"] + assert "tests.contrib.flask.test_flask_appsec_iast" in vulnerability["evidence"]["valueParts"][0]["value"] + assert "Exception: ValueError" in vulnerability["evidence"]["valueParts"][0]["value"] + class FlaskAppSecIASTDisabledTestCase(BaseFlaskTestCase): @pytest.fixture(autouse=True) diff --git a/tests/contrib/gevent/test_tracer.py b/tests/contrib/gevent/test_tracer.py index c34f06d41d6..d804a4e9c59 100644 --- a/tests/contrib/gevent/test_tracer.py +++ b/tests/contrib/gevent/test_tracer.py @@ -6,7 +6,7 @@ import ddtrace from ddtrace.constants import ERROR_MSG -from ddtrace.constants import SAMPLING_PRIORITY_KEY +from 
ddtrace.constants import _SAMPLING_PRIORITY_KEY from ddtrace.constants import USER_KEEP from ddtrace._trace.context import Context from ddtrace.contrib.internal.gevent.patch import patch @@ -143,9 +143,9 @@ def green_2(): worker_1 = spans[1] worker_2 = spans[2] # check sampling priority - assert parent_span.get_metric(SAMPLING_PRIORITY_KEY) == USER_KEEP - assert worker_1.get_metric(SAMPLING_PRIORITY_KEY) is None - assert worker_2.get_metric(SAMPLING_PRIORITY_KEY) is None + assert parent_span.get_metric(_SAMPLING_PRIORITY_KEY) == USER_KEEP + assert worker_1.get_metric(_SAMPLING_PRIORITY_KEY) is None + assert worker_2.get_metric(_SAMPLING_PRIORITY_KEY) is None def test_trace_spawn_multiple_greenlets_multiple_traces(self): # multiple greenlets must be part of the same trace diff --git a/tests/contrib/httplib/test_httplib_distributed.py b/tests/contrib/httplib/test_httplib_distributed.py index 40e5e891662..706921388bc 100644 --- a/tests/contrib/httplib/test_httplib_distributed.py +++ b/tests/contrib/httplib/test_httplib_distributed.py @@ -71,14 +71,14 @@ def test_propagation_disabled(self): def test_propagation_connection_true(self): conn = self.get_http_connection(SOCKET) - cfg = config.get_from(conn) + cfg = config._get_from(conn) cfg["distributed_tracing"] = True self.request(conn=conn) self.check_enabled() def test_propagation_connection_false(self): conn = self.get_http_connection(SOCKET) - cfg = config.get_from(conn) + cfg = config._get_from(conn) cfg["distributed_tracing"] = False self.request(conn=conn) self.check_disabled() diff --git a/tests/contrib/jinja2/test_jinja2.py b/tests/contrib/jinja2/test_jinja2.py index 64002fd6555..ce91c7e5ed0 100644 --- a/tests/contrib/jinja2/test_jinja2.py +++ b/tests/contrib/jinja2/test_jinja2.py @@ -136,7 +136,7 @@ def test_service_name(self): loader = jinja2.loaders.FileSystemLoader(TMPL_DIR) env = jinja2.Environment(loader=loader) - cfg = config.get_from(env) + cfg = config._get_from(env) cfg["service_name"] = "renderer" t = 
env.get_template("template.html") diff --git a/tests/contrib/openai/cassettes/v1/completion_stream_wrong_api_key.yaml b/tests/contrib/openai/cassettes/v1/completion_stream_wrong_api_key.yaml new file mode 100644 index 00000000000..512263ce56c --- /dev/null +++ b/tests/contrib/openai/cassettes/v1/completion_stream_wrong_api_key.yaml @@ -0,0 +1,77 @@ +interactions: +- request: + body: '{"model":"text-curie-001","prompt":"how does openai tokenize prompts?","max_tokens":150,"n":1,"stream":true,"temperature":0.8}' + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '126' + content-type: + - application/json + host: + - api.openai.com + user-agent: + - OpenAI/Python 1.59.7 + x-stainless-arch: + - arm64 + x-stainless-async: + - 'false' + x-stainless-lang: + - python + x-stainless-os: + - MacOS + x-stainless-package-version: + - 1.59.7 + x-stainless-retry-count: + - '0' + x-stainless-runtime: + - CPython + x-stainless-runtime-version: + - 3.13.1 + method: POST + uri: https://api.openai.com/v1/completions + response: + body: + string: "{\n \"error\": {\n \"message\": \"Incorrect API key provided: + sk-wrong****-key. 
You can find your API key at https://platform.openai.com/account/api-keys.\",\n + \ \"type\": \"invalid_request_error\",\n \"param\": null,\n \"code\": + \"invalid_api_key\"\n }\n}\n" + headers: + CF-Cache-Status: + - DYNAMIC + CF-RAY: + - 9058b3cc3bcdd63c-IAD + Connection: + - keep-alive + Content-Length: + - '266' + Content-Type: + - application/json; charset=utf-8 + Date: + - Tue, 21 Jan 2025 16:32:48 GMT + Server: + - cloudflare + Set-Cookie: + - __cf_bm=WUZdhCkUNTJUEkju8qgk4MKCHL7CFOaIUNvU0L9XmvA-1737477168-1.0.1.1-RJ7MOiDyJEfHrXSN0WQVgZFtkxlkwBL3p.5t3._uu77WPJSM8tYzI3wMHSu.yMwD9QkrbgR5yavkTN.RTWl_1A; + path=/; expires=Tue, 21-Jan-25 17:02:48 GMT; domain=.api.openai.com; HttpOnly; + Secure; SameSite=None + - _cfuvid=7KOfpy1ICNI532AjhDxBh2qtnyNpsjauHeWi6dEJgT4-1737477168271-0.0.1.1-604800000; + path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None + X-Content-Type-Options: + - nosniff + alt-svc: + - h3=":443"; ma=86400 + strict-transport-security: + - max-age=31536000; includeSubDomains; preload + vary: + - Origin + x-request-id: + - req_c45bfc7515dca54ef87c667f8210af23 + status: + code: 401 + message: Unauthorized +version: 1 diff --git a/tests/contrib/openai/test_openai_llmobs.py b/tests/contrib/openai/test_openai_llmobs.py index a145877c8c8..4c15b1ffad3 100644 --- a/tests/contrib/openai/test_openai_llmobs.py +++ b/tests/contrib/openai/test_openai_llmobs.py @@ -339,6 +339,10 @@ def test_completion(self, openai, ddtrace_global_config, mock_llmobs_writer, moc ) ) + @pytest.mark.skipif( + parse_version(openai_module.version.VERSION) >= (1, 60), + reason="latest openai versions use modified azure requests", + ) def test_completion_azure( self, openai, azure_openai_config, ddtrace_global_config, mock_llmobs_writer, mock_tracer ): @@ -369,6 +373,10 @@ def test_completion_azure( ) ) + @pytest.mark.skipif( + parse_version(openai_module.version.VERSION) >= (1, 60), + reason="latest openai versions use modified azure requests", + ) async def 
test_completion_azure_async( self, openai, azure_openai_config, ddtrace_global_config, mock_llmobs_writer, mock_tracer ): @@ -458,6 +466,10 @@ def test_chat_completion(self, openai, ddtrace_global_config, mock_llmobs_writer ) ) + @pytest.mark.skipif( + parse_version(openai_module.version.VERSION) >= (1, 60), + reason="latest openai versions use modified azure requests", + ) def test_chat_completion_azure( self, openai, azure_openai_config, ddtrace_global_config, mock_llmobs_writer, mock_tracer ): @@ -488,6 +500,10 @@ def test_chat_completion_azure( ) ) + @pytest.mark.skipif( + parse_version(openai_module.version.VERSION) >= (1, 60), + reason="latest openai versions use modified azure requests", + ) async def test_chat_completion_azure_async( self, openai, azure_openai_config, ddtrace_global_config, mock_llmobs_writer, mock_tracer ): diff --git a/tests/contrib/openai/test_openai_patch.py b/tests/contrib/openai/test_openai_patch.py index caab79117cf..6a995213180 100644 --- a/tests/contrib/openai/test_openai_patch.py +++ b/tests/contrib/openai/test_openai_patch.py @@ -3,14 +3,10 @@ # removed the ``_generated`` suffix from the file name, to prevent the content # from being overwritten by future re-generations. 
+from ddtrace.contrib.internal.openai.patch import OPENAI_VERSION from ddtrace.contrib.internal.openai.patch import get_version from ddtrace.contrib.internal.openai.patch import patch - - -try: - from ddtrace.contrib.internal.openai.patch import unpatch -except ImportError: - unpatch = None +from ddtrace.contrib.internal.openai.patch import unpatch from tests.contrib.patch import PatchTestCase @@ -22,10 +18,268 @@ class TestOpenaiPatch(PatchTestCase.Base): __get_version__ = get_version def assert_module_patched(self, openai): - pass + if OPENAI_VERSION >= (1, 0, 0): + if OPENAI_VERSION >= (1, 8, 0): + self.assert_wrapped(openai._base_client.SyncAPIClient._process_response) + self.assert_wrapped(openai._base_client.AsyncAPIClient._process_response) + else: + self.assert_wrapped(openai._base_client.BaseClient._process_response) + self.assert_wrapped(openai.OpenAI.__init__) + self.assert_wrapped(openai.AsyncOpenAI.__init__) + self.assert_wrapped(openai.AzureOpenAI.__init__) + self.assert_wrapped(openai.AsyncAzureOpenAI.__init__) + self.assert_wrapped(openai.resources.models.Models.list) + self.assert_wrapped(openai.resources.models.Models.retrieve) + self.assert_wrapped(openai.resources.models.Models.delete) + self.assert_wrapped(openai.resources.models.AsyncModels.list) + self.assert_wrapped(openai.resources.models.AsyncModels.retrieve) + self.assert_wrapped(openai.resources.models.AsyncModels.delete) + self.assert_wrapped(openai.resources.completions.Completions.create) + self.assert_wrapped(openai.resources.chat.Completions.create) + self.assert_wrapped(openai.resources.completions.AsyncCompletions.create) + self.assert_wrapped(openai.resources.chat.AsyncCompletions.create) + self.assert_wrapped(openai.resources.images.Images.generate) + self.assert_wrapped(openai.resources.images.Images.edit) + self.assert_wrapped(openai.resources.images.Images.create_variation) + self.assert_wrapped(openai.resources.images.AsyncImages.generate) + 
self.assert_wrapped(openai.resources.images.AsyncImages.edit) + self.assert_wrapped(openai.resources.images.AsyncImages.create_variation) + self.assert_wrapped(openai.resources.audio.Transcriptions.create) + self.assert_wrapped(openai.resources.audio.Translations.create) + self.assert_wrapped(openai.resources.audio.AsyncTranscriptions.create) + self.assert_wrapped(openai.resources.audio.AsyncTranslations.create) + self.assert_wrapped(openai.resources.embeddings.Embeddings.create) + self.assert_wrapped(openai.resources.moderations.Moderations.create) + self.assert_wrapped(openai.resources.embeddings.AsyncEmbeddings.create) + self.assert_wrapped(openai.resources.moderations.AsyncModerations.create) + self.assert_wrapped(openai.resources.files.Files.create) + self.assert_wrapped(openai.resources.files.Files.retrieve) + self.assert_wrapped(openai.resources.files.Files.list) + self.assert_wrapped(openai.resources.files.Files.delete) + self.assert_wrapped(openai.resources.files.Files.retrieve_content) + self.assert_wrapped(openai.resources.files.AsyncFiles.create) + self.assert_wrapped(openai.resources.files.AsyncFiles.retrieve) + self.assert_wrapped(openai.resources.files.AsyncFiles.list) + self.assert_wrapped(openai.resources.files.AsyncFiles.delete) + self.assert_wrapped(openai.resources.files.AsyncFiles.retrieve_content) + else: + self.assert_wrapped(openai.api_resources.completion.Completion.create) + self.assert_wrapped(openai.api_resources.completion.Completion.acreate) + self.assert_wrapped(openai.api_requestor._make_session) + self.assert_wrapped(openai.util.convert_to_openai_object) + self.assert_wrapped(openai.api_resources.embedding.Embedding.create) + self.assert_wrapped(openai.api_resources.embedding.Embedding.acreate) + if hasattr(openai, "Model"): + self.assert_wrapped(openai.api_resources.model.Model.list) + self.assert_wrapped(openai.api_resources.model.Model.retrieve) + self.assert_wrapped(openai.api_resources.model.Model.delete) + 
self.assert_wrapped(openai.api_resources.model.Model.alist) + self.assert_wrapped(openai.api_resources.model.Model.aretrieve) + self.assert_wrapped(openai.api_resources.model.Model.adelete) + if hasattr(openai, "ChatCompletion"): + self.assert_wrapped(openai.api_resources.chat_completion.ChatCompletion.create) + self.assert_wrapped(openai.api_resources.chat_completion.ChatCompletion.acreate) + if hasattr(openai, "Image"): + self.assert_wrapped(openai.api_resources.image.Image.create) + self.assert_wrapped(openai.api_resources.image.Image.acreate) + self.assert_wrapped(openai.api_resources.image.Image.create_edit) + self.assert_wrapped(openai.api_resources.image.Image.acreate_edit) + self.assert_wrapped(openai.api_resources.image.Image.create_variation) + self.assert_wrapped(openai.api_resources.image.Image.acreate_variation) + if hasattr(openai, "Audio"): + self.assert_wrapped(openai.api_resources.audio.Audio.transcribe) + self.assert_wrapped(openai.api_resources.audio.Audio.atranscribe) + self.assert_wrapped(openai.api_resources.audio.Audio.translate) + self.assert_wrapped(openai.api_resources.audio.Audio.atranslate) + if hasattr(openai, "Moderation"): + self.assert_wrapped(openai.api_resources.moderation.Moderation.create) + self.assert_wrapped(openai.api_resources.moderation.Moderation.acreate) + if hasattr(openai, "File"): + self.assert_wrapped(openai.api_resources.file.File.create) + self.assert_wrapped(openai.api_resources.file.File.retrieve) + self.assert_wrapped(openai.api_resources.file.File.list) + self.assert_wrapped(openai.api_resources.file.File.delete) + self.assert_wrapped(openai.api_resources.file.File.download) + self.assert_wrapped(openai.api_resources.file.File.acreate) + self.assert_wrapped(openai.api_resources.file.File.aretrieve) + self.assert_wrapped(openai.api_resources.file.File.alist) + self.assert_wrapped(openai.api_resources.file.File.adelete) + self.assert_wrapped(openai.api_resources.file.File.adownload) def 
assert_not_module_patched(self, openai): - pass + if OPENAI_VERSION >= (1, 0, 0): + if OPENAI_VERSION >= (1, 8, 0): + self.assert_not_wrapped(openai._base_client.SyncAPIClient._process_response) + self.assert_not_wrapped(openai._base_client.AsyncAPIClient._process_response) + else: + self.assert_not_wrapped(openai._base_client.BaseClient._process_response) + self.assert_not_wrapped(openai.OpenAI.__init__) + self.assert_not_wrapped(openai.AsyncOpenAI.__init__) + self.assert_not_wrapped(openai.AzureOpenAI.__init__) + self.assert_not_wrapped(openai.AsyncAzureOpenAI.__init__) + self.assert_not_wrapped(openai.resources.models.Models.list) + self.assert_not_wrapped(openai.resources.models.Models.retrieve) + self.assert_not_wrapped(openai.resources.models.Models.delete) + self.assert_not_wrapped(openai.resources.models.AsyncModels.list) + self.assert_not_wrapped(openai.resources.models.AsyncModels.retrieve) + self.assert_not_wrapped(openai.resources.models.AsyncModels.delete) + self.assert_not_wrapped(openai.resources.completions.Completions.create) + self.assert_not_wrapped(openai.resources.chat.Completions.create) + self.assert_not_wrapped(openai.resources.completions.AsyncCompletions.create) + self.assert_not_wrapped(openai.resources.chat.AsyncCompletions.create) + self.assert_not_wrapped(openai.resources.images.Images.generate) + self.assert_not_wrapped(openai.resources.images.Images.edit) + self.assert_not_wrapped(openai.resources.images.Images.create_variation) + self.assert_not_wrapped(openai.resources.images.AsyncImages.generate) + self.assert_not_wrapped(openai.resources.images.AsyncImages.edit) + self.assert_not_wrapped(openai.resources.images.AsyncImages.create_variation) + self.assert_not_wrapped(openai.resources.audio.Transcriptions.create) + self.assert_not_wrapped(openai.resources.audio.Translations.create) + self.assert_not_wrapped(openai.resources.audio.AsyncTranscriptions.create) + self.assert_not_wrapped(openai.resources.audio.AsyncTranslations.create) 
+ self.assert_not_wrapped(openai.resources.embeddings.Embeddings.create) + self.assert_not_wrapped(openai.resources.moderations.Moderations.create) + self.assert_not_wrapped(openai.resources.embeddings.AsyncEmbeddings.create) + self.assert_not_wrapped(openai.resources.moderations.AsyncModerations.create) + self.assert_not_wrapped(openai.resources.files.Files.create) + self.assert_not_wrapped(openai.resources.files.Files.retrieve) + self.assert_not_wrapped(openai.resources.files.Files.list) + self.assert_not_wrapped(openai.resources.files.Files.delete) + self.assert_not_wrapped(openai.resources.files.AsyncFiles.retrieve_content) + self.assert_not_wrapped(openai.resources.files.AsyncFiles.create) + self.assert_not_wrapped(openai.resources.files.AsyncFiles.retrieve) + self.assert_not_wrapped(openai.resources.files.AsyncFiles.list) + self.assert_not_wrapped(openai.resources.files.AsyncFiles.delete) + self.assert_not_wrapped(openai.resources.files.AsyncFiles.retrieve_content) + else: + self.assert_not_wrapped(openai.api_resources.completion.Completion.create) + self.assert_not_wrapped(openai.api_resources.completion.Completion.acreate) + self.assert_not_wrapped(openai.api_requestor._make_session) + self.assert_not_wrapped(openai.util.convert_to_openai_object) + self.assert_not_wrapped(openai.api_resources.embedding.Embedding.create) + self.assert_not_wrapped(openai.api_resources.embedding.Embedding.acreate) + if hasattr(openai, "Model"): + self.assert_not_wrapped(openai.api_resources.model.Model.list) + self.assert_not_wrapped(openai.api_resources.model.Model.retrieve) + self.assert_not_wrapped(openai.api_resources.model.Model.delete) + self.assert_not_wrapped(openai.api_resources.model.Model.alist) + self.assert_not_wrapped(openai.api_resources.model.Model.aretrieve) + self.assert_not_wrapped(openai.api_resources.model.Model.adelete) + if hasattr(openai, "ChatCompletion"): + self.assert_not_wrapped(openai.api_resources.chat_completion.ChatCompletion.create) + 
self.assert_not_wrapped(openai.api_resources.chat_completion.ChatCompletion.acreate) + if hasattr(openai, "Image"): + self.assert_not_wrapped(openai.api_resources.image.Image.create) + self.assert_not_wrapped(openai.api_resources.image.Image.acreate) + self.assert_not_wrapped(openai.api_resources.image.Image.create_edit) + self.assert_not_wrapped(openai.api_resources.image.Image.acreate_edit) + self.assert_not_wrapped(openai.api_resources.image.Image.create_variation) + self.assert_not_wrapped(openai.api_resources.image.Image.acreate_variation) + if hasattr(openai, "Audio"): + self.assert_not_wrapped(openai.api_resources.audio.Audio.transcribe) + self.assert_not_wrapped(openai.api_resources.audio.Audio.atranscribe) + self.assert_not_wrapped(openai.api_resources.audio.Audio.translate) + self.assert_not_wrapped(openai.api_resources.audio.Audio.atranslate) + if hasattr(openai, "Moderation"): + self.assert_not_wrapped(openai.api_resources.moderation.Moderation.create) + self.assert_not_wrapped(openai.api_resources.moderation.Moderation.acreate) + if hasattr(openai, "File"): + self.assert_not_wrapped(openai.api_resources.file.File.create) + self.assert_not_wrapped(openai.api_resources.file.File.retrieve) + self.assert_not_wrapped(openai.api_resources.file.File.list) + self.assert_not_wrapped(openai.api_resources.file.File.delete) + self.assert_not_wrapped(openai.api_resources.file.File.download) + self.assert_not_wrapped(openai.api_resources.file.File.acreate) + self.assert_not_wrapped(openai.api_resources.file.File.aretrieve) + self.assert_not_wrapped(openai.api_resources.file.File.alist) + self.assert_not_wrapped(openai.api_resources.file.File.adelete) + self.assert_not_wrapped(openai.api_resources.file.File.adownload) def assert_not_module_double_patched(self, openai): - pass + if OPENAI_VERSION >= (1, 0, 0): + if OPENAI_VERSION >= (1, 8, 0): + self.assert_not_double_wrapped(openai._base_client.SyncAPIClient._process_response) + 
self.assert_not_double_wrapped(openai._base_client.AsyncAPIClient._process_response) + else: + self.assert_not_double_wrapped(openai._base_client.BaseClient._process_response) + self.assert_not_double_wrapped(openai.OpenAI.__init__) + self.assert_not_double_wrapped(openai.AsyncOpenAI.__init__) + self.assert_not_double_wrapped(openai.AzureOpenAI.__init__) + self.assert_not_double_wrapped(openai.AsyncAzureOpenAI.__init__) + self.assert_not_double_wrapped(openai.resources.models.Models.list) + self.assert_not_double_wrapped(openai.resources.models.Models.retrieve) + self.assert_not_double_wrapped(openai.resources.models.Models.delete) + self.assert_not_double_wrapped(openai.resources.models.AsyncModels.list) + self.assert_not_double_wrapped(openai.resources.models.AsyncModels.retrieve) + self.assert_not_double_wrapped(openai.resources.models.AsyncModels.delete) + self.assert_not_double_wrapped(openai.resources.completions.Completions.create) + self.assert_not_double_wrapped(openai.resources.chat.Completions.create) + self.assert_not_double_wrapped(openai.resources.completions.AsyncCompletions.create) + self.assert_not_double_wrapped(openai.resources.chat.AsyncCompletions.create) + self.assert_not_double_wrapped(openai.resources.images.Images.generate) + self.assert_not_double_wrapped(openai.resources.images.Images.edit) + self.assert_not_double_wrapped(openai.resources.images.Images.create_variation) + self.assert_not_double_wrapped(openai.resources.images.AsyncImages.generate) + self.assert_not_double_wrapped(openai.resources.images.AsyncImages.edit) + self.assert_not_double_wrapped(openai.resources.images.AsyncImages.create_variation) + self.assert_not_double_wrapped(openai.resources.audio.Transcriptions.create) + self.assert_not_double_wrapped(openai.resources.audio.Translations.create) + self.assert_not_double_wrapped(openai.resources.audio.AsyncTranscriptions.create) + self.assert_not_double_wrapped(openai.resources.audio.AsyncTranslations.create) + 
self.assert_not_double_wrapped(openai.resources.embeddings.Embeddings.create) + self.assert_not_double_wrapped(openai.resources.moderations.Moderations.create) + self.assert_not_double_wrapped(openai.resources.embeddings.AsyncEmbeddings.create) + self.assert_not_double_wrapped(openai.resources.moderations.AsyncModerations.create) + self.assert_not_double_wrapped(openai.resources.files.Files.create) + self.assert_not_double_wrapped(openai.resources.files.Files.retrieve) + self.assert_not_double_wrapped(openai.resources.files.Files.list) + self.assert_not_double_wrapped(openai.resources.files.Files.delete) + self.assert_not_double_wrapped(openai.resources.files.Files.retrieve_content) + self.assert_not_double_wrapped(openai.resources.files.AsyncFiles.create) + self.assert_not_double_wrapped(openai.resources.files.AsyncFiles.retrieve) + self.assert_not_double_wrapped(openai.resources.files.AsyncFiles.list) + self.assert_not_double_wrapped(openai.resources.files.AsyncFiles.delete) + self.assert_not_double_wrapped(openai.resources.files.AsyncFiles.retrieve_content) + else: + self.assert_not_double_wrapped(openai.api_resources.completion.Completion.create) + self.assert_not_double_wrapped(openai.api_resources.completion.Completion.acreate) + self.assert_not_double_wrapped(openai.api_requestor._make_session) + self.assert_not_double_wrapped(openai.util.convert_to_openai_object) + self.assert_not_double_wrapped(openai.api_resources.embedding.Embedding.create) + self.assert_not_double_wrapped(openai.api_resources.embedding.Embedding.acreate) + if hasattr(openai, "Model"): + self.assert_not_double_wrapped(openai.api_resources.model.Model.list) + self.assert_not_double_wrapped(openai.api_resources.model.Model.retrieve) + self.assert_not_double_wrapped(openai.api_resources.model.Model.delete) + self.assert_not_double_wrapped(openai.api_resources.model.Model.alist) + self.assert_not_double_wrapped(openai.api_resources.model.Model.aretrieve) + 
self.assert_not_double_wrapped(openai.api_resources.model.Model.adelete) + if hasattr(openai, "ChatCompletion"): + self.assert_not_double_wrapped(openai.api_resources.chat_completion.ChatCompletion.create) + self.assert_not_double_wrapped(openai.api_resources.chat_completion.ChatCompletion.acreate) + if hasattr(openai, "Image"): + self.assert_not_double_wrapped(openai.api_resources.image.Image.create) + self.assert_not_double_wrapped(openai.api_resources.image.Image.acreate) + self.assert_not_double_wrapped(openai.api_resources.image.Image.create_edit) + self.assert_not_double_wrapped(openai.api_resources.image.Image.acreate_edit) + self.assert_not_double_wrapped(openai.api_resources.image.Image.create_variation) + self.assert_not_double_wrapped(openai.api_resources.image.Image.acreate_variation) + if hasattr(openai, "Audio"): + self.assert_not_double_wrapped(openai.api_resources.audio.Audio.transcribe) + self.assert_not_double_wrapped(openai.api_resources.audio.Audio.atranscribe) + self.assert_not_double_wrapped(openai.api_resources.audio.Audio.translate) + self.assert_not_double_wrapped(openai.api_resources.audio.Audio.atranslate) + if hasattr(openai, "Moderation"): + self.assert_not_double_wrapped(openai.api_resources.moderation.Moderation.create) + self.assert_not_double_wrapped(openai.api_resources.moderation.Moderation.acreate) + if hasattr(openai, "File"): + self.assert_not_double_wrapped(openai.api_resources.file.File.create) + self.assert_not_double_wrapped(openai.api_resources.file.File.retrieve) + self.assert_not_double_wrapped(openai.api_resources.file.File.list) + self.assert_not_double_wrapped(openai.api_resources.file.File.delete) + self.assert_not_double_wrapped(openai.api_resources.file.File.download) + self.assert_not_double_wrapped(openai.api_resources.file.File.acreate) + self.assert_not_double_wrapped(openai.api_resources.file.File.aretrieve) + self.assert_not_double_wrapped(openai.api_resources.file.File.alist) + 
self.assert_not_double_wrapped(openai.api_resources.file.File.adelete) + self.assert_not_double_wrapped(openai.api_resources.file.File.adownload) diff --git a/tests/contrib/openai/test_openai_v0.py b/tests/contrib/openai/test_openai_v0.py index 0dbd537c3ff..0a618b4bffc 100644 --- a/tests/contrib/openai/test_openai_v0.py +++ b/tests/contrib/openai/test_openai_v0.py @@ -9,9 +9,7 @@ import pytest import ddtrace -from ddtrace import patch from ddtrace.contrib.internal.openai.utils import _est_tokens -from ddtrace.contrib.internal.trace_utils import iswrapped from ddtrace.internal.utils.version import parse_version from tests.contrib.openai.utils import chat_completion_custom_functions from tests.contrib.openai.utils import chat_completion_input_description @@ -41,42 +39,6 @@ def test_config(ddtrace_config_openai, mock_tracer, openai): assert ddtrace.config.openai.metrics_enabled is ddtrace_config_openai["metrics_enabled"] -def test_patching(openai): - """Ensure that the correct objects are patched and not double patched.""" - - # for some reason these can't be specified as the real python objects... - # no clue why (eg. 
openai.Completion.create doesn't work) - methods = [ - (openai.Completion, "create"), - (openai.api_resources.completion.Completion, "create"), - (openai.Completion, "acreate"), - (openai.api_resources.completion.Completion, "acreate"), - (openai.api_requestor, "_make_session"), - (openai.util, "convert_to_openai_object"), - (openai.Embedding, "create"), - (openai.Embedding, "acreate"), - ] - if hasattr(openai, "ChatCompletion"): - methods += [ - (openai.ChatCompletion, "create"), - (openai.api_resources.chat_completion.ChatCompletion, "create"), - (openai.ChatCompletion, "acreate"), - (openai.api_resources.chat_completion.ChatCompletion, "acreate"), - ] - - for m in methods: - assert not iswrapped(getattr(m[0], m[1])) - - patch(openai=True) - for m in methods: - assert iswrapped(getattr(m[0], m[1])) - - # Ensure double patching does not occur - patch(openai=True) - for m in methods: - assert not iswrapped(getattr(m[0], m[1]).__dd_wrapped__) - - @pytest.mark.parametrize("api_key_in_env", [True, False]) def test_model_list(api_key_in_env, request_api_key, openai, openai_vcr, mock_metrics, snapshot_tracer): with snapshot_context( diff --git a/tests/contrib/openai/test_openai_v1.py b/tests/contrib/openai/test_openai_v1.py index 47ed05ea1bd..438b980d5b5 100644 --- a/tests/contrib/openai/test_openai_v1.py +++ b/tests/contrib/openai/test_openai_v1.py @@ -5,9 +5,7 @@ import pytest import ddtrace -from ddtrace import patch from ddtrace.contrib.internal.openai.utils import _est_tokens -from ddtrace.contrib.internal.trace_utils import iswrapped from ddtrace.internal.utils.version import parse_version from tests.contrib.openai.utils import chat_completion_custom_functions from tests.contrib.openai.utils import chat_completion_input_description @@ -37,56 +35,6 @@ def test_config(ddtrace_config_openai, mock_tracer, openai): assert ddtrace.config.openai.metrics_enabled is ddtrace_config_openai["metrics_enabled"] -def test_patching(openai): - """Ensure that the correct objects 
are patched and not double patched.""" - methods = [ - (openai.resources.completions.Completions, "create"), - (openai.resources.completions.AsyncCompletions, "create"), - (openai.resources.chat.Completions, "create"), - (openai.resources.chat.AsyncCompletions, "create"), - (openai.resources.embeddings.Embeddings, "create"), - (openai.resources.embeddings.AsyncEmbeddings, "create"), - (openai.resources.models.Models, "list"), - (openai.resources.models.Models, "retrieve"), - (openai.resources.models.AsyncModels, "list"), - (openai.resources.models.AsyncModels, "retrieve"), - (openai.resources.images.Images, "generate"), - (openai.resources.images.Images, "edit"), - (openai.resources.images.Images, "create_variation"), - (openai.resources.images.AsyncImages, "generate"), - (openai.resources.images.AsyncImages, "edit"), - (openai.resources.images.AsyncImages, "create_variation"), - (openai.resources.audio.Transcriptions, "create"), - (openai.resources.audio.AsyncTranscriptions, "create"), - (openai.resources.audio.Translations, "create"), - (openai.resources.audio.AsyncTranslations, "create"), - (openai.resources.moderations.Moderations, "create"), - (openai.resources.moderations.AsyncModerations, "create"), - (openai.resources.files.Files, "create"), - (openai.resources.files.Files, "retrieve"), - (openai.resources.files.Files, "list"), - (openai.resources.files.Files, "delete"), - (openai.resources.files.Files, "retrieve_content"), - (openai.resources.files.AsyncFiles, "create"), - (openai.resources.files.AsyncFiles, "retrieve"), - (openai.resources.files.AsyncFiles, "list"), - (openai.resources.files.AsyncFiles, "delete"), - (openai.resources.files.AsyncFiles, "retrieve_content"), - ] - - for m in methods: - assert not iswrapped(getattr(m[0], m[1])) - - patch(openai=True) - for m in methods: - assert iswrapped(getattr(m[0], m[1])) - - # Ensure double patching does not occur - patch(openai=True) - for m in methods: - assert not iswrapped(getattr(m[0], 
m[1]).__dd_wrapped__) - - @pytest.mark.parametrize("api_key_in_env", [True, False]) def test_model_list(api_key_in_env, request_api_key, openai, openai_vcr, mock_metrics, snapshot_tracer): with snapshot_context( @@ -908,17 +856,16 @@ def test_misuse(openai, snapshot_tracer): ) def test_span_finish_on_stream_error(openai, openai_vcr, snapshot_tracer): with openai_vcr.use_cassette("completion_stream_wrong_api_key.yaml"): - with pytest.raises(openai.APIConnectionError): - with pytest.raises(openai.AuthenticationError): - client = openai.OpenAI(api_key="sk-wrong-api-key") - client.completions.create( - model="text-curie-001", - prompt="how does openai tokenize prompts?", - temperature=0.8, - n=1, - max_tokens=150, - stream=True, - ) + with pytest.raises((openai.APIConnectionError, openai.AuthenticationError)): + client = openai.OpenAI(api_key="sk-wrong-api-key") + client.completions.create( + model="text-curie-001", + prompt="how does openai tokenize prompts?", + temperature=0.8, + n=1, + max_tokens=150, + stream=True, + ) @pytest.mark.snapshot @@ -1383,6 +1330,9 @@ def test_est_tokens(): ) # oracle: 92 +@pytest.mark.skipif( + parse_version(openai_module.version.VERSION) >= (1, 60), reason="latest openai versions use modified azure requests" +) @pytest.mark.snapshot( token="tests.contrib.openai.test_openai.test_azure_openai_completion", ignores=["meta.http.useragent", "meta.openai.api_base", "meta.openai.api_type", "meta.openai.api_version"], @@ -1405,6 +1355,9 @@ def test_azure_openai_completion(openai, azure_openai_config, openai_vcr, snapsh ) +@pytest.mark.skipif( + parse_version(openai_module.version.VERSION) >= (1, 60), reason="latest openai versions use modified azure requests" +) @pytest.mark.snapshot( token="tests.contrib.openai.test_openai.test_azure_openai_completion", ignores=[ @@ -1434,6 +1387,9 @@ async def test_azure_openai_acompletion(openai, azure_openai_config, openai_vcr, ) +@pytest.mark.skipif( + parse_version(openai_module.version.VERSION) >= (1, 
60), reason="latest openai versions use modified azure requests" +) @pytest.mark.snapshot( token="tests.contrib.openai.test_openai.test_azure_openai_chat_completion", ignores=["meta.http.useragent", "meta.openai.api_base", "meta.openai.api_type", "meta.openai.api_version"], @@ -1456,6 +1412,9 @@ def test_azure_openai_chat_completion(openai, azure_openai_config, openai_vcr, s ) +@pytest.mark.skipif( + parse_version(openai_module.version.VERSION) >= (1, 60), reason="latest openai versions use modified azure requests" +) @pytest.mark.snapshot( token="tests.contrib.openai.test_openai.test_azure_openai_chat_completion", ignores=["meta.http.useragent", "meta.openai.api_base", "meta.openai.api_type", "meta.openai.api_version"], @@ -1478,6 +1437,9 @@ async def test_azure_openai_chat_acompletion(openai, azure_openai_config, openai ) +@pytest.mark.skipif( + parse_version(openai_module.version.VERSION) >= (1, 60), reason="latest openai versions use modified azure requests" +) @pytest.mark.snapshot( token="tests.contrib.openai.test_openai.test_azure_openai_embedding", ignores=["meta.http.useragent", "meta.openai.api_base", "meta.openai.api_type", "meta.openai.api_version"], @@ -1497,6 +1459,9 @@ def test_azure_openai_embedding(openai, azure_openai_config, openai_vcr, snapsho ) +@pytest.mark.skipif( + parse_version(openai_module.version.VERSION) >= (1, 60), reason="latest openai versions use modified azure requests" +) @pytest.mark.snapshot( token="tests.contrib.openai.test_openai.test_azure_openai_embedding", ignores=["meta.http.useragent", "meta.openai.api_base", "meta.openai.api_type", "meta.openai.api_version"], diff --git a/tests/contrib/pyramid/test_pyramid.py b/tests/contrib/pyramid/test_pyramid.py index 3cc9b2688ca..28fd7616cdd 100644 --- a/tests/contrib/pyramid/test_pyramid.py +++ b/tests/contrib/pyramid/test_pyramid.py @@ -4,8 +4,8 @@ import pytest from ddtrace import config -from ddtrace.constants import ORIGIN_KEY -from ddtrace.constants import SAMPLING_PRIORITY_KEY 
+from ddtrace.constants import _ORIGIN_KEY +from ddtrace.constants import _SAMPLING_PRIORITY_KEY from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME from tests.utils import TracerTestCase from tests.utils import flaky @@ -89,8 +89,8 @@ def test_distributed_tracing(self): assert span.get_tag("span.kind") == "server" assert span.trace_id == 100 assert span.parent_id == 42 - assert span.get_metric(SAMPLING_PRIORITY_KEY) == 2 - assert span.get_tag(ORIGIN_KEY) == "synthetics" + assert span.get_metric(_SAMPLING_PRIORITY_KEY) == 2 + assert span.get_tag(_ORIGIN_KEY) == "synthetics" def test_distributed_tracing_patterned(self): # ensure the Context is properly created @@ -112,8 +112,8 @@ def test_distributed_tracing_patterned(self): assert span.get_tag("http.route") == "/hello/{param}" assert span.trace_id == 100 assert span.parent_id == 42 - assert span.get_metric(SAMPLING_PRIORITY_KEY) == 2 - assert span.get_tag(ORIGIN_KEY) == "synthetics" + assert span.get_metric(_SAMPLING_PRIORITY_KEY) == 2 + assert span.get_tag(_ORIGIN_KEY) == "synthetics" class TestPyramidDistributedTracingDisabled(PyramidBase): @@ -141,8 +141,8 @@ def test_distributed_tracing_disabled(self): assert span.get_tag("span.kind") == "server" assert span.trace_id != 100 assert span.parent_id != 42 - assert span.get_metric(SAMPLING_PRIORITY_KEY) != 2 - assert span.get_tag(ORIGIN_KEY) != "synthetics" + assert span.get_metric(_SAMPLING_PRIORITY_KEY) != 2 + assert span.get_tag(_ORIGIN_KEY) != "synthetics" class TestSchematization(PyramidBase): diff --git a/tests/contrib/pytest/test_pytest.py b/tests/contrib/pytest/test_pytest.py index 267a9d97eac..3918b82174b 100644 --- a/tests/contrib/pytest/test_pytest.py +++ b/tests/contrib/pytest/test_pytest.py @@ -7,8 +7,8 @@ import pytest import ddtrace +from ddtrace.constants import _SAMPLING_PRIORITY_KEY from ddtrace.constants import ERROR_MSG -from ddtrace.constants import SAMPLING_PRIORITY_KEY from ddtrace.contrib.internal.pytest._utils import _USE_PLUGIN_V2 
from ddtrace.contrib.internal.pytest.constants import XFAIL_REASON from ddtrace.contrib.internal.pytest.patch import get_version @@ -805,7 +805,7 @@ def test_sample_priority(): spans = self.pop_spans() assert len(spans) == 4 - assert spans[0].get_metric(SAMPLING_PRIORITY_KEY) == 1 + assert spans[0].get_metric(_SAMPLING_PRIORITY_KEY) == 1 def test_pytest_exception(self): """Test that pytest sets exception information correctly.""" diff --git a/tests/contrib/pytest/test_pytest_atr.py b/tests/contrib/pytest/test_pytest_atr.py index ebb4f8421d8..92012461ed0 100644 --- a/tests/contrib/pytest/test_pytest_atr.py +++ b/tests/contrib/pytest/test_pytest_atr.py @@ -24,17 +24,25 @@ ) _TEST_PASS_CONTENT = """ +import unittest + def test_func_pass(): assert True + +class SomeTestCase(unittest.TestCase): + def test_class_func_pass(self): + assert True """ _TEST_FAIL_CONTENT = """ import pytest +import unittest def test_func_fail(): assert False _test_func_retries_skip_count = 0 + def test_func_retries_skip(): global _test_func_retries_skip_count _test_func_retries_skip_count += 1 @@ -42,9 +50,24 @@ def test_func_retries_skip(): pytest.skip() assert False +_test_class_func_retries_skip_count = 0 + +class SomeTestCase(unittest.TestCase): + def test_class_func_fail(self): + assert False + + def test_class_func_retries_skip(self): + global _test_class_func_retries_skip_count + _test_class_func_retries_skip_count += 1 + if _test_class_func_retries_skip_count > 1: + pytest.skip() + assert False """ + _TEST_PASS_ON_RETRIES_CONTENT = """ +import unittest + _test_func_passes_4th_retry_count = 0 def test_func_passes_4th_retry(): global _test_func_passes_4th_retry_count @@ -56,10 +79,19 @@ def test_func_passes_1st_retry(): global _test_func_passes_1st_retry_count _test_func_passes_1st_retry_count += 1 assert _test_func_passes_1st_retry_count == 2 + +class SomeTestCase(unittest.TestCase): + _test_func_passes_4th_retry_count = 0 + + def test_func_passes_4th_retry(self): + 
SomeTestCase._test_func_passes_4th_retry_count += 1 + assert SomeTestCase._test_func_passes_4th_retry_count == 5 + """ _TEST_ERRORS_CONTENT = """ import pytest +import unittest @pytest.fixture def fixture_fails_setup(): @@ -79,6 +111,7 @@ def test_func_fails_teardown(fixture_fails_teardown): _TEST_SKIP_CONTENT = """ import pytest +import unittest @pytest.mark.skip def test_func_skip_mark(): @@ -86,6 +119,14 @@ def test_func_skip_mark(): def test_func_skip_inside(): pytest.skip() + +class SomeTestCase(unittest.TestCase): + @pytest.mark.skip + def test_class_func_skip_mark(self): + assert True + + def test_class_func_skip_inside(self): + pytest.skip() """ @@ -105,7 +146,7 @@ def test_pytest_atr_no_ddtrace_does_not_retry(self): self.testdir.makepyfile(test_pass_on_retries=_TEST_PASS_ON_RETRIES_CONTENT) self.testdir.makepyfile(test_skip=_TEST_SKIP_CONTENT) rec = self.inline_run() - rec.assertoutcome(passed=2, failed=6, skipped=2) + rec.assertoutcome(passed=3, failed=9, skipped=4) assert len(self.pop_spans()) == 0 def test_pytest_atr_env_var_disables_retrying(self): @@ -117,7 +158,7 @@ def test_pytest_atr_env_var_disables_retrying(self): with mock.patch("ddtrace.internal.ci_visibility.recorder.ddconfig", _get_default_civisibility_ddconfig()): rec = self.inline_run("--ddtrace", "-s", extra_env={"DD_CIVISIBILITY_FLAKY_RETRY_ENABLED": "0"}) - rec.assertoutcome(passed=2, failed=6, skipped=2) + rec.assertoutcome(passed=3, failed=9, skipped=4) assert len(self.pop_spans()) > 0 def test_pytest_atr_env_var_does_not_override_api(self): @@ -133,7 +174,7 @@ def test_pytest_atr_env_var_does_not_override_api(self): return_value=TestVisibilityAPISettings(flaky_test_retries_enabled=False), ): rec = self.inline_run("--ddtrace", extra_env={"DD_CIVISIBILITY_FLAKY_RETRY_ENABLED": "1"}) - rec.assertoutcome(passed=2, failed=6, skipped=2) + rec.assertoutcome(passed=3, failed=9, skipped=4) assert len(self.pop_spans()) > 0 def test_pytest_atr_spans(self): @@ -174,6 +215,15 @@ def 
test_pytest_atr_spans(self): func_fail_retries += 1 assert func_fail_retries == 5 + class_func_fail_spans = _get_spans_from_list(spans, "test", "SomeTestCase::test_class_func_fail") + assert len(class_func_fail_spans) == 6 + class_func_fail_retries = 0 + for class_func_fail_span in class_func_fail_spans: + assert class_func_fail_span.get_tag("test.status") == "fail" + if class_func_fail_span.get_tag("test.is_retry") == "true": + class_func_fail_retries += 1 + assert class_func_fail_retries == 5 + func_fail_skip_spans = _get_spans_from_list(spans, "test", "test_func_retries_skip") assert len(func_fail_skip_spans) == 6 func_fail_skip_retries = 0 @@ -184,6 +234,18 @@ def test_pytest_atr_spans(self): func_fail_skip_retries += 1 assert func_fail_skip_retries == 5 + class_func_fail_skip_spans = _get_spans_from_list(spans, "test", "SomeTestCase::test_class_func_retries_skip") + assert len(class_func_fail_skip_spans) == 6 + class_func_fail_skip_retries = 0 + for class_func_fail_skip_span in class_func_fail_skip_spans: + class_func_fail_skip_is_retry = class_func_fail_skip_span.get_tag("test.is_retry") == "true" + assert class_func_fail_skip_span.get_tag("test.status") == ( + "skip" if class_func_fail_skip_is_retry else "fail" + ) + if class_func_fail_skip_is_retry: + class_func_fail_skip_retries += 1 + assert class_func_fail_skip_retries == 5 + func_pass_spans = _get_spans_from_list(spans, "test", "test_func_pass") assert len(func_pass_spans) == 1 assert func_pass_spans[0].get_tag("test.status") == "pass" @@ -201,7 +263,17 @@ def test_pytest_atr_spans(self): assert func_skip_inside_spans[0].get_tag("test.status") == "skip" assert func_skip_inside_spans[0].get_tag("test.is_retry") is None - assert len(spans) == 31 + class_func_skip_mark_spans = _get_spans_from_list(spans, "test", "SomeTestCase::test_class_func_skip_mark") + assert len(class_func_skip_mark_spans) == 1 + assert class_func_skip_mark_spans[0].get_tag("test.status") == "skip" + assert 
class_func_skip_mark_spans[0].get_tag("test.is_retry") is None + + class_func_skip_inside_spans = _get_spans_from_list(spans, "test", "SomeTestCase::test_class_func_skip_inside") + assert len(class_func_skip_inside_spans) == 1 + assert class_func_skip_inside_spans[0].get_tag("test.status") == "skip" + assert class_func_skip_inside_spans[0].get_tag("test.is_retry") is None + + assert len(spans) == 51 def test_pytest_atr_fails_session_when_test_fails(self): self.testdir.makepyfile(test_pass=_TEST_PASS_CONTENT) @@ -212,7 +284,7 @@ def test_pytest_atr_fails_session_when_test_fails(self): rec = self.inline_run("--ddtrace") spans = self.pop_spans() assert rec.ret == 1 - assert len(spans) == 28 + assert len(spans) == 48 def test_pytest_atr_passes_session_when_test_pass(self): self.testdir.makepyfile(test_pass=_TEST_PASS_CONTENT) @@ -222,9 +294,14 @@ def test_pytest_atr_passes_session_when_test_pass(self): rec = self.inline_run("--ddtrace") spans = self.pop_spans() assert rec.ret == 0 - assert len(spans) == 15 + assert len(spans) == 23 def test_pytest_atr_does_not_retry_failed_setup_or_teardown(self): + # NOTE: This feature only works for regular pytest tests. For tests inside unittest classes, setup and teardown + # happens at the 'call' phase, and we don't have a way to detect that the error happened during setup/teardown, + # so tests will be retried as if they were failing tests. + # See . 
+ self.testdir.makepyfile(test_errors=_TEST_ERRORS_CONTENT) rec = self.inline_run("--ddtrace") spans = self.pop_spans() diff --git a/tests/contrib/requests/test_requests.py b/tests/contrib/requests/test_requests.py index 2a892a218f7..bab742a6cd8 100644 --- a/tests/contrib/requests/test_requests.py +++ b/tests/contrib/requests/test_requests.py @@ -281,7 +281,7 @@ def test_default_service_name(self): def test_user_set_service_name(self): # ensure a service name set by the user has precedence - cfg = config.get_from(self.session) + cfg = config._get_from(self.session) cfg["service_name"] = "clients" out = self.session.get(URL_200) assert out.status_code == 200 @@ -292,7 +292,7 @@ def test_user_set_service_name(self): def test_user_set_service(self): # ensure a service name set by the user has precedence - cfg = config.get_from(self.session) + cfg = config._get_from(self.session) cfg["service"] = "clients" out = self.session.get(URL_200) assert out.status_code == 200 @@ -333,7 +333,7 @@ def test_parent_without_service_name(self): def test_user_service_name_precedence(self): # ensure the user service name takes precedence over # the parent Span - cfg = config.get_from(self.session) + cfg = config._get_from(self.session) cfg["service_name"] = "clients" with self.tracer.trace("parent.span", service="web"): out = self.session.get(URL_200) @@ -349,7 +349,7 @@ def test_user_service_name_precedence(self): def test_split_by_domain_with_ampersat(self): # Regression test for: https://github.com/DataDog/dd-trace-py/issues/4062 # ensure a service name is generated by the domain name - cfg = config.get_from(self.session) + cfg = config._get_from(self.session) cfg["split_by_domain"] = True # domain name should take precedence over monkey_service cfg["service_name"] = "monkey_service" @@ -368,7 +368,7 @@ def test_split_by_domain_with_ampersat(self): def test_split_by_domain(self): # ensure a service name is generated by the domain name # of the ongoing call - cfg = 
config.get_from(self.session) + cfg = config._get_from(self.session) cfg["split_by_domain"] = True out = self.session.get(URL_200) assert out.status_code == 200 @@ -383,7 +383,7 @@ def test_split_by_domain(self): def test_split_by_domain_precedence(self): # ensure the split by domain has precedence all the time - cfg = config.get_from(self.session) + cfg = config._get_from(self.session) cfg["split_by_domain"] = True cfg["service_name"] = "intake" out = self.session.get(URL_200) @@ -400,7 +400,7 @@ def test_split_by_domain_precedence(self): def test_split_by_domain_wrong(self): # ensure the split by domain doesn't crash in case of a wrong URL; # in that case, no spans are created - cfg = config.get_from(self.session) + cfg = config._get_from(self.session) cfg["split_by_domain"] = True with pytest.raises((MissingSchema, InvalidURL)): self.session.get("http:/some>thing") @@ -411,7 +411,7 @@ def test_split_by_domain_wrong(self): def test_split_by_domain_remove_auth_in_url(self): # ensure that auth details are stripped from URL - cfg = config.get_from(self.session) + cfg = config._get_from(self.session) cfg["split_by_domain"] = True out = self.session.get(f"http://user:pass@{HOST_AND_PORT}") assert out.status_code == 200 @@ -425,7 +425,7 @@ def test_split_by_domain_remove_auth_in_url(self): def test_split_by_domain_includes_port(self): # ensure that port is included if present in URL - cfg = config.get_from(self.session) + cfg = config._get_from(self.session) cfg["split_by_domain"] = True out = self.session.get(f"http://{HOST_AND_PORT}") assert out.status_code == 200 @@ -439,7 +439,7 @@ def test_split_by_domain_includes_port(self): def test_split_by_domain_includes_port_path(self): # ensure that port is included if present in URL but not path - cfg = config.get_from(self.session) + cfg = config._get_from(self.session) cfg["split_by_domain"] = True out = self.session.get(f"http://{HOST_AND_PORT}/anything/v1/foo") assert out.status_code == 200 @@ -458,7 +458,7 @@ def 
test_global_config_service_env_precedence(self): spans = self.pop_spans() assert spans[0].service == "override" - cfg = config.get_from(self.session) + cfg = config._get_from(self.session) cfg["service"] = "override2" out = self.session.get(URL_200) assert out.status_code == 200 diff --git a/tests/contrib/requests/test_requests_distributed.py b/tests/contrib/requests/test_requests_distributed.py index b3974700f87..51ce0a500a9 100644 --- a/tests/contrib/requests/test_requests_distributed.py +++ b/tests/contrib/requests/test_requests_distributed.py @@ -77,7 +77,7 @@ def matcher(request): def test_propagation_true(self): # ensure distributed tracing can be enabled manually - cfg = config.get_from(self.session) + cfg = config._get_from(self.session) cfg["distributed_tracing"] = True adapter = Adapter() self.session.mount("mock", adapter) @@ -101,7 +101,7 @@ def matcher(request): def test_propagation_false(self): # ensure distributed tracing can be disabled manually - cfg = config.get_from(self.session) + cfg = config._get_from(self.session) cfg["distributed_tracing"] = False adapter = Adapter() self.session.mount("mock", adapter) @@ -121,7 +121,7 @@ def test_propagation_apm_opt_out_true(self): with self.override_global_config(dict(_appsec_standalone_enabled=True, _asm_enabled=True)): assert asm_config._apm_opt_out self.tracer.enabled = False - cfg = config.get_from(self.session) + cfg = config._get_from(self.session) cfg["distributed_tracing"] = True adapter = Adapter() self.session.mount("mock", adapter) @@ -149,7 +149,7 @@ def test_propagation_apm_opt_out_false(self): assert not asm_config._apm_opt_out self.tracer.enabled = False - cfg = config.get_from(self.session) + cfg = config._get_from(self.session) cfg["distributed_tracing"] = True adapter = Adapter() self.session.mount("mock", adapter) diff --git a/tests/contrib/tornado/test_tornado_web.py b/tests/contrib/tornado/test_tornado_web.py index 1630f8b16b3..554d3edbf8c 100644 --- 
a/tests/contrib/tornado/test_tornado_web.py +++ b/tests/contrib/tornado/test_tornado_web.py @@ -2,9 +2,9 @@ import tornado from ddtrace import config +from ddtrace.constants import _ORIGIN_KEY +from ddtrace.constants import _SAMPLING_PRIORITY_KEY from ddtrace.constants import ERROR_MSG -from ddtrace.constants import ORIGIN_KEY -from ddtrace.constants import SAMPLING_PRIORITY_KEY from ddtrace.ext import http from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME from tests.opentracer.utils import init_tracer @@ -212,13 +212,13 @@ def test_http_exception_500_handler(self): def test_http_exception_500_handler_ignored_exception(self): # it should trace a handler that raises a Tornado HTTPError # The exception should NOT be set on the span - prev_error_statuses = config.http_server.error_statuses + prev_error_statuses = config._http_server.error_statuses try: - config.http_server.error_statuses = "501-599" + config._http_server.error_statuses = "501-599" response = self.fetch("/http_exception_500/") assert 500 == response.code finally: - config.http_server.error_statuses = prev_error_statuses + config._http_server.error_statuses = prev_error_statuses traces = self.pop_traces() assert 1 == len(traces) @@ -379,7 +379,7 @@ def test_propagation(self): # check propagation assert 1234 == request_span.trace_id assert 4567 == request_span.parent_id - assert 2 == request_span.get_metric(SAMPLING_PRIORITY_KEY) + assert 2 == request_span.get_metric(_SAMPLING_PRIORITY_KEY) assert request_span.get_tag("component") == "tornado" assert request_span.get_tag("span.kind") == "server" @@ -463,8 +463,8 @@ def test_no_propagation(self): # check non-propagation assert request_span.trace_id != 1234 assert request_span.parent_id != 4567 - assert request_span.get_metric(SAMPLING_PRIORITY_KEY) != 2 - assert request_span.get_tag(ORIGIN_KEY) != "synthetics" + assert request_span.get_metric(_SAMPLING_PRIORITY_KEY) != 2 + assert request_span.get_tag(_ORIGIN_KEY) != "synthetics" assert 
request_span.get_tag("component") == "tornado" assert request_span.get_tag("span.kind") == "server" @@ -502,8 +502,8 @@ def test_no_propagation_via_int_config(self): # check non-propagation assert request_span.trace_id != 1234 assert request_span.parent_id != 4567 - assert request_span.get_metric(SAMPLING_PRIORITY_KEY) != 2 - assert request_span.get_tag(ORIGIN_KEY) != "synthetics" + assert request_span.get_metric(_SAMPLING_PRIORITY_KEY) != 2 + assert request_span.get_tag(_ORIGIN_KEY) != "synthetics" assert request_span.get_tag("component") == "tornado" assert request_span.get_tag("span.kind") == "server" @@ -536,8 +536,8 @@ def test_no_propagation_via_env_var(self): # check non-propagation assert request_span.trace_id != 1234 assert request_span.parent_id != 4567 - assert request_span.get_metric(SAMPLING_PRIORITY_KEY) != 2 - assert request_span.get_tag(ORIGIN_KEY) != "synthetics" + assert request_span.get_metric(_SAMPLING_PRIORITY_KEY) != 2 + assert request_span.get_tag(_ORIGIN_KEY) != "synthetics" assert request_span.get_tag("component") == "tornado" assert request_span.get_tag("span.kind") == "server" diff --git a/tests/contrib/vertexai/test_vertexai.py b/tests/contrib/vertexai/test_vertexai.py index 5f07c6e177f..afcbfea39ba 100644 --- a/tests/contrib/vertexai/test_vertexai.py +++ b/tests/contrib/vertexai/test_vertexai.py @@ -42,7 +42,7 @@ def test_vertexai_completion(vertexai): llm = vertexai.generative_models.GenerativeModel("gemini-1.5-flash") llm._prediction_client.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_1)) llm.generate_content( - "Why do bears hibernate?", + contents="Why do bears hibernate?", generation_config=vertexai.generative_models.GenerationConfig( stop_sequences=["x"], max_output_tokens=30, temperature=1.0 ), @@ -118,7 +118,7 @@ def test_vertexai_completion_stream(vertexai): (_mock_completion_stream_chunk(chunk) for chunk in MOCK_COMPLETION_STREAM_CHUNKS) ] response = llm.generate_content( - "How big is 
the solar system?", + contents="How big is the solar system?", generation_config=vertexai.generative_models.GenerationConfig( stop_sequences=["x"], max_output_tokens=30, temperature=1.0 ), @@ -278,7 +278,7 @@ def test_vertexai_chat(vertexai): llm._prediction_client.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_1)) chat = llm.start_chat() chat.send_message( - "Why do bears hibernate?", + content="Why do bears hibernate?", generation_config=vertexai.generative_models.GenerationConfig( stop_sequences=["x"], max_output_tokens=30, temperature=1.0 ), @@ -371,7 +371,7 @@ def test_vertexai_chat_stream(vertexai): ] chat = llm.start_chat() response = chat.send_message( - "How big is the solar system?", + content="How big is the solar system?", generation_config=vertexai.generative_models.GenerationConfig( stop_sequences=["x"], max_output_tokens=30, temperature=1.0 ), diff --git a/tests/contrib/vertexai/test_vertexai_llmobs.py b/tests/contrib/vertexai/test_vertexai_llmobs.py index 78a03bc664c..701f709e213 100644 --- a/tests/contrib/vertexai/test_vertexai_llmobs.py +++ b/tests/contrib/vertexai/test_vertexai_llmobs.py @@ -21,7 +21,7 @@ def test_completion(self, vertexai, mock_llmobs_writer, mock_tracer): llm = vertexai.generative_models.GenerativeModel("gemini-1.5-flash") llm._prediction_client.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_1)) llm.generate_content( - "Why do bears hibernate?", + contents="Why do bears hibernate?", generation_config=vertexai.generative_models.GenerationConfig( stop_sequences=["x"], max_output_tokens=30, temperature=1.0 ), @@ -126,7 +126,7 @@ def test_completion_stream(self, vertexai, mock_llmobs_writer, mock_tracer): (_mock_completion_stream_chunk(chunk) for chunk in MOCK_COMPLETION_STREAM_CHUNKS) ] response = llm.generate_content( - "How big is the solar system?", + contents="How big is the solar system?", 
generation_config=vertexai.generative_models.GenerationConfig( stop_sequences=["x"], max_output_tokens=30, temperature=1.0 ), @@ -293,7 +293,7 @@ def test_chat(self, vertexai, mock_llmobs_writer, mock_tracer): llm._prediction_client.responses["generate_content"].append(_mock_completion_response(MOCK_COMPLETION_SIMPLE_1)) chat = llm.start_chat() chat.send_message( - "Why do bears hibernate?", + content="Why do bears hibernate?", generation_config=vertexai.generative_models.GenerationConfig( stop_sequences=["x"], max_output_tokens=30, temperature=1.0 ), @@ -389,7 +389,7 @@ def test_chat_stream(self, vertexai, mock_llmobs_writer, mock_tracer): ] chat = llm.start_chat() response = chat.send_message( - "How big is the solar system?", + content="How big is the solar system?", generation_config=vertexai.generative_models.GenerationConfig( stop_sequences=["x"], max_output_tokens=30, temperature=1.0 ), diff --git a/tests/contrib/vertica/test_vertica.py b/tests/contrib/vertica/test_vertica.py index 196e1621ee5..d3fb9709bea 100644 --- a/tests/contrib/vertica/test_vertica.py +++ b/tests/contrib/vertica/test_vertica.py @@ -9,7 +9,7 @@ from ddtrace.contrib.internal.vertica.patch import patch from ddtrace.contrib.internal.vertica.patch import unpatch from ddtrace.internal.schema import DEFAULT_SPAN_SERVICE_NAME -from ddtrace.settings.config import _deepmerge +from ddtrace.settings._config import _deepmerge from ddtrace.trace import Pin from tests.contrib.config import VERTICA_CONFIG from tests.opentracer.utils import init_tracer diff --git a/tests/debugging/test_debugger.py b/tests/debugging/test_debugger.py index bacfcbcdd45..c9f04cee6ee 100644 --- a/tests/debugging/test_debugger.py +++ b/tests/debugging/test_debugger.py @@ -8,7 +8,7 @@ import pytest import ddtrace -from ddtrace.constants import ORIGIN_KEY +from ddtrace.constants import _ORIGIN_KEY from ddtrace.debugging._debugger import DebuggerWrappingContext from ddtrace.debugging._probe.model import DDExpression from 
ddtrace.debugging._probe.model import MetricProbeKind @@ -929,7 +929,7 @@ def test_debugger_span_probe(self): tags = span.get_tags() assert tags["debugger.probeid"] == "span-probe" assert tags["tag"] == "value" - assert tags[ORIGIN_KEY] == "di" + assert tags[_ORIGIN_KEY] == "di" def test_debugger_span_not_created_when_condition_was_false(self): from tests.submod.stuff import mutator diff --git a/tests/integration/test_integration_snapshots.py b/tests/integration/test_integration_snapshots.py index 3c8bae602f4..dc80f9508ca 100644 --- a/tests/integration/test_integration_snapshots.py +++ b/tests/integration/test_integration_snapshots.py @@ -228,13 +228,13 @@ def test_trace_with_wrong_metrics_types_not_sent(encoding, metrics, monkeypatch) @pytest.mark.snapshot() def test_tracetagsprocessor_only_adds_new_tags(): from ddtrace import tracer + from ddtrace.constants import _SAMPLING_PRIORITY_KEY from ddtrace.constants import AUTO_KEEP - from ddtrace.constants import SAMPLING_PRIORITY_KEY from ddtrace.constants import USER_KEEP with tracer.trace(name="web.request") as span: span.context.sampling_priority = AUTO_KEEP - span.set_metric(SAMPLING_PRIORITY_KEY, USER_KEEP) + span.set_metric(_SAMPLING_PRIORITY_KEY, USER_KEEP) tracer.flush() diff --git a/tests/integration/test_priority_sampling.py b/tests/integration/test_priority_sampling.py index 653ef96d49e..8ea46591a1a 100644 --- a/tests/integration/test_priority_sampling.py +++ b/tests/integration/test_priority_sampling.py @@ -2,9 +2,9 @@ import pytest +from ddtrace.constants import _SAMPLING_PRIORITY_KEY from ddtrace.constants import AUTO_KEEP from ddtrace.constants import AUTO_REJECT -from ddtrace.constants import SAMPLING_PRIORITY_KEY from ddtrace.internal.encoding import JSONEncoder from ddtrace.internal.encoding import MsgpackEncoderV04 as Encoder from ddtrace.internal.writer import AgentWriter @@ -82,7 +82,7 @@ def test_priority_sampling_rate_honored(): pass t.flush() assert len(t._writer.traces) == captured_span_count 
- sampled_spans = [s for s in t._writer.spans if s.context._metrics[SAMPLING_PRIORITY_KEY] == AUTO_KEEP] + sampled_spans = [s for s in t._writer.spans if s.context._metrics[_SAMPLING_PRIORITY_KEY] == AUTO_KEEP] sampled_ratio = len(sampled_spans) / captured_span_count diff_magnitude = abs(sampled_ratio - rate_from_agent) assert diff_magnitude < 0.3, "the proportion of sampled spans should approximate the sample rate given by the agent" diff --git a/tests/integration/test_trace_stats.py b/tests/integration/test_trace_stats.py index 46c153bc8d5..37982dbf7b2 100644 --- a/tests/integration/test_trace_stats.py +++ b/tests/integration/test_trace_stats.py @@ -7,7 +7,7 @@ from ddtrace._trace.sampler import DatadogSampler from ddtrace._trace.sampler import SamplingRule -from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.ext import http from ddtrace.internal.processor.stats import SpanStatsProcessorV06 from tests.integration.utils import AGENT_VERSION @@ -222,7 +222,7 @@ def test_measured_span(send_once_stats_tracer): for _ in range(10): with send_once_stats_tracer.trace("parent"): # Should have stats with send_once_stats_tracer.trace("child_stats") as span: # Should have stats - span.set_tag(SPAN_MEASURED_KEY) + span.set_tag(_SPAN_MEASURED_KEY) @pytest.mark.snapshot() diff --git a/tests/internal/remoteconfig/test_remoteconfig.py b/tests/internal/remoteconfig/test_remoteconfig.py index 5bf87179025..a53db57adb8 100644 --- a/tests/internal/remoteconfig/test_remoteconfig.py +++ b/tests/internal/remoteconfig/test_remoteconfig.py @@ -583,7 +583,7 @@ def test_rc_default_products_registered(): ], ) def test_trace_sampling_rules_conversion(rc_rules, expected_config_rules, expected_sampling_rules): - trace_sampling_rules = config.convert_rc_trace_sampling_rules(rc_rules) + trace_sampling_rules = config._convert_rc_trace_sampling_rules(rc_rules) assert trace_sampling_rules == expected_config_rules if trace_sampling_rules is 
not None: diff --git a/tests/internal/service_name/test_processor.py b/tests/internal/service_name/test_processor.py index 02e238b8955..a765491c67c 100644 --- a/tests/internal/service_name/test_processor.py +++ b/tests/internal/service_name/test_processor.py @@ -25,7 +25,7 @@ def test_base_service(ddtrace_run_python_code_in_subprocess, schema_version, glo import sys from ddtrace import config -from ddtrace.constants import BASE_SERVICE_KEY +from ddtrace.constants import _BASE_SERVICE_KEY from ddtrace.internal.schema.processor import BaseServiceProcessor from ddtrace._trace.span import Span from tests.internal.service_name.test_processor import processor @@ -54,11 +54,11 @@ def test(processor): ] processor.process_trace(fake_trace) - assert BASE_SERVICE_KEY not in fake_trace[0].get_tags() - assert BASE_SERVICE_KEY not in fake_trace[1].get_tags(), config.service - assert fake_trace[2].get_tag(BASE_SERVICE_KEY) is not None - assert fake_trace[2].get_tag(BASE_SERVICE_KEY) == '{}' - assert BASE_SERVICE_KEY not in fake_trace[3].get_tags(), fake_trace[3].service + fake_trace[3].get_tags() + assert _BASE_SERVICE_KEY not in fake_trace[0].get_tags() + assert _BASE_SERVICE_KEY not in fake_trace[1].get_tags(), config.service + assert fake_trace[2].get_tag(_BASE_SERVICE_KEY) is not None + assert fake_trace[2].get_tag(_BASE_SERVICE_KEY) == '{}' + assert _BASE_SERVICE_KEY not in fake_trace[3].get_tags(), fake_trace[3].service + fake_trace[3].get_tags() if __name__ == "__main__": sys.exit(pytest.main(["-x", __file__])) diff --git a/tests/internal/test_settings.py b/tests/internal/test_settings.py index 9010e5f044a..14f52092f4d 100644 --- a/tests/internal/test_settings.py +++ b/tests/internal/test_settings.py @@ -589,7 +589,7 @@ def test_remoteconfig_header_tags(run_python_code_in_subprocess): assert span.get_tag("header_tag_420") is None assert span.get_tag("env_set_tag_name") == "helloworld" -config.http._reset() +config._http._reset() config._header_tag_name.invalidate() 
config._handle_remoteconfig(_base_rc_config({"tracing_header_tags": [{"header": "X-Header-Tag-420", "tag_name":"header_tag_420"}]})) @@ -601,7 +601,7 @@ def test_remoteconfig_header_tags(run_python_code_in_subprocess): assert span2.get_tag("header_tag_420") == "foobarbanana", span2._meta assert span2.get_tag("env_set_tag_name") is None -config.http._reset() +config._http._reset() config._header_tag_name.invalidate() config._handle_remoteconfig(_base_rc_config({})) @@ -634,14 +634,13 @@ def test_config_public_properties_and_methods(): public_attrs.add(key) assert public_attrs == { - "trace_headers", "service", "service_mapping", "env", "tags", "version", - "http", - "http_server", + # Attributes below are deprecated and will be removed in v3.0 + "trace_headers", "header_is_traced", "convert_rc_trace_sampling_rules", "enable_remote_configuration", diff --git a/tests/profiling_v2/collector/test_stack.py b/tests/profiling_v2/collector/test_stack.py index 774e15fb70d..03d5b1e7eff 100644 --- a/tests/profiling_v2/collector/test_stack.py +++ b/tests/profiling_v2/collector/test_stack.py @@ -29,7 +29,6 @@ env=dict( DD_PROFILING_MAX_FRAMES="5", DD_PROFILING_OUTPUT_PPROF="/tmp/test_collect_truncate", - DD_PROFILING_STACK_V2_ENABLED="1", ) ) @pytest.mark.skipif(sys.version_info[:2] == (3, 7), reason="stack_v2 is not supported on Python 3.7") @@ -61,9 +60,8 @@ def test_collect_truncate(): assert len(sample.location_id) <= max_nframes + 2, len(sample.location_id) -@pytest.mark.parametrize("stack_v2_enabled", [True, False]) -def test_stack_locations(stack_v2_enabled, tmp_path): - if sys.version_info[:2] == (3, 7) and stack_v2_enabled: +def test_stack_locations(tmp_path): + if sys.version_info[:2] == (3, 7): pytest.skip("stack_v2 is not supported on Python 3.7") test_name = "test_stack_locations" @@ -83,7 +81,7 @@ def bar(): def foo(): bar() - with stack.StackCollector(None, _stack_collector_v2_enabled=stack_v2_enabled): + with stack.StackCollector(None, 
_stack_collector_v2_enabled=True): for _ in range(10): foo() ddup.upload() @@ -117,9 +115,8 @@ def foo(): pprof_utils.assert_profile_has_sample(profile, samples=samples, expected_sample=expected_sample) -@pytest.mark.parametrize("stack_v2_enabled", [True, False]) -def test_push_span(stack_v2_enabled, tmp_path, tracer): - if sys.version_info[:2] == (3, 7) and stack_v2_enabled: +def test_push_span(tmp_path, tracer): + if sys.version_info[:2] == (3, 7): pytest.skip("stack_v2 is not supported on Python 3.7") test_name = "test_push_span" @@ -140,7 +137,7 @@ def test_push_span(stack_v2_enabled, tmp_path, tracer): tracer=tracer, endpoint_collection_enabled=True, ignore_profiler=True, # this is not necessary, but it's here to trim samples - _stack_collector_v2_enabled=stack_v2_enabled, + _stack_collector_v2_enabled=True, ): with tracer.trace("foobar", resource=resource, span_type=span_type) as span: span_id = span.span_id @@ -221,9 +218,8 @@ def target_fun(): unregister_thread.assert_called_with(thread_id) -@pytest.mark.parametrize("stack_v2_enabled", [True, False]) -def test_push_non_web_span(stack_v2_enabled, tmp_path, tracer): - if sys.version_info[:2] == (3, 7) and stack_v2_enabled: +def test_push_non_web_span(tmp_path, tracer): + if sys.version_info[:2] == (3, 7): pytest.skip("stack_v2 is not supported on Python 3.7") tracer._endpoint_call_counter_span_processor.enable() @@ -244,7 +240,7 @@ def test_push_non_web_span(stack_v2_enabled, tmp_path, tracer): tracer=tracer, endpoint_collection_enabled=True, ignore_profiler=True, # this is not necessary, but it's here to trim samples - _stack_collector_v2_enabled=stack_v2_enabled, + _stack_collector_v2_enabled=True, ): with tracer.trace("foobar", resource=resource, span_type=span_type) as span: span_id = span.span_id @@ -269,10 +265,9 @@ def test_push_non_web_span(stack_v2_enabled, tmp_path, tracer): ) -@pytest.mark.parametrize("stack_v2_enabled", [True, False]) -def test_push_span_none_span_type(stack_v2_enabled, tmp_path, 
tracer): +def test_push_span_none_span_type(tmp_path, tracer): # Test for https://github.com/DataDog/dd-trace-py/issues/11141 - if sys.version_info[:2] == (3, 7) and stack_v2_enabled: + if sys.version_info[:2] == (3, 7): pytest.skip("stack_v2 is not supported on Python 3.7") test_name = "test_push_span_none_span_type" @@ -292,7 +287,7 @@ def test_push_span_none_span_type(stack_v2_enabled, tmp_path, tracer): tracer=tracer, endpoint_collection_enabled=True, ignore_profiler=True, # this is not necessary, but it's here to trim samples - _stack_collector_v2_enabled=stack_v2_enabled, + _stack_collector_v2_enabled=True, ): # Explicitly set None span_type as the default could change in the # future. @@ -484,9 +479,8 @@ def test_exception_collection_trace(stack_v2_enabled, tmp_path, tracer): ) -@pytest.mark.parametrize("stack_v2_enabled", [True, False]) -def test_collect_once_with_class(stack_v2_enabled, tmp_path): - if sys.version_info[:2] == (3, 7) and stack_v2_enabled: +def test_collect_once_with_class(tmp_path): + if sys.version_info[:2] == (3, 7): pytest.skip("stack_v2 is not supported on Python 3.7") class SomeClass(object): @@ -506,7 +500,7 @@ def sleep_instance(self): ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) ddup.start() - with stack.StackCollector(None, ignore_profiler=True, _stack_collector_v2_enabled=stack_v2_enabled): + with stack.StackCollector(None, ignore_profiler=True, _stack_collector_v2_enabled=True): SomeClass.sleep_class() ddup.upload() @@ -521,7 +515,6 @@ def sleep_instance(self): expected_sample=pprof_utils.StackEvent( thread_id=_thread.get_ident(), thread_name="MainThread", - class_name="SomeClass" if not stack_v2_enabled else None, locations=[ pprof_utils.StackLocation( function_name="sleep_instance", @@ -536,16 +529,15 @@ def sleep_instance(self): pprof_utils.StackLocation( function_name="test_collect_once_with_class", filename="test_stack.py", - 
line_no=test_collect_once_with_class.__code__.co_firstlineno + 23, + line_no=test_collect_once_with_class.__code__.co_firstlineno + 22, ), ], ), ) -@pytest.mark.parametrize("stack_v2_enabled", [True, False]) -def test_collect_once_with_class_not_right_type(stack_v2_enabled, tmp_path): - if sys.version_info[:2] == (3, 7) and stack_v2_enabled: +def test_collect_once_with_class_not_right_type(tmp_path): + if sys.version_info[:2] == (3, 7): pytest.skip("stack_v2 is not supported on Python 3.7") class SomeClass(object): @@ -565,7 +557,7 @@ def sleep_instance(foobar, self): ddup.config(env="test", service=test_name, version="my_version", output_filename=pprof_prefix) ddup.start() - with stack.StackCollector(None, ignore_profiler=True, _stack_collector_v2_enabled=stack_v2_enabled): + with stack.StackCollector(None, ignore_profiler=True, _stack_collector_v2_enabled=True): SomeClass.sleep_class(123) ddup.upload() @@ -580,9 +572,6 @@ def sleep_instance(foobar, self): expected_sample=pprof_utils.StackEvent( thread_id=_thread.get_ident(), thread_name="MainThread", - # stack v1 relied on using cls and self to figure out class name - # so we can't find it here. 
- class_name=None, locations=[ pprof_utils.StackLocation( function_name="sleep_instance", @@ -597,7 +586,7 @@ def sleep_instance(foobar, self): pprof_utils.StackLocation( function_name="test_collect_once_with_class_not_right_type", filename="test_stack.py", - line_no=test_collect_once_with_class_not_right_type.__code__.co_firstlineno + 23, + line_no=test_collect_once_with_class_not_right_type.__code__.co_firstlineno + 22, ), ], ), diff --git a/tests/profiling_v2/collector/test_stack_asyncio.py b/tests/profiling_v2/collector/test_stack_asyncio.py index 791cceb4080..d28ad54e586 100644 --- a/tests/profiling_v2/collector/test_stack_asyncio.py +++ b/tests/profiling_v2/collector/test_stack_asyncio.py @@ -7,7 +7,6 @@ @pytest.mark.subprocess( env=dict( DD_PROFILING_OUTPUT_PPROF="/tmp/test_stack_asyncio", - DD_PROFILING_STACK_V2_ENABLED="true", ), ) def test_asyncio(): diff --git a/tests/profiling_v2/test_accuracy.py b/tests/profiling_v2/test_accuracy.py index cb1d538712f..a9239c19010 100644 --- a/tests/profiling_v2/test_accuracy.py +++ b/tests/profiling_v2/test_accuracy.py @@ -4,57 +4,7 @@ import pytest -@pytest.mark.subprocess( - env=dict(DD_PROFILING_MAX_TIME_USAGE_PCT="100", DD_PROFILING_OUTPUT_PPROF="/tmp/test_accuracy_libdd.pprof") -) -def test_accuracy_libdd(): - import collections - import os - - from ddtrace.profiling import profiler - from tests.profiling.collector import pprof_utils - from tests.profiling.test_accuracy import assert_almost_equal - from tests.profiling.test_accuracy import spend_16 - - # Set this to 100 so we don't sleep too often and mess with the precision. - p = profiler.Profiler() - p.start() - spend_16() - p.stop() - wall_times = collections.defaultdict(lambda: 0) - cpu_times = collections.defaultdict(lambda: 0) - profile = pprof_utils.parse_profile(os.environ["DD_PROFILING_OUTPUT_PPROF"] + "." 
+ str(os.getpid())) - - for sample in profile.sample: - wall_time_index = pprof_utils.get_sample_type_index(profile, "wall-time") - - wall_time_spent_ns = sample.value[wall_time_index] - cpu_time_index = pprof_utils.get_sample_type_index(profile, "cpu-time") - cpu_time_spent_ns = sample.value[cpu_time_index] - - for location_id in sample.location_id: - location = pprof_utils.get_location_with_id(profile, location_id) - line = location.line[0] - function = pprof_utils.get_function_with_id(profile, line.function_id) - function_name = profile.string_table[function.name] - wall_times[function_name] += wall_time_spent_ns - cpu_times[function_name] += cpu_time_spent_ns - - assert_almost_equal(wall_times["spend_3"], 9e9) - assert_almost_equal(wall_times["spend_1"], 2e9) - assert_almost_equal(wall_times["spend_4"], 4e9) - assert_almost_equal(wall_times["spend_16"], 16e9) - assert_almost_equal(wall_times["spend_7"], 7e9) - - assert_almost_equal(wall_times["spend_cpu_2"], 2e9, tolerance=0.09) - assert_almost_equal(wall_times["spend_cpu_3"], 3e9, tolerance=0.09) - assert_almost_equal(cpu_times["spend_cpu_2"], 2e9, tolerance=0.09) - assert_almost_equal(cpu_times["spend_cpu_3"], 3e9, tolerance=0.09) - - -@pytest.mark.subprocess( - env=dict(DD_PROFILING_STACK_V2_ENABLED="1", DD_PROFILING_OUTPUT_PPROF="/tmp/test_accuracy_stack_v2.pprof") -) +@pytest.mark.subprocess(env=dict(DD_PROFILING_OUTPUT_PPROF="/tmp/test_accuracy_stack_v2.pprof")) @pytest.mark.skipif(sys.version_info[:2] == (3, 7), reason="stack_v2 is not supported on Python 3.7") def test_accuracy_stack_v2(): import collections diff --git a/tests/profiling_v2/test_gunicorn.py b/tests/profiling_v2/test_gunicorn.py index 90141445d3a..0ff02bbf177 100644 --- a/tests/profiling_v2/test_gunicorn.py +++ b/tests/profiling_v2/test_gunicorn.py @@ -52,9 +52,6 @@ def _run_gunicorn(*args): def gunicorn(monkeypatch): monkeypatch.setenv("DD_PROFILING_IGNORE_PROFILER", "1") monkeypatch.setenv("DD_PROFILING_ENABLED", "1") - # This was 
needed for the gunicorn process to start and print worker startup - # messages. Without this, the test can't find the worker PIDs. - monkeypatch.setenv("DD_PROFILING_STACK_V2_ENABLED", "1") yield _run_gunicorn diff --git a/tests/profiling_v2/test_main.py b/tests/profiling_v2/test_main.py index 3142a1fbba8..132fc8aa502 100644 --- a/tests/profiling_v2/test_main.py +++ b/tests/profiling_v2/test_main.py @@ -11,13 +11,11 @@ from tests.utils import flaky -@pytest.mark.parametrize("stack_v2_enabled", [True, False]) -def test_call_script(stack_v2_enabled): - if sys.version_info[:2] == (3, 7) and stack_v2_enabled: +def test_call_script(): + if sys.version_info[:2] == (3, 7): pytest.skip("stack_v2 is not supported on Python 3.7") env = os.environ.copy() env["DD_PROFILING_ENABLED"] = "1" - env["DD_PROFILING_STACK_V2_ENABLED"] = "1" if stack_v2_enabled else "0" stdout, stderr, exitcode, _ = call_program( "ddtrace-run", sys.executable, os.path.join(os.path.dirname(__file__), "simple_program.py"), env=env ) @@ -28,28 +26,25 @@ def test_call_script(stack_v2_enabled): hello, interval, pid, stack_v2 = list(s.strip() for s in stdout.decode().strip().split("\n")) assert hello == "hello world", stdout.decode().strip() assert float(interval) >= 0.01, stdout.decode().strip() - assert stack_v2 == str(stack_v2_enabled) + assert stack_v2 == str(True) @pytest.mark.skipif(not os.getenv("DD_PROFILE_TEST_GEVENT", False), reason="Not testing gevent") -@pytest.mark.parametrize("stack_v2_enabled", [True, False]) -def test_call_script_gevent(stack_v2_enabled): - if sys.version_info[:2] == (3, 7) and stack_v2_enabled: +def test_call_script_gevent(): + if sys.version_info[:2] == (3, 7): pytest.skip("stack_v2 is not supported on Python 3.7") - if sys.version_info[:2] == (3, 8) and stack_v2_enabled: + if sys.version_info[:2] == (3, 8): pytest.skip("this test is flaky on 3.8 with stack v2") env = os.environ.copy() env["DD_PROFILING_ENABLED"] = "1" - env["DD_PROFILING_STACK_V2_ENABLED"] = "1" if 
stack_v2_enabled else "0" stdout, stderr, exitcode, pid = call_program( sys.executable, os.path.join(os.path.dirname(__file__), "simple_program_gevent.py"), env=env ) assert exitcode == 0, (stdout, stderr) -@pytest.mark.parametrize("stack_v2_enabled", [True, False]) -def test_call_script_pprof_output(stack_v2_enabled, tmp_path): - if sys.version_info[:2] == (3, 7) and stack_v2_enabled: +def test_call_script_pprof_output(tmp_path): + if sys.version_info[:2] == (3, 7): pytest.skip("stack_v2 is not supported on Python 3.7") """This checks if the pprof output and atexit register work correctly. @@ -61,7 +56,6 @@ def test_call_script_pprof_output(stack_v2_enabled, tmp_path): env["DD_PROFILING_OUTPUT_PPROF"] = filename env["DD_PROFILING_CAPTURE_PCT"] = "1" env["DD_PROFILING_ENABLED"] = "1" - env["DD_PROFILING_STACK_V2_ENABLED"] = "1" if stack_v2_enabled else "0" stdout, stderr, exitcode, _ = call_program( "ddtrace-run", sys.executable, @@ -78,17 +72,15 @@ def test_call_script_pprof_output(stack_v2_enabled, tmp_path): assert len(samples) > 0 -@pytest.mark.parametrize("stack_v2_enabled", [True, False]) @pytest.mark.skipif(sys.platform == "win32", reason="fork only available on Unix") -def test_fork(stack_v2_enabled, tmp_path): - if sys.version_info[:2] == (3, 7) and stack_v2_enabled: +def test_fork(tmp_path): + if sys.version_info[:2] == (3, 7): pytest.skip("stack_v2 is not supported on Python 3.7") filename = str(tmp_path / "pprof") env = os.environ.copy() env["DD_PROFILING_OUTPUT_PPROF"] = filename env["DD_PROFILING_CAPTURE_PCT"] = "100" - env["DD_PROFILING_STACK_V2_ENABLED"] = "1" if stack_v2_enabled else "0" stdout, stderr, exitcode, pid = call_program( "python", os.path.join(os.path.dirname(__file__), "simple_program_fork.py"), env=env ) @@ -144,14 +136,12 @@ def test_fork(stack_v2_enabled, tmp_path): ) -@pytest.mark.parametrize("stack_v2_enabled", [True, False]) @pytest.mark.skipif(sys.platform == "win32", reason="fork only available on Unix") @pytest.mark.skipif(not 
os.getenv("DD_PROFILE_TEST_GEVENT", False), reason="Not testing gevent") -def test_fork_gevent(stack_v2_enabled): - if sys.version_info[:2] == (3, 7) and stack_v2_enabled: +def test_fork_gevent(): + if sys.version_info[:2] == (3, 7): pytest.skip("stack_v2 is not supported on Python 3.7") env = os.environ.copy() - env["DD_PROFILING_STACK_V2_ENABLED"] = "1" if stack_v2_enabled else "0" stdout, stderr, exitcode, pid = call_program( "python", os.path.join(os.path.dirname(__file__), "../profiling", "gevent_fork.py"), env=env ) @@ -161,20 +151,18 @@ def test_fork_gevent(stack_v2_enabled): methods = multiprocessing.get_all_start_methods() -@pytest.mark.parametrize("stack_v2_enabled", [True, False]) @pytest.mark.parametrize( "method", set(methods) - {"forkserver", "fork"}, ) -def test_multiprocessing(stack_v2_enabled, method, tmp_path): - if sys.version_info[:2] == (3, 7) and stack_v2_enabled: +def test_multiprocessing(method, tmp_path): + if sys.version_info[:2] == (3, 7): pytest.skip("stack_v2 is not supported on Python 3.7") filename = str(tmp_path / "pprof") env = os.environ.copy() env["DD_PROFILING_OUTPUT_PPROF"] = filename env["DD_PROFILING_ENABLED"] = "1" env["DD_PROFILING_CAPTURE_PCT"] = "1" - env["DD_PROFILING_STACK_V2_ENABLED"] = "1" if stack_v2_enabled else "0" stdout, stderr, exitcode, _ = call_program( "ddtrace-run", sys.executable, @@ -208,8 +196,6 @@ def test_memalloc_no_init_error_on_fork(): os.waitpid(pid, 0) -# Not parametrizing with stack_v2_enabled as subprocess mark doesn't support -# parametrized tests and this only tests our start up code. 
@pytest.mark.subprocess( ddtrace_run=True, env=dict( diff --git a/tests/suitespec.yml b/tests/suitespec.yml index 41fabd7aa88..b135ba986c8 100644 --- a/tests/suitespec.yml +++ b/tests/suitespec.yml @@ -62,6 +62,7 @@ components: - ddtrace/internal/logger.py - ddtrace/internal/metrics.py - ddtrace/internal/module.py + - ddtrace/internal/native/* - ddtrace/internal/packages.py - ddtrace/internal/third-party.tar.gz - ddtrace/internal/periodic.py @@ -76,7 +77,7 @@ components: - ddtrace/py.typed - ddtrace/version.py - ddtrace/settings/config.py - - src/core/* + - src/native/* datastreams: - ddtrace/internal/datastreams/* - ddtrace/data_streams.py diff --git a/tests/telemetry/test_writer.py b/tests/telemetry/test_writer.py index 8d4030c84a9..52d0f763df9 100644 --- a/tests/telemetry/test_writer.py +++ b/tests/telemetry/test_writer.py @@ -15,8 +15,8 @@ from ddtrace.internal.telemetry.data import get_host_info from ddtrace.internal.telemetry.writer import get_runtime_id from ddtrace.internal.utils.version import _pep440_to_semver -from ddtrace.settings import _config as config -from ddtrace.settings.config import DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP_DEFAULT +from ddtrace.settings import _global_config as config +from ddtrace.settings._config import DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP_DEFAULT from tests.conftest import DEFAULT_DDTRACE_SUBPROCESS_TEST_SERVICE_NAME from tests.utils import override_global_config @@ -354,9 +354,11 @@ def test_app_started_event_configuration_override(test_agent_session, run_python {"name": "DD_DYNAMIC_INSTRUMENTATION_UPLOAD_FLUSH_INTERVAL", "origin": "default", "value": 1.0}, {"name": "DD_DYNAMIC_INSTRUMENTATION_UPLOAD_TIMEOUT", "origin": "default", "value": 30}, {"name": "DD_ENV", "origin": "default", "value": None}, + {"name": "DD_EXCEPTION_REPLAY_CAPTURE_MAX_FRAMES", "origin": "default", "value": 8}, {"name": "DD_EXCEPTION_REPLAY_ENABLED", "origin": "env_var", "value": True}, {"name": "DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED", 
"origin": "default", "value": False}, {"name": "DD_HTTP_CLIENT_TAG_QUERY_STRING", "origin": "default", "value": None}, + {"name": "DD_IAST_DEDUPLICATION_ENABLED", "origin": "default", "value": True}, {"name": "DD_IAST_ENABLED", "origin": "default", "value": False}, {"name": "DD_IAST_MAX_CONCURRENT_REQUESTS", "origin": "default", "value": 2}, {"name": "DD_IAST_REDACTION_ENABLED", "origin": "default", "value": True}, diff --git a/tests/tracer/test_encoders.py b/tests/tracer/test_encoders.py index f96e063502e..00321a59924 100644 --- a/tests/tracer/test_encoders.py +++ b/tests/tracer/test_encoders.py @@ -21,7 +21,7 @@ from ddtrace._trace._span_pointer import _SpanPointerDirection from ddtrace._trace.context import Context from ddtrace._trace.span import Span -from ddtrace.constants import ORIGIN_KEY +from ddtrace.constants import _ORIGIN_KEY as ORIGIN_KEY from ddtrace.ext import SpanTypes from ddtrace.ext.ci import CI_APP_TEST_ORIGIN from ddtrace.internal._encoding import BufferFull diff --git a/tests/tracer/test_env_vars.py b/tests/tracer/test_env_vars.py index fb5f5ada055..16b92cc49f2 100644 --- a/tests/tracer/test_env_vars.py +++ b/tests/tracer/test_env_vars.py @@ -50,7 +50,7 @@ def test_obfuscation_querystring_pattern_env_var( "-c", ( """import re;from ddtrace import config; -from ddtrace.settings.config import DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP_DEFAULT; +from ddtrace.settings._config import DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP_DEFAULT; assert config._obfuscation_query_string_pattern == %s; assert config._global_query_string_obfuscation_disabled == %s; assert config._http_tag_query_string == %s diff --git a/tests/tracer/test_global_config.py b/tests/tracer/test_global_config.py index a83b8352074..6c557bc8de6 100644 --- a/tests/tracer/test_global_config.py +++ b/tests/tracer/test_global_config.py @@ -6,7 +6,7 @@ from ddtrace import config as global_config from ddtrace.settings import Config -from ddtrace.settings.config import _parse_propagation_styles 
+from ddtrace.settings._config import _parse_propagation_styles from ..utils import DummyTracer from ..utils import override_env diff --git a/tests/tracer/test_instance_config.py b/tests/tracer/test_instance_config.py index 457bf53a408..130e46350ae 100644 --- a/tests/tracer/test_instance_config.py +++ b/tests/tracer/test_instance_config.py @@ -22,47 +22,47 @@ class Klass(object): def test_configuration_get_from(self): # ensure a dictionary is returned - cfg = config.get_from(self.Klass) + cfg = config._get_from(self.Klass) assert isinstance(cfg, dict) def test_configuration_get_from_twice(self): # ensure the configuration is the same if `get_from` is used # in the same instance instance = self.Klass() - cfg1 = config.get_from(instance) - cfg2 = config.get_from(instance) + cfg1 = config._get_from(instance) + cfg2 = config._get_from(instance) assert cfg1 is cfg2 def test_configuration_set(self): # ensure the configuration can be updated in the Pin instance = self.Klass() - cfg = config.get_from(instance) + cfg = config._get_from(instance) cfg["distributed_tracing"] = True - assert config.get_from(instance)["distributed_tracing"] is True + assert config._get_from(instance)["distributed_tracing"] is True def test_global_configuration_inheritance(self): # ensure global configuration is inherited when it's set - cfg = config.get_from(self.Klass) + cfg = config._get_from(self.Klass) cfg["distributed_tracing"] = True instance = self.Klass() - assert config.get_from(instance)["distributed_tracing"] is True + assert config._get_from(instance)["distributed_tracing"] is True def test_configuration_override_instance(self): # ensure instance configuration doesn't override global settings - global_cfg = config.get_from(self.Klass) + global_cfg = config._get_from(self.Klass) global_cfg["distributed_tracing"] = True instance = self.Klass() - cfg = config.get_from(instance) + cfg = config._get_from(instance) cfg["distributed_tracing"] = False - assert 
config.get_from(self.Klass)["distributed_tracing"] is True - assert config.get_from(instance)["distributed_tracing"] is False + assert config._get_from(self.Klass)["distributed_tracing"] is True + assert config._get_from(instance)["distributed_tracing"] is False def test_service_name_for_pin(self): # ensure for backward compatibility that changing the service # name via the Pin object also updates integration config Pin(service="intake").onto(self.Klass) instance = self.Klass() - cfg = config.get_from(instance) + cfg = config._get_from(instance) assert cfg["service_name"] == "intake" def test_service_attribute_priority(self): @@ -73,7 +73,7 @@ def test_service_attribute_priority(self): } Pin(service="service", _config=global_config).onto(self.Klass) instance = self.Klass() - cfg = config.get_from(instance) + cfg = config._get_from(instance) assert cfg["service_name"] == "service" def test_configuration_copy(self): @@ -83,7 +83,7 @@ def test_configuration_copy(self): } Pin(service="service", _config=global_config).onto(self.Klass) instance = self.Klass() - cfg = config.get_from(instance) + cfg = config._get_from(instance) cfg["service_name"] = "metrics" assert global_config["service_name"] == "service" @@ -98,7 +98,7 @@ def test_configuration_copy_upside_down(self): global_config["service_name"] = "metrics" # use the Pin via `get_from` instance = self.Klass() - cfg = config.get_from(instance) + cfg = config._get_from(instance) # it should have users updated value assert cfg["service_name"] == "metrics" diff --git a/tests/tracer/test_processors.py b/tests/tracer/test_processors.py index f7bf413d916..ad9360deec2 100644 --- a/tests/tracer/test_processors.py +++ b/tests/tracer/test_processors.py @@ -12,13 +12,13 @@ from ddtrace._trace.processor import TraceTagsProcessor from ddtrace._trace.sampler import DatadogSampler from ddtrace._trace.span import Span +from ddtrace.constants import _SAMPLING_PRIORITY_KEY from ddtrace.constants import 
_SINGLE_SPAN_SAMPLING_MAX_PER_SEC from ddtrace.constants import _SINGLE_SPAN_SAMPLING_MECHANISM from ddtrace.constants import _SINGLE_SPAN_SAMPLING_RATE from ddtrace.constants import AUTO_KEEP from ddtrace.constants import AUTO_REJECT from ddtrace.constants import MANUAL_KEEP_KEY -from ddtrace.constants import SAMPLING_PRIORITY_KEY from ddtrace.constants import USER_KEEP from ddtrace.constants import USER_REJECT from ddtrace.ext import SpanTypes @@ -581,7 +581,7 @@ def assert_span_sampling_decision_tags( assert span.get_metric(_SINGLE_SPAN_SAMPLING_MAX_PER_SEC) == limit if trace_sampling_priority: - assert span.get_metric(SAMPLING_PRIORITY_KEY) == trace_sampling_priority + assert span.get_metric(_SAMPLING_PRIORITY_KEY) == trace_sampling_priority def switch_out_trace_sampling_processor(tracer, sampling_processor): diff --git a/tests/tracer/test_sampler.py b/tests/tracer/test_sampler.py index 54c9c1abef3..4bf9de2019e 100644 --- a/tests/tracer/test_sampler.py +++ b/tests/tracer/test_sampler.py @@ -12,12 +12,12 @@ from ddtrace._trace.sampler import RateSampler from ddtrace._trace.sampling_rule import SamplingRule from ddtrace._trace.span import Span +from ddtrace.constants import _SAMPLING_AGENT_DECISION +from ddtrace.constants import _SAMPLING_LIMIT_DECISION +from ddtrace.constants import _SAMPLING_PRIORITY_KEY +from ddtrace.constants import _SAMPLING_RULE_DECISION from ddtrace.constants import AUTO_KEEP from ddtrace.constants import AUTO_REJECT -from ddtrace.constants import SAMPLING_AGENT_DECISION -from ddtrace.constants import SAMPLING_LIMIT_DECISION -from ddtrace.constants import SAMPLING_PRIORITY_KEY -from ddtrace.constants import SAMPLING_RULE_DECISION from ddtrace.constants import USER_KEEP from ddtrace.constants import USER_REJECT from ddtrace.internal.rate_limiter import RateLimiter @@ -51,10 +51,10 @@ def assert_sampling_decision_tags( :param sampling_priority: expected sampling priority ``_sampling_priority_v1`` :param trace_tag: expected sampling decision 
trace tag ``_dd.p.dm``. Format is ``-{SAMPLINGMECHANISM}``. """ - metric_agent = span.get_metric(SAMPLING_AGENT_DECISION) - metric_limit = span.get_metric(SAMPLING_LIMIT_DECISION) - metric_rule = span.get_metric(SAMPLING_RULE_DECISION) - metric_sampling_priority = span.get_metric(SAMPLING_PRIORITY_KEY) + metric_agent = span.get_metric(_SAMPLING_AGENT_DECISION) + metric_limit = span.get_metric(_SAMPLING_LIMIT_DECISION) + metric_rule = span.get_metric(_SAMPLING_RULE_DECISION) + metric_sampling_priority = span.get_metric(_SAMPLING_PRIORITY_KEY) if agent: assert metric_agent == agent if limit: diff --git a/tests/tracer/test_settings.py b/tests/tracer/test_settings.py index c78302712e7..ac94404200a 100644 --- a/tests/tracer/test_settings.py +++ b/tests/tracer/test_settings.py @@ -33,15 +33,15 @@ def test_http_config(self): config = Config() config._add("django", dict()) assert config.django.trace_query_string is None - config.http.trace_query_string = True - assert config.http.trace_query_string is True + config._http.trace_query_string = True + assert config._http.trace_query_string is True assert config.django.trace_query_string is True # Integration usage config = Config() config._add("django", dict()) config.django.http.trace_query_string = True - assert config.http.trace_query_string is None + assert config._http.trace_query_string is None assert config.django.trace_query_string is True assert config.django.http.trace_query_string is True @@ -183,7 +183,7 @@ def test_config_is_header_tracing_configured(global_headers, int_headers, expect integration_config.http.trace_headers(int_headers) assert ( - config.http.is_header_tracing_configured, + config._http.is_header_tracing_configured, integration_config.http.is_header_tracing_configured, integration_config.is_header_tracing_configured, ) == expected @@ -193,7 +193,7 @@ def test_environment_header_tags(): with override_env(dict(DD_TRACE_HEADER_TAGS="Host:http.host,User-agent:http.user_agent")): config = Config() - 
assert config.http.is_header_tracing_configured + assert config._http.is_header_tracing_configured assert config._header_tag_name("Host") == "http.host" assert config._header_tag_name("User-agent") == "http.user_agent" # Case insensitive diff --git a/tests/tracer/test_single_span_sampling_rules.py b/tests/tracer/test_single_span_sampling_rules.py index 3ebffed00d5..24dfda91ad5 100644 --- a/tests/tracer/test_single_span_sampling_rules.py +++ b/tests/tracer/test_single_span_sampling_rules.py @@ -3,10 +3,10 @@ import pytest from ddtrace import Tracer +from ddtrace.constants import _SAMPLING_PRIORITY_KEY from ddtrace.constants import _SINGLE_SPAN_SAMPLING_MAX_PER_SEC from ddtrace.constants import _SINGLE_SPAN_SAMPLING_MECHANISM from ddtrace.constants import _SINGLE_SPAN_SAMPLING_RATE -from ddtrace.constants import SAMPLING_PRIORITY_KEY from ddtrace.internal.sampling import SamplingMechanism from ddtrace.internal.sampling import SpanSamplingRule from ddtrace.internal.sampling import _get_file_json @@ -41,7 +41,7 @@ def assert_sampling_decision_tags( assert span.get_metric(_SINGLE_SPAN_SAMPLING_MAX_PER_SEC) == limit if trace_sampling: - assert span.get_metric(SAMPLING_PRIORITY_KEY) > 0 + assert span.get_metric(_SAMPLING_PRIORITY_KEY) > 0 def test_single_rule_init_via_env(): diff --git a/tests/tracer/test_span.py b/tests/tracer/test_span.py index 8cdaad831f0..1725f0d7675 100644 --- a/tests/tracer/test_span.py +++ b/tests/tracer/test_span.py @@ -11,12 +11,12 @@ from ddtrace._trace._span_link import SpanLink from ddtrace._trace._span_pointer import _SpanPointerDirection from ddtrace._trace.span import Span +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.constants import ENV_KEY from ddtrace.constants import ERROR_MSG from ddtrace.constants import ERROR_STACK from ddtrace.constants import ERROR_TYPE from ddtrace.constants import SERVICE_VERSION_KEY -from ddtrace.constants import SPAN_MEASURED_KEY from ddtrace.constants import VERSION_KEY from ddtrace.ext 
import SpanTypes from ddtrace.internal import core @@ -552,7 +552,7 @@ def test_span_pointers(self): ) def test_set_tag_measured(value, assertion): s = Span(name="test.span") - s.set_tag(SPAN_MEASURED_KEY, value) + s.set_tag(_SPAN_MEASURED_KEY, value) assertion(s) @@ -564,19 +564,19 @@ def test_set_tag_measured_not_set(): def test_set_tag_measured_no_value(): s = Span(name="test.span") - s.set_tag(SPAN_MEASURED_KEY) + s.set_tag(_SPAN_MEASURED_KEY) assert_is_measured(s) def test_set_tag_measured_change_value(): s = Span(name="test.span") - s.set_tag(SPAN_MEASURED_KEY, True) + s.set_tag(_SPAN_MEASURED_KEY, True) assert_is_measured(s) - s.set_tag(SPAN_MEASURED_KEY, False) + s.set_tag(_SPAN_MEASURED_KEY, False) assert_is_not_measured(s) - s.set_tag(SPAN_MEASURED_KEY) + s.set_tag(_SPAN_MEASURED_KEY) assert_is_measured(s) diff --git a/tests/tracer/test_trace_utils.py b/tests/tracer/test_trace_utils.py index 38ba097f308..9e05cd9f40d 100644 --- a/tests/tracer/test_trace_utils.py +++ b/tests/tracer/test_trace_utils.py @@ -483,7 +483,7 @@ def test_set_http_meta( assert core.get_item("http.request.path_params", span=span) == path_params -@mock.patch("ddtrace.settings.config.log") +@mock.patch("ddtrace.settings._config.log") @pytest.mark.parametrize( "error_codes,status_code,error,log_call", [ @@ -499,7 +499,7 @@ def test_set_http_meta( ], ) def test_set_http_meta_custom_errors(mock_log, span, int_config, error_codes, status_code, error, log_call): - config.http_server.error_statuses = error_codes + config._http_server.error_statuses = error_codes trace_utils.set_http_meta(span, int_config, status_code=status_code) assert span.error == error if log_call: diff --git a/tests/tracer/test_tracer.py b/tests/tracer/test_tracer.py index e647397cc50..1c45f424679 100644 --- a/tests/tracer/test_tracer.py +++ b/tests/tracer/test_tracer.py @@ -18,15 +18,15 @@ from ddtrace._trace.context import Context from ddtrace._trace.span import _is_top_level from ddtrace._trace.tracer import Tracer 
+from ddtrace.constants import _HOSTNAME_KEY +from ddtrace.constants import _ORIGIN_KEY +from ddtrace.constants import _SAMPLING_PRIORITY_KEY from ddtrace.constants import AUTO_KEEP from ddtrace.constants import AUTO_REJECT from ddtrace.constants import ENV_KEY -from ddtrace.constants import HOSTNAME_KEY from ddtrace.constants import MANUAL_DROP_KEY from ddtrace.constants import MANUAL_KEEP_KEY -from ddtrace.constants import ORIGIN_KEY from ddtrace.constants import PID -from ddtrace.constants import SAMPLING_PRIORITY_KEY from ddtrace.constants import USER_KEEP from ddtrace.constants import USER_REJECT from ddtrace.constants import VERSION_KEY @@ -1459,9 +1459,9 @@ def test_ctx(tracer, test_spans): assert s3.parent_id == s2.span_id assert s4.parent_id == s1.span_id assert s1.trace_id == s2.trace_id == s3.trace_id == s4.trace_id - assert s1.get_metric(SAMPLING_PRIORITY_KEY) == 1 - assert s2.get_metric(SAMPLING_PRIORITY_KEY) is None - assert ORIGIN_KEY not in s1.get_tags() + assert s1.get_metric(_SAMPLING_PRIORITY_KEY) == 1 + assert s2.get_metric(_SAMPLING_PRIORITY_KEY) is None + assert _ORIGIN_KEY not in s1.get_tags() t = test_spans.pop_traces() assert len(t) == 1 @@ -1535,8 +1535,8 @@ def test_ctx_distributed(tracer, test_spans): trace = test_spans.pop_traces() assert len(trace) == 1 - assert s2.get_metric(SAMPLING_PRIORITY_KEY) == 2 - assert s2.get_tag(ORIGIN_KEY) == "somewhere" + assert s2.get_metric(_SAMPLING_PRIORITY_KEY) == 2 + assert s2.get_tag(_ORIGIN_KEY) == "somewhere" def test_manual_keep(tracer, test_spans): @@ -1544,14 +1544,14 @@ def test_manual_keep(tracer, test_spans): with tracer.trace("asdf") as s: s.set_tag(MANUAL_KEEP_KEY) spans = test_spans.pop() - assert spans[0].get_metric(SAMPLING_PRIORITY_KEY) is USER_KEEP + assert spans[0].get_metric(_SAMPLING_PRIORITY_KEY) is USER_KEEP # On a child span with tracer.trace("asdf"): with tracer.trace("child") as s: s.set_tag(MANUAL_KEEP_KEY) spans = test_spans.pop() - assert 
spans[0].get_metric(SAMPLING_PRIORITY_KEY) is USER_KEEP + assert spans[0].get_metric(_SAMPLING_PRIORITY_KEY) is USER_KEEP def test_manual_keep_then_drop(tracer, test_spans): @@ -1561,7 +1561,7 @@ def test_manual_keep_then_drop(tracer, test_spans): child.set_tag(MANUAL_KEEP_KEY) root.set_tag(MANUAL_DROP_KEY) spans = test_spans.pop() - assert spans[0].get_metric(SAMPLING_PRIORITY_KEY) is USER_REJECT + assert spans[0].get_metric(_SAMPLING_PRIORITY_KEY) is USER_REJECT def test_manual_drop(tracer, test_spans): @@ -1569,14 +1569,14 @@ def test_manual_drop(tracer, test_spans): with tracer.trace("asdf") as s: s.set_tag(MANUAL_DROP_KEY) spans = test_spans.pop() - assert spans[0].get_metric(SAMPLING_PRIORITY_KEY) is USER_REJECT + assert spans[0].get_metric(_SAMPLING_PRIORITY_KEY) is USER_REJECT # On a child span with tracer.trace("asdf"): with tracer.trace("child") as s: s.set_tag(MANUAL_DROP_KEY) spans = test_spans.pop() - assert spans[0].get_metric(SAMPLING_PRIORITY_KEY) is USER_REJECT + assert spans[0].get_metric(_SAMPLING_PRIORITY_KEY) is USER_REJECT @mock.patch("ddtrace.internal.hostname.get_hostname") @@ -1590,8 +1590,8 @@ def test_get_report_hostname_enabled(get_hostname, tracer, test_spans): spans = test_spans.pop() root = spans[0] child = spans[1] - assert root.get_tag(HOSTNAME_KEY) == "test-hostname" - assert child.get_tag(HOSTNAME_KEY) is None + assert root.get_tag(_HOSTNAME_KEY) == "test-hostname" + assert child.get_tag(_HOSTNAME_KEY) is None @mock.patch("ddtrace.internal.hostname.get_hostname") @@ -1605,8 +1605,8 @@ def test_get_report_hostname_disabled(get_hostname, tracer, test_spans): spans = test_spans.pop() root = spans[0] child = spans[1] - assert root.get_tag(HOSTNAME_KEY) is None - assert child.get_tag(HOSTNAME_KEY) is None + assert root.get_tag(_HOSTNAME_KEY) is None + assert child.get_tag(_HOSTNAME_KEY) is None @mock.patch("ddtrace.internal.hostname.get_hostname") @@ -1620,8 +1620,8 @@ def test_get_report_hostname_default(get_hostname, tracer, 
test_spans): spans = test_spans.pop() root = spans[0] child = spans[1] - assert root.get_tag(HOSTNAME_KEY) is None - assert child.get_tag(HOSTNAME_KEY) is None + assert root.get_tag(_HOSTNAME_KEY) is None + assert child.get_tag(_HOSTNAME_KEY) is None def test_non_active_span(tracer, test_spans): @@ -1743,7 +1743,7 @@ def test_context_priority(tracer, test_spans): spans = test_spans.pop() assert len(spans) == 1, "trace should be sampled" if p in [USER_REJECT, AUTO_REJECT, AUTO_KEEP, USER_KEEP]: - assert spans[0].get_metric(SAMPLING_PRIORITY_KEY) == p + assert spans[0].get_metric(_SAMPLING_PRIORITY_KEY) == p def test_spans_sampled_out(tracer, test_spans): diff --git a/tests/tracer/test_writer.py b/tests/tracer/test_writer.py index 2089971c554..6abb7681ab8 100644 --- a/tests/tracer/test_writer.py +++ b/tests/tracer/test_writer.py @@ -15,7 +15,7 @@ import ddtrace from ddtrace import config from ddtrace._trace.span import Span -from ddtrace.constants import KEEP_SPANS_RATE_KEY +from ddtrace.constants import _KEEP_SPANS_RATE_KEY from ddtrace.internal.ci_visibility.writer import CIVisibilityWriter from ddtrace.internal.compat import get_connection_response from ddtrace.internal.compat import httplib @@ -366,7 +366,7 @@ def test_keep_rate(self): # 100% of traces kept (refers to the past). # No traces sent before now so 100% kept. for trace in payload: - assert 1.0 == trace[0]["metrics"].get(KEEP_SPANS_RATE_KEY, -1) + assert 1.0 == trace[0]["metrics"].get(_KEEP_SPANS_RATE_KEY, -1) # 2. We fail to write 4 traces because of size limitation. for trace in traces_too_big: @@ -392,7 +392,7 @@ def test_keep_rate(self): # 50% of traces kept (refers to the past). # We had 4 successfully written and 4 dropped. for trace in payload: - assert 0.5 == trace[0]["metrics"].get(KEEP_SPANS_RATE_KEY, -1) + assert 0.5 == trace[0]["metrics"].get(_KEEP_SPANS_RATE_KEY, -1) # 4. We write 1 trace successfully and fail to write 3. 
writer.write(traces[0]) @@ -408,7 +408,7 @@ def test_keep_rate(self): # 60% of traces kept (refers to the past). # We had 4 successfully written, then 4 dropped, then 2 written. for trace in payload: - assert 0.6 == trace[0]["metrics"].get(KEEP_SPANS_RATE_KEY, -1) + assert 0.6 == trace[0]["metrics"].get(_KEEP_SPANS_RATE_KEY, -1) class CIVisibilityWriterTests(AgentWriterTests): diff --git a/tests/utils.py b/tests/utils.py index 1932033152f..5283e27e7cf 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -20,7 +20,7 @@ from ddtrace import Tracer from ddtrace import config as dd_config from ddtrace._trace.span import Span -from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.constants import _SPAN_MEASURED_KEY from ddtrace.ext import http from ddtrace.internal import agent from ddtrace.internal import core @@ -55,18 +55,18 @@ def assert_is_measured(span): """Assert that the span has the proper _dd.measured tag set""" - assert SPAN_MEASURED_KEY in span.get_metrics() - assert SPAN_MEASURED_KEY not in span.get_tags() - assert span.get_metric(SPAN_MEASURED_KEY) == 1 + assert _SPAN_MEASURED_KEY in span.get_metrics() + assert _SPAN_MEASURED_KEY not in span.get_tags() + assert span.get_metric(_SPAN_MEASURED_KEY) == 1 def assert_is_not_measured(span): """Assert that the span does not set _dd.measured""" - assert SPAN_MEASURED_KEY not in span.get_tags() - if SPAN_MEASURED_KEY in span.get_metrics(): - assert span.get_metric(SPAN_MEASURED_KEY) == 0 + assert _SPAN_MEASURED_KEY not in span.get_tags() + if _SPAN_MEASURED_KEY in span.get_metrics(): + assert span.get_metric(_SPAN_MEASURED_KEY) == 0 else: - assert SPAN_MEASURED_KEY not in span.get_metrics() + assert _SPAN_MEASURED_KEY not in span.get_metrics() def assert_span_http_status_code(span, code):