sentry-python-1.39.2/ (commit b244efcc80c9e17e515bdbd66cc0d51ae18aa5ca)

===== sentry-python-1.39.2/.craft.yml =====

minVersion: 0.34.1
targets:
  - name: pypi
    includeNames: /^sentry[_\-]sdk.*$/
  - name: gh-pages
  - name: registry
    sdks:
      pypi:sentry-sdk:
  - name: github
  - name: aws-lambda-layer
    includeNames: /^sentry-python-serverless-\d+(\.\d+)*\.zip$/
    layerName: SentryPythonServerlessSDK
    compatibleRuntimes:
      - name: python
        versions:
          # The number of versions must be, at most, the maximum number of
          # runtimes AWS Lambda permits for a layer (currently 15).
          # On the other hand, AWS Lambda does not support every Python runtime.
          # The supported runtimes are available in the following link:
          # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html
          - python3.7
          - python3.8
          - python3.9
          - python3.10
          - python3.11
    license: MIT
  - name: sentry-pypi
    internalPypiRepo: getsentry/pypi
changelog: CHANGELOG.md
changelogPolicy: auto
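The comment in the aws-lambda-layer target above notes that AWS caps a layer at 15 compatible runtimes. A minimal sketch of how that cap could be checked locally (a hypothetical helper, not part of the repository; assumes PyYAML is installed and the file is at `.craft.yml`):

```python
# Hypothetical sanity check for the AWS Lambda layer runtime cap described in
# the .craft.yml comment above. Not part of the repo; assumes PyYAML.
import yaml

MAX_LAMBDA_RUNTIMES = 15  # current AWS limit per layer, per the comment above

with open(".craft.yml") as f:
    config = yaml.safe_load(f)

# Find the aws-lambda-layer target and check each runtime's version list.
layer = next(t for t in config["targets"] if t.get("name") == "aws-lambda-layer")
for runtime in layer["compatibleRuntimes"]:
    versions = runtime["versions"]
    assert len(versions) <= MAX_LAMBDA_RUNTIMES, (
        f"{runtime['name']}: {len(versions)} versions exceed the AWS cap"
    )
print("craft config is within the AWS Lambda layer runtime cap")
```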
===== sentry-python-1.39.2/.flake8 =====

[flake8]
extend-ignore =
  # Handled by black (Whitespace before ':' -- handled by black)
  E203,
  # Handled by black (Line too long)
  E501,
  # Sometimes not possible due to execution order (Module level import is not at top of file)
  E402,
  # I don't care (Do not assign a lambda expression, use a def)
  E731,
  # does not apply to Python 2 (redundant exception types by flake8-bugbear)
  B014,
  # I don't care (Lowercase imported as non-lowercase by pep8-naming)
  N812,
  # is a worse version of and conflicts with B902 (first argument of a classmethod should be named cls)
  N804,
extend-exclude=checkouts,lol*
exclude =
  # gRPC generated files
  grpc_test_service_pb2.py
  grpc_test_service_pb2_grpc.py

===== sentry-python-1.39.2/.github/ISSUE_TEMPLATE/bug.yml =====

name: 🐞 Bug Report
description: Tell us about something that's not working the way we (probably) intend.
body:
  - type: dropdown
    id: type
    attributes:
      label: How do you use Sentry?
      options:
        - Sentry Saas (sentry.io)
        - Self-hosted/on-premise
    validations:
      required: true
  - type: input
    id: version
    attributes:
      label: Version
      description: Which SDK version?
      placeholder: ex. 1.5.2
    validations:
      required: true
  - type: textarea
    id: repro
    attributes:
      label: Steps to Reproduce
      description: How can we see what you're seeing? Specific is terrific.
      placeholder: |-
        1. What
        2. you
        3. did.

        Extra points for also including the output of `pip freeze --all`.
    validations:
      required: true
  - type: textarea
    id: expected
    attributes:
      label: Expected Result
    validations:
      required: true
  - type: textarea
    id: actual
    attributes:
      label: Actual Result
      description: Logs? Screenshots? Yes, please.
    validations:
      required: true
  - type: markdown
    attributes:
      value: |-
        ## Thanks 🙏
    validations:
      required: false

===== sentry-python-1.39.2/.github/ISSUE_TEMPLATE/config.yml =====

blank_issues_enabled: false
contact_links:
  - name: Support Request
    url: https://sentry.io/support
    about: Use our dedicated support channel for paid accounts.

===== sentry-python-1.39.2/.github/ISSUE_TEMPLATE/feature.yml =====

name: 💡 Feature Request
description: Create a feature request for the sentry-python SDK.
labels: 'enhancement'
body:
  - type: markdown
    attributes:
      value: Thanks for taking the time to file a feature request! Please fill out this form as completely as possible.
  - type: textarea
    id: problem
    attributes:
      label: Problem Statement
      description: A clear and concise description of what you want and what your use case is.
      placeholder: |-
        I want to make whirled peas, but Sentry doesn't blend.
    validations:
      required: true
  - type: textarea
    id: expected
    attributes:
      label: Solution Brainstorm
      description: We know you have bright ideas to share ... share away, friend.
      placeholder: |-
        Add a blender to Sentry.
    validations:
      required: true
  - type: markdown
    attributes:
      value: |-
        ## Thanks 🙏
        Check our [triage docs](https://open.sentry.io/triage/) for what to expect next.

===== sentry-python-1.39.2/.github/PULL_REQUEST_TEMPLATE.md =====

---

## General Notes

Thank you for contributing to `sentry-python`! Please add tests to validate your changes, and lint your code using `tox -e linters`.

Running the test suite on your PR might require maintainer approval. Some tests (AWS Lambda) additionally require a maintainer to add a special label to run, and will fail if the label is not present.

#### For maintainers

Sensitive test suites require maintainer review to ensure that tests do not compromise our secrets. This review must be repeated after any code revisions.

Before running sensitive test suites, please carefully check the PR. Then, apply the `Trigger: tests using secrets` label. The label will be removed after any code changes to enforce our policy requiring maintainers to review all code revisions before running sensitive tests.
===== sentry-python-1.39.2/.github/dependabot.yml =====

version: 2
updates:
  - package-ecosystem: pip
    directory: "/"
    schedule:
      interval: weekly
    open-pull-requests-limit: 10
    allow:
      - dependency-type: direct
      - dependency-type: indirect
    ignore:
      - dependency-name: pytest
        versions:
          - "> 3.7.3"
      - dependency-name: flake8 # Later versions dropped Python 2 support
        versions:
          - "> 5.0.4"
      - dependency-name: jsonschema # Later versions dropped Python 2 support
        versions:
          - "> 3.2.0"
      - dependency-name: pytest-cov
        versions:
          - "> 2.8.1"
      - dependency-name: pytest-forked
        versions:
          - "> 1.1.3"
      - dependency-name: sphinx
        versions:
          - ">= 2.4.a, < 2.5"
      - dependency-name: tox
        versions:
          - "> 3.7.0"
      - dependency-name: werkzeug
        versions:
          - "> 0.15.5, < 1"
      - dependency-name: werkzeug
        versions:
          - ">= 1.0.a, < 1.1"
      - dependency-name: mypy
        versions:
          - "0.800"
      - dependency-name: sphinx
        versions:
          - 3.4.3
  - package-ecosystem: gitsubmodule
    directory: "/"
    schedule:
      interval: weekly
    open-pull-requests-limit: 10
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: weekly
    open-pull-requests-limit: 10

===== sentry-python-1.39.2/.github/workflows/ci.yml =====

name: CI

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:

permissions:
  contents: read

env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  lint:
    name: Lint Sources
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: 3.12
      - run: |
          pip install tox
          tox -e linters

  check-ci-config:
    name: Check CI config
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: 3.12
      - run: |
          pip install jinja2
          python scripts/split-tox-gh-actions/split-tox-gh-actions.py --fail-on-changes

  build_lambda_layer:
    name: Build Package
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: 3.12
      - name: Setup build cache
        uses: actions/cache@v3
        id: build_cache
        with:
          path: ${{ env.CACHED_BUILD_PATHS }}
          key: ${{ env.BUILD_CACHE_KEY }}
      - name: Build Packages
        run: |
          echo "Creating directory containing Python SDK Lambda Layer"
          pip install virtualenv
          # This will also trigger "make dist" that creates the Python packages
          make aws-lambda-layer
      - name: Upload Python Packages
        uses: actions/upload-artifact@v3
        with:
          name: ${{ github.sha }}
          path: |
            dist/*

  docs:
    name: Build SDK API Doc
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: 3.12
      - run: |
          pip install virtualenv
          make apidocs
          cd docs/_build && zip -r gh-pages ./
      - uses: actions/upload-artifact@v3.1.1
        with:
          name: ${{ github.sha }}
          path: docs/_build/gh-pages.zip
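The `check-ci-config` job above regenerates the split workflow files and fails the build when they drift from what is committed. A rough sketch of that `--fail-on-changes` pattern (this is not the actual `split-tox-gh-actions.py`, whose internals are not shown here; `generate` is a hypothetical stand-in for its jinja2 rendering step):

```python
# Sketch of a --fail-on-changes style check: regenerate a derived file in
# memory, compare it to what is committed, and exit non-zero on drift so the
# CI job fails. NOT the real split-tox-gh-actions.py.
import sys
from pathlib import Path


def generate(source: str) -> str:
    """Hypothetical stand-in for the real template rendering from tox.ini."""
    return f"# generated from {source}\n"


def fail_on_changes(source: str, target: Path) -> int:
    expected = generate(source)
    current = target.read_text() if target.exists() else ""
    if current != expected:
        print(f"{target} is out of date; rerun the generator and commit the result.")
        return 1  # non-zero exit fails the check-ci-config job
    return 0


if __name__ == "__main__":
    sys.exit(fail_on_changes("tox.ini", Path(".github/workflows/generated.yml")))
```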
===== sentry-python-1.39.2/.github/workflows/codeql-analysis.yml =====

# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches:
      - master
      - sentry-sdk-2.0
  pull_request:
    # The branches below must be a subset of the branches above
    branches:
      - master
      - sentry-sdk-2.0
  schedule:
    - cron: '18 18 * * 3'

permissions:
  contents: read

jobs:
  analyze:
    permissions:
      actions: read # for github/codeql-action/init to get workflow details
      contents: read # for actions/checkout to fetch code
      security-events: write # for github/codeql-action/autobuild to send a status report
    name: Analyze
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        language: ['python']
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
        # Learn more:
        # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # queries: ./path/to/local/query, your-org/your-repo/queries@main

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v2

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions

      # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
      #    and modify them (or add more) to build your code if your project
      #    uses a compiled language

      #- run: |
      #   make bootstrap
      #   make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2
===== sentry-python-1.39.2/.github/workflows/enforce-license-compliance.yml =====

name: Enforce License Compliance

on:
  push:
    branches:
      - master
      - main
      - release/*
      - sentry-sdk-2.0
  pull_request:
    branches:
      - master
      - main
      - sentry-sdk-2.0

jobs:
  enforce-license-compliance:
    runs-on: ubuntu-latest
    steps:
      - name: 'Enforce License Compliance'
        uses: getsentry/action-enforce-license-compliance@main
        with:
          fossa_api_key: ${{ secrets.FOSSA_API_KEY }}

===== sentry-python-1.39.2/.github/workflows/release.yml =====

name: Release

on:
  workflow_dispatch:
    inputs:
      version:
        description: Version to release
        required: true
      force:
        description: Force a release even when there are release-blockers (optional)
        required: false

jobs:
  release:
    runs-on: ubuntu-latest
    name: "Release a new version"
    steps:
      - uses: actions/checkout@v4
        with:
          token: ${{ secrets.GH_RELEASE_PAT }}
          fetch-depth: 0
      - name: Prepare release
        uses: getsentry/action-prepare-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GH_RELEASE_PAT }}
        with:
          version: ${{ github.event.inputs.version }}
          force: ${{ github.event.inputs.force }}

===== sentry-python-1.39.2/.github/workflows/scripts/trigger_tests_on_label.py =====

#!/usr/bin/env python3
import argparse
import json
import os
from urllib.parse import quote
from urllib.request import Request, urlopen

LABEL = "Trigger: tests using secrets"


def _has_write(repo_id: int, username: str, *, token: str) -> bool:
    req = Request(
        f"https://api.github.com/repositories/{repo_id}/collaborators/{username}/permission",
        headers={"Authorization": f"token {token}"},
    )
    contents = json.load(urlopen(req, timeout=10))
    return contents["permission"] in {"admin", "write"}


def _remove_label(repo_id: int, pr: int, label: str, *, token: str) -> None:
    quoted_label = quote(label)
    req = Request(
        f"https://api.github.com/repositories/{repo_id}/issues/{pr}/labels/{quoted_label}",
        method="DELETE",
        headers={"Authorization": f"token {token}"},
    )
    urlopen(req)


def main() -> int:
    parser = argparse.ArgumentParser()
    parser.add_argument("--repo-id", type=int, required=True)
    parser.add_argument("--pr", type=int, required=True)
    parser.add_argument("--event", required=True)
    parser.add_argument("--username", required=True)
    parser.add_argument("--label-names", type=json.loads, required=True)
    args = parser.parse_args()

    token = os.environ["GITHUB_TOKEN"]

    write_permission = _has_write(args.repo_id, args.username, token=token)

    if (
        not write_permission
        # `reopened` is included here due to close => push => reopen
        and args.event in {"synchronize", "reopened"}
        and LABEL in args.label_names
    ):
        print(f"Invalidating label [{LABEL}] due to code change...")
        _remove_label(args.repo_id, args.pr, LABEL, token=token)
        args.label_names.remove(LABEL)

    if write_permission or LABEL in args.label_names:
        print("Permissions passed!")
        print(f"- has write permission: {write_permission}")
        print(f"- has [{LABEL}] label: {LABEL in args.label_names}")
        return 0
    else:
        print("Permissions failed!")
        print(f"- has write permission: {write_permission}")
        print(f"- has [{LABEL}] label: {LABEL in args.label_names}")
        print(f"- args.label_names: {args.label_names}")
        print(
            f"Please have a collaborator add the [{LABEL}] label once they "
            f"have reviewed the code to trigger tests."
        )
        return 1


if __name__ == "__main__":
    raise SystemExit(main())
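The script's gate condenses to: a collaborator always passes; anyone else needs the label; and new commits from a non-collaborator invalidate a previously applied label. A hypothetical restatement as a pure function (not part of the repository), useful for checking the decision table:

```python
# Hypothetical restatement of trigger_tests_on_label.py's decision logic as a
# pure function; not part of the repo, just a way to reason about the gate.
LABEL = "Trigger: tests using secrets"


def gate(write_permission: bool, event: str, labels: list[str]) -> bool:
    labels = list(labels)  # don't mutate the caller's list
    # A code push (synchronize) or a close => push => reopen cycle invalidates
    # a label that was applied before the new commits were reviewed.
    if not write_permission and event in {"synchronize", "reopened"} and LABEL in labels:
        labels.remove(LABEL)
    # Collaborators always pass; everyone else needs the (still valid) label.
    return write_permission or LABEL in labels


assert gate(True, "synchronize", []) is True         # maintainer push
assert gate(False, "opened", [LABEL]) is True        # labeled fork PR
assert gate(False, "synchronize", [LABEL]) is False  # new commits revoke the label
assert gate(False, "labeled", [LABEL]) is True       # label re-applied after review
```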
===== sentry-python-1.39.2/.github/workflows/test-integrations-aws-lambda.yml =====

name: Test AWS Lambda

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  # XXX: We are using `pull_request_target` instead of `pull_request` because we want
  # this to run on forks with access to the secrets necessary to run the test suite.
  # Prefer to use `pull_request` when possible.
  pull_request_target:
    types: [labeled, opened, reopened, synchronize]

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read
  # `write` is needed to remove the `Trigger: tests using secrets` label
  pull-requests: write

env:
  SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID: ${{ secrets.SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID }}
  SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY: ${{ secrets.SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY }}
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  check-permissions:
    name: permissions check
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # v3.1.0
        with:
          persist-credentials: false
      - name: Check permissions on PR
        if: github.event_name == 'pull_request_target'
        run: |
          python3 -uS .github/workflows/scripts/trigger_tests_on_label.py \
            --repo-id ${{ github.event.repository.id }} \
            --pr ${{ github.event.number }} \
            --event ${{ github.event.action }} \
            --username "$ARG_USERNAME" \
            --label-names "$ARG_LABEL_NAMES"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          # these can contain special characters
          ARG_USERNAME: ${{ github.event.pull_request.user.login }}
          ARG_LABEL_NAMES: ${{ toJSON(github.event.pull_request.labels.*.name) }}
      - name: Check permissions on repo branch
        if: github.event_name == 'push'
        run: true

  test-aws_lambda-pinned:
    name: AWS Lambda (pinned)
    timeout-minutes: 30
    needs: check-permissions
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.9"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha || github.ref }}
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test aws_lambda pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-aws_lambda" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml

  check_required_tests:
    name: All AWS Lambda tests passed
    needs: test-aws_lambda-pinned
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-aws_lambda-pinned.result, 'failure') || contains(needs.test-aws_lambda-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1

===== sentry-python-1.39.2/.github/workflows/test-integrations-cloud-computing.yml =====

name: Test Cloud Computing

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read

env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  test-cloud_computing-latest:
    name: Cloud Computing (latest)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.7","3.10","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test boto3 latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-boto3-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test chalice latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-chalice-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test cloud_resource_context latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-cloud_resource_context-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test gcp latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-gcp-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml

  test-cloud_computing-pinned:
    name: Cloud Computing (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.6","3.7","3.9","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test boto3 pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-boto3" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test chalice pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-chalice" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test cloud_resource_context pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-cloud_resource_context" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test gcp pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-gcp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml

  test-cloud_computing-py27:
    name: Cloud Computing (py27)
    timeout-minutes: 30
    runs-on: ubuntu-20.04
    container: python:2.7
    steps:
      - uses: actions/checkout@v4
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test boto3 py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-boto3" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test chalice py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-chalice" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test cloud_resource_context py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-cloud_resource_context" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test gcp py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-gcp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml

  check_required_tests:
    name: All Cloud Computing tests passed
    needs: [test-cloud_computing-pinned, test-cloud_computing-py27]
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-cloud_computing-pinned.result, 'failure') || contains(needs.test-cloud_computing-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
      - name: Check for 2.7 failures
        if: contains(needs.test-cloud_computing-py27.result, 'failure') || contains(needs.test-cloud_computing-py27.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1

===== sentry-python-1.39.2/.github/workflows/test-integrations-common.yml =====

name: Test Common

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read

env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  test-common-pinned:
    name: Common (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test common pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-common" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml

  test-common-py27:
    name: Common (py27)
    timeout-minutes: 30
    runs-on: ubuntu-20.04
    container: python:2.7
    steps:
      - uses: actions/checkout@v4
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test common py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-common" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml

  check_required_tests:
    name: All Common tests passed
    needs: [test-common-pinned, test-common-py27]
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-common-pinned.result, 'failure') || contains(needs.test-common-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
      - name: Check for 2.7 failures
        if: contains(needs.test-common-py27.result, 'failure') || contains(needs.test-common-py27.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1

===== sentry-python-1.39.2/.github/workflows/test-integrations-data-processing.yml =====

name: Test Data Processing

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read

env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  test-data_processing-latest:
    name: Data Processing (latest)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.5","3.7","3.8","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test arq latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-arq-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test beam latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-beam-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test celery latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-celery-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test huey latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-huey-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test rq latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-rq-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml

  test-data_processing-pinned:
    name: Data Processing (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test arq pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-arq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test beam pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-beam" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test celery pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-celery" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test huey pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-huey" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test rq pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-rq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml

  test-data_processing-py27:
    name: Data Processing (py27)
    timeout-minutes: 30
    runs-on: ubuntu-20.04
    container: python:2.7
    steps:
      - uses: actions/checkout@v4
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test arq py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-arq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test beam py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-beam" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test celery py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-celery" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test huey py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-huey" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test rq py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-rq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml

  check_required_tests:
    name: All Data Processing tests passed
    needs: [test-data_processing-pinned, test-data_processing-py27]
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-data_processing-pinned.result, 'failure') || contains(needs.test-data_processing-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
      - name: Check for 2.7 failures
        if: contains(needs.test-data_processing-py27.result, 'failure') || contains(needs.test-data_processing-py27.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1

===== sentry-python-1.39.2/.github/workflows/test-integrations-databases.yml =====

name: Test Databases

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read

env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  test-databases-latest:
    name: Databases (latest)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.7","3.8","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_PASSWORD: sentry
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        # Maps tcp port 5432 on service container to the host
        ports:
          - 5432:5432
    env:
      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
      SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - uses: getsentry/action-clickhouse-in-ci@v1
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
          psql postgresql://postgres:sentry@localhost:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
          psql postgresql://postgres:sentry@localhost:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test asyncpg latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-asyncpg-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test clickhouse_driver latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-clickhouse_driver-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test pymongo latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-pymongo-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test sqlalchemy latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-sqlalchemy-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml

  test-databases-pinned:
    name: Databases (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_PASSWORD: sentry
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        # Maps tcp port 5432 on service container to the host
        ports:
          - 5432:5432
    env:
      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
      SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - uses: getsentry/action-clickhouse-in-ci@v1
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
          psql postgresql://postgres:sentry@localhost:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
          psql postgresql://postgres:sentry@localhost:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test asyncpg pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-asyncpg" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test clickhouse_driver pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-clickhouse_driver" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test pymongo pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-pymongo" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test sqlalchemy pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-sqlalchemy" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml

  test-databases-py27:
    name: Databases (py27)
    timeout-minutes: 30
    runs-on: ubuntu-20.04
    container: python:2.7
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_PASSWORD: sentry
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        # Maps tcp port 5432 on service container to the host
        ports:
          - 5432:5432
    env:
      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
      SENTRY_PYTHON_TEST_POSTGRES_HOST: postgres
    steps:
      - uses: actions/checkout@v4
      - uses: getsentry/action-clickhouse-in-ci@v1
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
          psql postgresql://postgres:sentry@postgres:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
          psql postgresql://postgres:sentry@postgres:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test asyncpg py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-asyncpg" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test clickhouse_driver py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-clickhouse_driver" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test pymongo py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-pymongo" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test sqlalchemy py27
        run: |
          set -x # print commands that are executed
"py2.7-sqlalchemy" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Generate coverage XML run: | coverage combine .coverage* coverage xml -i - uses: codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml check_required_tests: name: All Databases tests passed needs: [test-databases-pinned, test-databases-py27] # Always run this, even if a dependent job failed if: always() runs-on: ubuntu-20.04 steps: - name: Check for failures if: contains(needs.test-databases-pinned.result, 'failure') || contains(needs.test-databases-pinned.result, 'skipped') run: | echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1 - name: Check for 2.7 failures if: contains(needs.test-databases-py27.result, 'failure') || contains(needs.test-databases-py27.result, 'skipped') run: | echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1 sentry-python-1.39.2/.github/workflows/test-integrations-graphql.yml 0000664 0000000 0000000 00000011656 14547447232 0025735 0 ustar 00root root 0000000 0000000 name: Test GraphQL on: push: branches: - master - release/** - sentry-sdk-2.0 pull_request: # Cancel in progress workflows on pull_requests. # https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true permissions: contents: read env: BUILD_CACHE_KEY: ${{ github.sha }} CACHED_BUILD_PATHS: | ${{ github.workspace }}/dist-serverless jobs: test-graphql-latest: name: GraphQL (latest) timeout-minutes: 30 runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: python-version: ["3.7","3.8","3.11","3.12"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-20.04] steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Setup Test Env run: | pip install coverage "tox>=3,<4" - name: Erase coverage run: | coverage erase - name: Test ariadne latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-ariadne-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test gql latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-gql-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test graphene latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-graphene-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test strawberry latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-strawberry-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Generate coverage XML run: | coverage combine .coverage* coverage xml -i - uses: codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml test-graphql-pinned: name: GraphQL (pinned) timeout-minutes: 30 runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: python-version: ["3.7","3.8","3.11"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 # see 
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test ariadne pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-ariadne" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test gql pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-gql" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test graphene pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-graphene" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test strawberry pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-strawberry" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml

  check_required_tests:
    name: All GraphQL tests passed
    needs: test-graphql-pinned
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-graphql-pinned.result, 'failure') || contains(needs.test-graphql-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1

===== sentry-python-1.39.2/.github/workflows/test-integrations-miscellaneous.yml =====

name: Test Miscellaneous

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read

env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  test-miscellaneous-latest:
    name: Miscellaneous (latest)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.5","3.8","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test loguru latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-loguru-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test opentelemetry latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-opentelemetry-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test pure_eval latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-pure_eval-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test trytond latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-trytond-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml

  test-miscellaneous-pinned:
    name: Miscellaneous (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.5","3.6","3.7","3.8","3.9","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test loguru pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-loguru" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test opentelemetry pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-opentelemetry" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test pure_eval pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-pure_eval" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test trytond pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-trytond" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml

  check_required_tests:
    name: All Miscellaneous tests passed
    needs: test-miscellaneous-pinned
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-miscellaneous-pinned.result, 'failure') || contains(needs.test-miscellaneous-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1

===== sentry-python-1.39.2/.github/workflows/test-integrations-networking.yml =====

name: Test Networking

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

permissions:
  contents: read

env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless

jobs:
  test-networking-latest:
    name: Networking (latest)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.8","3.9","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test gevent latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-gevent-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test grpc latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-grpc-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test httpx latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-httpx-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test requests latest
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh "py${{ matrix.python-version }}-requests-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml

  test-networking-pinned:
    name: Networking (pinned)
    timeout-minutes: 30
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
        # python3.6 reached EOL and is no longer being supported on
        # new versions of hosted runners on Github Actions
        # ubuntu-20.04 is the last version that supported python3.6
        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
        os: [ubuntu-20.04]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test gevent pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-gevent" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test grpc pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-grpc" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test httpx pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-httpx" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test requests pinned
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-requests" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml

  test-networking-py27:
    name: Networking (py27)
    timeout-minutes: 30
    runs-on: ubuntu-20.04
    container: python:2.7
    steps:
      - uses: actions/checkout@v4
      - name: Setup Test Env
        run: |
          pip install coverage "tox>=3,<4"
      - name: Erase coverage
        run: |
          coverage erase
      - name: Test gevent py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-gevent" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test grpc py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-grpc" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test httpx py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-httpx" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Test requests py27
        run: |
          set -x # print commands that are executed
          ./scripts/runtox.sh --exclude-latest "py2.7-requests" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
      - name: Generate coverage XML
        run: |
          coverage combine .coverage*
          coverage xml -i
      - uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: coverage.xml

  check_required_tests:
    name: All Networking tests passed
    needs: [test-networking-pinned, test-networking-py27]
    # Always run this, even if a dependent job failed
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Check for failures
        if: contains(needs.test-networking-pinned.result, 'failure') || contains(needs.test-networking-pinned.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
      - name: Check for 2.7 failures
        if: contains(needs.test-networking-py27.result, 'failure') || contains(needs.test-networking-py27.result, 'skipped')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1

===== sentry-python-1.39.2/.github/workflows/test-integrations-web-frameworks-1.yml =====

name: Test Web Frameworks 1

on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true permissions: contents: read env: BUILD_CACHE_KEY: ${{ github.sha }} CACHED_BUILD_PATHS: | ${{ github.workspace }}/dist-serverless jobs: test-web_frameworks_1-latest: name: Web Frameworks 1 (latest) timeout-minutes: 30 runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: python-version: ["3.8","3.10","3.11","3.12"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-20.04] services: postgres: image: postgres env: POSTGRES_PASSWORD: sentry # Set health checks to wait until postgres has started options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 # Maps tcp port 5432 on service container to the host ports: - 5432:5432 env: SENTRY_PYTHON_TEST_POSTGRES_USER: postgres SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Setup Test Env run: | pip install coverage "tox>=3,<4" psql postgresql://postgres:sentry@localhost:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true psql postgresql://postgres:sentry@localhost:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true - name: Erase coverage run: | coverage erase - name: Test django latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-django-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test fastapi latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-fastapi-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test flask latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-flask-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test starlette latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-starlette-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Generate coverage XML run: | coverage combine .coverage* coverage xml -i - uses: codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml test-web_frameworks_1-pinned: name: Web Frameworks 1 (pinned) timeout-minutes: 30 runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11","3.12"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-20.04] services: postgres: image: postgres env: POSTGRES_PASSWORD: sentry # Set health checks to wait until postgres has started options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 # Maps tcp port 5432 on service container to the host ports: - 5432:5432 env: 
SENTRY_PYTHON_TEST_POSTGRES_USER: postgres SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Setup Test Env run: | pip install coverage "tox>=3,<4" psql postgresql://postgres:sentry@localhost:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true psql postgresql://postgres:sentry@localhost:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true - name: Erase coverage run: | coverage erase - name: Test django pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-django" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test fastapi pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-fastapi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test flask pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-flask" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test starlette pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-starlette" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Generate coverage XML run: | coverage combine .coverage* coverage xml -i - uses: codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml test-web_frameworks_1-py27: name: Web Frameworks 1 (py27) timeout-minutes: 30 runs-on: ubuntu-20.04 container: python:2.7 services: postgres: image: postgres env: POSTGRES_PASSWORD: sentry # Set health checks to wait until postgres has started options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 # Maps tcp port 5432 on service container to the host ports: - 5432:5432 env: SENTRY_PYTHON_TEST_POSTGRES_USER: postgres SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test SENTRY_PYTHON_TEST_POSTGRES_HOST: postgres steps: - uses: actions/checkout@v4 - name: Setup Test Env run: | pip install coverage "tox>=3,<4" psql postgresql://postgres:sentry@postgres:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true psql postgresql://postgres:sentry@postgres:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true - name: Erase coverage run: | coverage erase - name: Test django py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-django" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test fastapi py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-fastapi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test flask py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-flask" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test starlette py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-starlette" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Generate coverage XML run: | coverage combine .coverage* coverage xml -i - uses: 
codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml check_required_tests: name: All Web Frameworks 1 tests passed needs: [test-web_frameworks_1-pinned, test-web_frameworks_1-py27] # Always run this, even if a dependent job failed if: always() runs-on: ubuntu-20.04 steps: - name: Check for failures if: contains(needs.test-web_frameworks_1-pinned.result, 'failure') || contains(needs.test-web_frameworks_1-pinned.result, 'skipped') run: | echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1 - name: Check for 2.7 failures if: contains(needs.test-web_frameworks_1-py27.result, 'failure') || contains(needs.test-web_frameworks_1-py27.result, 'skipped') run: | echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1 sentry-python-1.39.2/.github/workflows/test-integrations-web-frameworks-2.yml 0000664 0000000 0000000 00000027005 14547447232 0027364 0 ustar 00root root 0000000 0000000 name: Test Web Frameworks 2 on: push: branches: - master - release/** - sentry-sdk-2.0 pull_request: # Cancel in progress workflows on pull_requests. # https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true permissions: contents: read env: BUILD_CACHE_KEY: ${{ github.sha }} CACHED_BUILD_PATHS: | ${{ github.workspace }}/dist-serverless jobs: test-web_frameworks_2-latest: name: Web Frameworks 2 (latest) timeout-minutes: 30 runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: python-version: ["3.5","3.6","3.7","3.8","3.11","3.12"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-20.04] steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Setup Test Env run: | pip install coverage "tox>=3,<4" - name: Erase coverage run: | coverage erase - name: Test aiohttp latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-aiohttp-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test asgi latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-asgi-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test bottle latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-bottle-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test falcon latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-falcon-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test pyramid latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-pyramid-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test quart latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-quart-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test redis latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-redis-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - 
name: Test rediscluster latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-rediscluster-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test sanic latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-sanic-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test starlite latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-starlite-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test tornado latest run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-tornado-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Generate coverage XML run: | coverage combine .coverage* coverage xml -i - uses: codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml test-web_frameworks_2-pinned: name: Web Frameworks 2 (pinned) timeout-minutes: 30 runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: python-version: ["3.5","3.6","3.7","3.8","3.9","3.11","3.12"] # python3.6 reached EOL and is no longer being supported on # new versions of hosted runners on Github Actions # ubuntu-20.04 is the last version that supported python3.6 # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-20.04] steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Setup Test Env run: | pip install coverage "tox>=3,<4" - name: Erase coverage run: | coverage erase - name: Test aiohttp pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-aiohttp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test asgi pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-asgi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test bottle pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-bottle" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test falcon pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-falcon" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test pyramid pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-pyramid" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test quart pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-quart" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test redis pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-redis" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test rediscluster pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-rediscluster" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test sanic pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-sanic" --cov=tests --cov=sentry_sdk 
--cov-report= --cov-branch - name: Test starlite pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-starlite" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test tornado pinned run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-tornado" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Generate coverage XML run: | coverage combine .coverage* coverage xml -i - uses: codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml test-web_frameworks_2-py27: name: Web Frameworks 2 (py27) timeout-minutes: 30 runs-on: ubuntu-20.04 container: python:2.7 steps: - uses: actions/checkout@v4 - name: Setup Test Env run: | pip install coverage "tox>=3,<4" - name: Erase coverage run: | coverage erase - name: Test aiohttp py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-aiohttp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test asgi py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-asgi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test bottle py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-bottle" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test falcon py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-falcon" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test pyramid py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-pyramid" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test quart py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-quart" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test redis py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-redis" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test rediscluster py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-rediscluster" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test sanic py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-sanic" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test starlite py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-starlite" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Test tornado py27 run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py2.7-tornado" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - name: Generate coverage XML run: | coverage combine .coverage* coverage xml -i - uses: codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} files: coverage.xml check_required_tests: name: All Web Frameworks 2 tests passed needs: [test-web_frameworks_2-pinned, test-web_frameworks_2-py27] # Always run this, even if a dependent job failed if: always() runs-on: ubuntu-20.04 steps: - name: Check for failures if: contains(needs.test-web_frameworks_2-pinned.result, 'failure') || contains(needs.test-web_frameworks_2-pinned.result, 'skipped') run: | echo "One of the dependent jobs has failed. You may need to re-run it." 
&& exit 1 - name: Check for 2.7 failures if: contains(needs.test-web_frameworks_2-py27.result, 'failure') || contains(needs.test-web_frameworks_2-py27.result, 'skipped') run: | echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1 sentry-python-1.39.2/.gitignore 0000664 0000000 0000000 00000000415 14547447232 0016455 0 ustar 00root root 0000000 0000000 *.pyc *.log *.egg *.db *.pid .python-version .coverage* .DS_Store .tox pip-log.txt *.egg-info /build /dist /dist-serverless sentry-python-serverless*.zip .cache .idea .eggs venv .venv .vscode/tags .pytest_cache .hypothesis relay pip-wheel-metadata .mypy_cache .vscode/ sentry-python-1.39.2/.gitmodules 0000664 0000000 0000000 00000000174 14547447232 0016644 0 ustar 00root root 0000000 0000000 [submodule "checkouts/data-schemas"] path = checkouts/data-schemas url = https://github.com/getsentry/sentry-data-schemas sentry-python-1.39.2/.pre-commit-config.yaml 0000664 0000000 0000000 00000001174 14547447232 0020751 0 ustar 00root root 0000000 0000000 # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.3.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - repo: https://github.com/psf/black rev: 22.6.0 hooks: - id: black exclude: ^(.*_pb2.py|.*_pb2_grpc.py) - repo: https://github.com/pycqa/flake8 rev: 5.0.4 hooks: - id: flake8 # Disabled for now, because it lists a lot of problems. #- repo: https://github.com/pre-commit/mirrors-mypy # rev: 'v0.931' # hooks: # - id: mypy sentry-python-1.39.2/.tool-versions 0000664 0000000 0000000 00000000016 14547447232 0017306 0 ustar 00root root 0000000 0000000 python 3.7.12 sentry-python-1.39.2/CHANGELOG.md 0000664 0000000 0000000 00000257320 14547447232 0016307 0 ustar 00root root 0000000 0000000 # Changelog ## 1.39.2 ### Various fixes & improvements - Fix timestamp in transaction created by OTel (#2627) by @antonpirker - Fix relative path in DB query source (#2624) by @antonpirker - Run more CI checks on 2.0 branch (#2625) by @sentrivana - Fix tracing `TypeError` for static and class methods (#2559) by @szokeasaurusrex - Fix missing `ctx` in Arq integration (#2600) by @ivanovart - Change `data_category` from `check_in` to `monitor` (#2598) by @sentrivana ## 1.39.1 ### Various fixes & improvements - Fix psycopg2 detection in the Django integration (#2593) by @sentrivana - Filter out empty string releases (#2591) by @sentrivana - Fixed local var not present when there is an error in a user's `error_sampler` function (#2511) by @antonpirker - Fixed typing in `aiohttp` (#2590) by @antonpirker ## 1.39.0 ### Various fixes & improvements - Add support for cluster clients from Redis SDK (#2394) by @md384 - Improve location reporting for timer metrics (#2552) by @mitsuhiko - Fix Celery `TypeError` with no-argument `apply_async` (#2575) by @szokeasaurusrex - Fix Lambda integration with EventBridge source (#2546) by @davidcroda - Add max tries to Spotlight (#2571) by @hazAT - Handle `os.path.devnull` access issues (#2579) by @sentrivana - Change `code.filepath` frame picking logic (#2568) by @sentrivana - Trigger AWS Lambda tests on label (#2538) by @sentrivana - Run permissions step on pull_request_target but not push (#2548) by @sentrivana - Hash AWS Lambda test functions based on current revision (#2557) by @sentrivana - Update Django version in tests (#2562) by @sentrivana - Make metrics tests non-flaky (#2572) by @antonpirker ## 1.38.0 ### Various fixes & 
improvements - Only add trace context to checkins and do not run `event_processors` for checkins (#2536) by @antonpirker - Metric span summaries (#2522) by @mitsuhiko - Add source context to code locations (#2539) by @jan-auer - Use in-app filepath instead of absolute path (#2541) by @antonpirker - Switch to `jinja2` for generating CI yamls (#2534) by @sentrivana ## 1.37.1 ### Various fixes & improvements - Fix `NameError` on `parse_version` with eventlet (#2532) by @sentrivana - build(deps): bump checkouts/data-schemas from `68def1e` to `e9f7d58` (#2501) by @dependabot ## 1.37.0 ### Various fixes & improvements - Move installed modules code to utils (#2429) by @sentrivana Note: We moved the internal function `_get_installed_modules` from `sentry_sdk.integrations.modules` to `sentry_sdk.utils`. If you use this function, you have to update your imports. - Add code locations for metrics (#2526) by @jan-auer - Add query source to DB spans (#2521) by @antonpirker - Send events to Spotlight sidecar (#2524) by @HazAT - Run integration tests with newest `pytest` (#2518) by @sentrivana - Bring tests up to date (#2512) by @sentrivana - Fix: Prevent global var from being discarded at shutdown (#2530) by @antonpirker - Fix: Scope transaction source not being updated in scope.span setter (#2519) by @sl0thentr0py ## 1.36.0 ### Various fixes & improvements - Django: Support Django 5.0 (#2490) by @sentrivana - Django: Handle the ASGI request body in the right way. (#2513) by @antonpirker - Flask: Test with Flask 3.0 (#2506) by @sentrivana - Celery: Do not create a span when task is triggered by Celery Beat (#2510) by @antonpirker - Redis: Ensure `RedisIntegration` is disabled, unless `redis` is installed (#2504) by @szokeasaurusrex - Quart: Fix Quart integration for Quart 0.19.4 (#2516) by @antonpirker - gRPC: Make async gRPC less noisy (#2507) by @jyggen ## 1.35.0 ### Various fixes & improvements - **Updated gRPC integration:** Asyncio interceptors and easier setup (#2369) by @fdellekart Our gRPC integration now instruments incoming unary-unary grpc requests and outgoing unary-unary, unary-stream grpc requests using grpcio channels. Everything now works for sync and async code. Before this release you had to add Sentry interceptors to your gRPC code by hand; now the only thing you need to do is add the `GRPCIntegration` to your `sentry_sdk.init()` call. (See [documentation](https://docs.sentry.io/platforms/python/integrations/grpc/) for more information): ```python import sentry_sdk from sentry_sdk.integrations.grpc import GRPCIntegration sentry_sdk.init( dsn="___PUBLIC_DSN___", enable_tracing=True, integrations=[ GRPCIntegration(), ], ) ``` The old way still works, but we strongly encourage you to update your code to the way described above. - Python 3.12: Replace deprecated datetime functions (#2502) by @sentrivana - Metrics: Unify datetime format (#2409) by @mitsuhiko - Celery: Set correct data in `check_in`s (#2500) by @antonpirker - Celery: Read timezone for Crons monitors from `celery_schedule` if existing (#2497) by @antonpirker - Django: Removing redundant code in Django tests (#2491) by @vagi8 - Django: Make reading the request body work in Django ASGI apps. (#2495) by @antonpirker - FastAPI: Use wraps on fastapi request call wrapper (#2476) by @nkaras - Fix: Probe for psycopg2 and psycopg3 parameters function.
(#2492) by @antonpirker - Fix: Remove unnecessary TYPE_CHECKING alias (#2467) by @rafrafek ## 1.34.0 ### Various fixes & improvements - Added Python 3.12 support (#2471, #2483) - Handle missing `connection_kwargs` in `patch_redis_client` (#2482) by @szokeasaurusrex - Run common test suite on Python 3.12 (#2479) by @sentrivana ## 1.33.1 ### Various fixes & improvements - Make parse_version work in utils.py itself. (#2474) by @antonpirker ## 1.33.0 ### Various fixes & improvements - New: Added `error_sampler` option (#2456) by @szokeasaurusrex - Python 3.12: Detect interpreter in shutdown state on thread spawn (#2468) by @mitsuhiko - Patch eventlet under Sentry SDK (#2464) by @szokeasaurusrex - Mitigate CPU spikes when sending lots of events with lots of data (#2449) by @antonpirker - Make `debug` option also configurable via environment (#2450) by @antonpirker - Make sure `get_dsn_parameters` is an actual function (#2441) by @sentrivana - Bump pytest-localserver, add compat comment (#2448) by @sentrivana - AWS Lambda: Update compatible runtimes for AWS Lambda layer (#2453) by @antonpirker - AWS Lambda: Load AWS Lambda secrets in Github CI (#2153) by @antonpirker - Redis: Connection attributes in `redis` database spans (#2398) by @antonpirker - Falcon: Falcon integration checks response status before reporting error (#2465) by @szokeasaurusrex - Quart: Support Quart 0.19 onwards (#2403) by @pgjones - Sanic: Sanic integration initial version (#2419) by @szokeasaurusrex - Django: Fix parsing of Django `path` patterns (#2452) by @sentrivana - Django: Add Django 4.2 to test suite (#2462) by @sentrivana - Polish changelog (#2434) by @sentrivana - Update CONTRIBUTING.md (#2443) by @krishvsoni - Update README.md (#2435) by @sentrivana ## 1.32.0 ### Various fixes & improvements - **New:** Error monitoring for some of the most popular Python GraphQL libraries: - Add [GQL GraphQL integration](https://docs.sentry.io/platforms/python/integrations/gql/) (#2368) by @szokeasaurusrex Usage: ```python import sentry_sdk from sentry_sdk.integrations.gql import GQLIntegration sentry_sdk.init( dsn='___PUBLIC_DSN___', integrations=[ GQLIntegration(), ], ) ``` - Add [Graphene GraphQL error integration](https://docs.sentry.io/platforms/python/integrations/graphene/) (#2389) by @sentrivana Usage: ```python import sentry_sdk from sentry_sdk.integrations.graphene import GrapheneIntegration sentry_sdk.init( dsn='___PUBLIC_DSN___', integrations=[ GrapheneIntegration(), ], ) ``` - Add [Strawberry GraphQL error & tracing integration](https://docs.sentry.io/platforms/python/integrations/strawberry/) (#2393) by @sentrivana Usage: ```python import sentry_sdk from sentry_sdk.integrations.strawberry import StrawberryIntegration sentry_sdk.init( dsn='___PUBLIC_DSN___', integrations=[ # make sure to set async_execution to False if you're executing # GraphQL queries synchronously StrawberryIntegration(async_execution=True), ], traces_sample_rate=1.0, ) ``` - Add [Ariadne GraphQL error integration](https://docs.sentry.io/platforms/python/integrations/ariadne/) (#2387) by @sentrivana Usage: ```python import sentry_sdk from sentry_sdk.integrations.ariadne import AriadneIntegration sentry_sdk.init( dsn='___PUBLIC_DSN___', integrations=[ AriadneIntegration(), ], ) ``` - Capture multiple named groups again (#2432) by @sentrivana - Don't fail when upstream scheme is unusual (#2371) by @vanschelven - Support new RQ version (#2405) by @antonpirker - Remove `utcnow`, `utcfromtimestamp` deprecated in Python 3.12 (#2415) by @rmad17 - Add 
`trace` to `__all__` in top-level `__init__.py` (#2401) by @lobsterkatie - Move minimetrics code to the SDK (#2385) by @mitsuhiko - Add configurable compression levels (#2382) by @mitsuhiko - Shift flushing by up to a rollup window (#2396) by @mitsuhiko - Make a consistent noop flush behavior (#2428) by @mitsuhiko - Stronger recursion protection (#2426) by @mitsuhiko - Remove `OpenTelemetryIntegration` from `__init__.py` (#2379) by @sentrivana - Update API docs (#2397) by @antonpirker - Pin some test requirements because new majors break our tests (#2404) by @antonpirker - Run more `requests`, `celery`, `falcon` tests (#2414) by @sentrivana - Move `importorskip`s in tests to `__init__.py` files (#2412) by @sentrivana - Fix `mypy` errors (#2433) by @sentrivana - Fix pre-commit issues (#2424) by @bukzor-sentryio - Update [CONTRIBUTING.md](https://github.com/getsentry/sentry-python/blob/master/CONTRIBUTING.md) (#2411) by @sentrivana - Bump `sphinx` from 7.2.5 to 7.2.6 (#2378) by @dependabot - [Experimental] Add explain plan to DB spans (#2315) by @antonpirker ## 1.31.0 ### Various fixes & improvements - **New:** Add integration for `clickhouse-driver` (#2167) by @mimre25 For more information, see the documentation for [clickhouse-driver](https://docs.sentry.io/platforms/python/configuration/integrations/clickhouse-driver). Usage: ```python import sentry_sdk from sentry_sdk.integrations.clickhouse_driver import ClickhouseDriverIntegration sentry_sdk.init( dsn='___PUBLIC_DSN___', integrations=[ ClickhouseDriverIntegration(), ], ) ``` - **New:** Add integration for `asyncpg` (#2314) by @mimre25 For more information, see the documentation for [asyncpg](https://docs.sentry.io/platforms/python/configuration/integrations/asyncpg/). Usage: ```python import sentry_sdk from sentry_sdk.integrations.asyncpg import AsyncPGIntegration sentry_sdk.init( dsn='___PUBLIC_DSN___', integrations=[ AsyncPGIntegration(), ], ) ``` - **New:** Allow overriding `propagate_traces` in `Celery` per task (#2331) by @jan-auer For more information, see the documentation for [Celery](https://docs.sentry.io//platforms/python/guides/celery/#distributed-traces). Usage: ```python import sentry_sdk from sentry_sdk.integrations.celery import CeleryIntegration # Enable global distributed traces (this is the default, just to be explicit.) sentry_sdk.init( dsn='___PUBLIC_DSN___', integrations=[ CeleryIntegration(propagate_traces=True), ], ) ... # This will NOT propagate the trace. (The task will start its own trace): my_task_b.apply_async( args=("some_parameter", ), headers={"sentry-propagate-traces": False}, ) ``` - Prevent Falcon integration from breaking ASGI apps (#2359) by @szokeasaurusrex - Backpressure: only downsample a max of 10 times (#2347) by @sl0thentr0py - Made NoOpSpan compatible with Transactions. (#2364) by @antonpirker - Cleanup ASGI integration (#2335) by @antonpirker - Pin anyio in tests (dep of httpx), because new major 4.0.0 breaks tests. (#2336) by @antonpirker - Added link to backpressure section in docs.
(#2354) by @antonpirker - Add .vscode to .gitignore (#2317) by @shoaib-mohd - Documenting Spans and Transactions (#2358) by @antonpirker - Fix in profiler: do not call getcwd from module root (#2329) by @Zylphrex - Fix deprecated version attribute (#2338) by @vagi8 - Fix transaction name in Starlette and FastAPI (#2341) by @antonpirker - Fix tests using Postgres (#2362) by @antonpirker - build(deps): Updated linting tooling (#2350) by @antonpirker - build(deps): bump sphinx from 7.2.4 to 7.2.5 (#2344) by @dependabot - build(deps): bump actions/checkout from 2 to 4 (#2352) by @dependabot - build(deps): bump checkouts/data-schemas from `ebc77d3` to `68def1e` (#2351) by @dependabot ## 1.30.0 ### Various fixes & improvements - Officially support Python 3.11 (#2300) by @sentrivana - Context manager monitor (#2290) by @szokeasaurusrex - Set response status code in transaction `response` context. (#2312) by @antonpirker - Add missing context kwarg to `_sentry_task_factory` (#2267) by @JohnnyDeuss - In Postgres take the connection params from the connection (#2308) by @antonpirker - Experimental: Allow using OTel for performance instrumentation (#2272) by @sentrivana This release includes experimental support for replacing Sentry's default performance monitoring solution with one powered by OpenTelemetry without having to do any manual setup. Try it out by running `pip install sentry-sdk[opentelemetry-experimental]` and then initializing the SDK with: ```python sentry_sdk.init( # ...your usual options... _experiments={"otel_powered_performance": True}, ) ``` This enables OpenTelemetry performance monitoring support for some of the most popular frameworks and libraries (Flask, Django, FastAPI, requests...). We're looking forward to your feedback! Please let us know about your experience in this discussion: https://github.com/getsentry/sentry/discussions/55023 **Important note:** Please note that this feature is experimental and in a proof-of-concept stage and is not meant for production use. It may be changed or removed at any point. - Enable backpressure handling by default (#2298) by @sl0thentr0py The SDK now dynamically downsamples transactions to reduce backpressure in high throughput systems. It starts a new `Monitor` thread that performs health checks every 10 seconds and decides whether to downsample (halving the rate each time) until the system is healthy again. To disable this behavior, use: ```python sentry_sdk.init( # ...your usual options... enable_backpressure_handling=False, ) ``` If your system serves a heavy load, please let us know how this feature works for you! Check out the [documentation](https://docs.sentry.io/platforms/python/configuration/options/#enable-backpressure-handling) for more information.
- Stop recording spans for internal web requests to Sentry (#2297) by @szokeasaurusrex - Add test for `ThreadPoolExecutor` (#2259) by @gggritso - Add docstrings for `Scope.update_from_*` (#2311) by @sentrivana - Moved `is_sentry_url` to utils (#2304) by @szokeasaurusrex - Fix: arq attribute error on settings, support worker args (#2260) by @rossmacarthur - Fix: Exceptions include detail property for their value (#2193) by @nicolassanmar - build(deps): bump mypy from 1.4.1 to 1.5.1 (#2319) by @dependabot - build(deps): bump sphinx from 7.1.2 to 7.2.4 (#2322) by @dependabot - build(deps): bump sphinx from 7.0.1 to 7.1.2 (#2296) by @dependabot - build(deps): bump checkouts/data-schemas from `1b85152` to `ebc77d3` (#2254) by @dependabot ## 1.29.2 ### Various fixes & improvements - Revert GraphQL integration (#2287) by @sentrivana ## 1.29.1 ### Various fixes & improvements - Fix GraphQL integration swallowing responses (#2286) by @sentrivana - Fix typo (#2283) by @sentrivana ## 1.29.0 ### Various fixes & improvements - Capture GraphQL client errors (#2243) by @sentrivana - The SDK will now create dedicated errors whenever an HTTP client makes a request to a `/graphql` endpoint and the response contains an error. You can opt out of this by providing `capture_graphql_errors=False` to the HTTP client integration. - Read MAX_VALUE_LENGTH from client options (#2121) (#2171) by @puittenbroek - Rename `request_bodies` to `max_request_body_size` (#2247) by @mgaligniana - Always sample checkin regardless of `sample_rate` (#2279) by @szokeasaurusrex - Add information to short-interval cron error message (#2246) by @lobsterkatie - Add DB connection attributes in spans (#2274) by @antonpirker - Add `db.system` to remaining Redis spans (#2271) by @AbhiPrasad - Clarified the procedure for running tests (#2276) by @szokeasaurusrex - Fix Chalice tests (#2278) by @sentrivana - Bump Black from 23.3.0 to 23.7.0 (#2256) by @dependabot - Remove py3.4 from tox.ini (#2248) by @sentrivana ## 1.28.1 ### Various fixes & improvements - Redis: Add support for redis.asyncio (#1933) by @Zhenay - Make sure each task that is started by Celery Beat has its own trace. (#2249) by @antonpirker - Add Sampling Decision to Trace Envelope Header (#2239) by @antonpirker - Do not add trace headers (`sentry-trace` and `baggage`) to HTTP requests to Sentry (#2240) by @antonpirker - Prevent adding `sentry-trace` header multiple times (#2235) by @antonpirker - Skip distributions with incomplete metadata (#2231) by @rominf - Remove stale.yml (#2245) by @hubertdeng123 - Django: Fix 404 handler being labeled as "generic ASGI request" (#1277) by @BeryJu ## 1.28.0 ### Various fixes & improvements - Add support for cron jobs in ARQ integration (#2088) by @lewazo - Backpressure handling prototype (#2189) by @sl0thentr0py - Add "replay" context to event payload (#2234) by @antonpirker - Update test Django app to be compatible with Django 4.x (#1794) by @DilLip-Chowdary-Codes ## 1.27.1 ### Various fixes & improvements - Add Starlette/FastAPI template tag for adding Sentry tracing information (#2225) by @antonpirker - By adding `{{ sentry_trace_meta }}` to your Starlette/FastAPI Jinja2 templates we will include Sentry trace information as a meta tag in the rendered HTML to allow your frontend to pick up and continue the trace started in the backend.
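For illustration, a minimal Jinja2 base template using the tag could look like the sketch below (the surrounding markup is hypothetical; only the `{{ sentry_trace_meta }}` tag itself comes from the SDK):

```html
<!doctype html>
<html>
  <head>
    <!-- Renders Sentry's trace meta tags (e.g. sentry-trace) into the page
         so the frontend SDK can pick up and continue the backend trace. -->
    {{ sentry_trace_meta }}
    <title>My app</title>
  </head>
  <body>{% block content %}{% endblock %}</body>
</html>
```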
- Fixed generation of baggage when a DSC is already in propagation context (#2232) by @antonpirker - Handle explicitly passing `None` for `trace_configs` in `aiohttp` (#2230) by @Harmon758 - Support newest Starlette versions (#2227) by @antonpirker ## 1.27.0 ### Various fixes & improvements - Support for SQLAlchemy 2.0 (#2200) by @antonpirker - Add instrumentation of `aiohttp` client requests (#1761) by @md384 - Add Django template tag for adding Sentry tracing information (#2222) by @antonpirker - By adding `{{ sentry_trace_meta }}` to your Django templates we will include Sentry trace information as a meta tag in the rendered HTML to allow your frontend to pick up and continue the trace started in the backend. - Update Flask HTML meta helper (#2203) by @antonpirker - Always take the trace ID from the propagation context (#2209) by @antonpirker - Fix trace context in event payload (#2205) by @antonpirker - Use new top level API in `trace_propagation_meta` (#2202) by @antonpirker - Do not overwrite existing baggage on outgoing requests (#2191, #2214) by @sentrivana - Set the transaction/span status from an OTel span (#2115) by @daniil-konovalenko - Fix propagation of OTel `NonRecordingSpan` (#2187) by @hartungstenio - Fix `TaskLockedException` handling in Huey integration (#2206) by @Zhenay - Add message format configuration arguments to Loguru integration (#2208) by @Gwill - Profiling: Add client reports for profiles (#2207) by @Zylphrex - CI: Fix CI (#2220) by @antonpirker - Dependencies: Bump `checkouts/data-schemas` from `7fdde87` to `1b85152` (#2218) by @dependabot - Dependencies: Bump `mypy` from 1.3.0 to 1.4.1 (#2194) by @dependabot - Docs: Change API doc theme (#2210) by @sentrivana - Docs: Allow (some) autocompletion for top-level API (#2213) by @sentrivana - Docs: Revert autocomplete hack (#2224) by @sentrivana ## 1.26.0 ### Various fixes & improvements - Tracing without performance (#2136) by @antonpirker - Load tracing information from environment (#2176) by @antonpirker - Auto-enable HTTPX integration if HTTPX is installed (#2177) by @sentrivana - Support for SOCKS proxies (#1050) by @Roguelazer - Wrap `parse_url` calls in `capture_internal_exceptions` (#2162) by @sentrivana - Run 2.7 tests in CI again (#2181) by @sentrivana - Crons: Do not support sub-minute cron intervals (#2172) by @antonpirker - Profile: Add function name to profiler frame cache (#2164) by @Zylphrex - Dependencies: bump checkouts/data-schemas from `0ed3357` to `7fdde87` (#2165) by @dependabot - Update changelog (#2163) by @sentrivana ## 1.25.1 ### Django update (ongoing) A collection of improvements to our Django integration. By: @mgaligniana (#1773) ### Various fixes & improvements - Fix `parse_url` (#2161) by @sentrivana and @antonpirker Our URL sanitization used in multiple integrations broke with the recent Python security update. If you started seeing `ValueError`s with `"'Filtered' does not appear to be an IPv4 or IPv6 address"`, this release fixes that. See [the original issue](https://github.com/getsentry/sentry-python/issues/2160) for more context. - Better version parsing in integrations (#2152) by @antonpirker We now properly support all integration versions that conform to [PEP 440](https://peps.python.org/pep-0440/). This replaces our naïve version parsing that wouldn't accept versions such as `2.0.0rc1` or `2.0.5.post1`.
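As a rough illustration of what the more lenient parsing accepts, a sketch using the internal `parse_version` helper that lives in `sentry_sdk.utils` (its exact return values are an internal detail and may differ between SDK versions):

```python
from sentry_sdk.utils import parse_version

# Pre-release and post-release version strings now parse instead of being
# rejected by the SDK's integration version check (shapes are illustrative):
print(parse_version("2.0.0rc1"))     # e.g. (2, 0, 0)
print(parse_version("2.0.5.post1"))  # e.g. (2, 0, 5)
print(parse_version("nonsense"))     # None when the string cannot be parsed
```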
- Align HTTP status code as span data field `http.response.status_code` (#2113) by @antonpirker - Do not encode cached value to determine size (#2143) by @sentrivana - Fix using `unittest.mock` whenever available (#1926) by @mgorny - Fix 2.7 `common` tests (#2145) by @sentrivana - Bump `actions/stale` from `6` to `8` (#1978) by @dependabot - Bump `black` from `22.12.0` to `23.3.0` (#1984) by @dependabot - Bump `mypy` from `1.2.0` to `1.3.0` (#2110) by @dependabot - Bump `sphinx` from `5.3.0` to `7.0.1` (#2112) by @dependabot ## 1.25.0 ### Various fixes & improvements - Support urllib3>=2.0.0 (#2148) by @asottile-sentry We're now supporting urllib3's new major version, 2.0.0. If you encounter issues (e.g. some of your dependencies not supporting the new urllib3 version yet) you might consider pinning the urllib3 version to `<2.0.0` manually in your project. Check out [the urllib3 migration guide](https://urllib3.readthedocs.io/en/latest/v2-migration-guide.html#migrating-as-an-application-developer) for details. - Auto-retry tests on failure (#2134) by @sentrivana - Correct `importlib.metadata` check in `test_modules` (#2149) by @asottile-sentry - Fix distribution name normalization (PEP-0503) (#2144) by @rominf - Fix `functions_to_trace` typing (#2141) by @rcmarron ## 1.24.0 ### Various fixes & improvements - **New:** Celery Beat exclude tasks option (#2130) by @antonpirker You can exclude Celery Beat tasks from being auto-instrumented. To do this, add a list of tasks you want to exclude as option `exclude_beat_tasks` when creating `CeleryIntegration`. The list can contain simple strings with the full task name, as specified in the Celery Beat schedule, or regular expressions to match multiple tasks. For more information, see the documentation for [Crons](https://docs.sentry.io/platforms/python/guides/celery/crons/). Usage: ```python exclude_beat_tasks = [ "some-task-a", "payment-check-.*", ] sentry_sdk.init( dsn='___PUBLIC_DSN___', integrations=[ CeleryIntegration( monitor_beat_tasks=True, exclude_beat_tasks=exclude_beat_tasks, ), ], ) ``` In this example the task `some-task-a` and all tasks with a name starting with `payment-check-` will be ignored. - **New:** Add support for **ExceptionGroups** (#2025) by @antonpirker _Note:_ If running Self-Hosted Sentry, you should wait to adopt this SDK update until after updating to the 23.6.0 (est. June 2023) release of Sentry. Updating early will not break anything, but you will not get the full benefit of the Exception Groups improvements to issue grouping that were added to the Sentry backend. - Prefer `importlib.metadata` over `pkg_resources` if available (#2081) by @sentrivana - Work with a copy of request, vars in the event (#2125) by @sentrivana - Pinned version of dependency that broke the build (#2133) by @antonpirker ## 1.23.1 ### Various fixes & improvements - Disable Django Cache spans by default. (#2120) by @antonpirker ## 1.23.0 ### Various fixes & improvements - **New:** Add `loguru` integration (#1994) by @PerchunPak Check [the documentation](https://docs.sentry.io/platforms/python/configuration/integrations/loguru/) for more information.
Usage: ```python from loguru import logger import sentry_sdk from sentry_sdk.integrations.loguru import LoguruIntegration sentry_sdk.init( dsn="___PUBLIC_DSN___", integrations=[ LoguruIntegration(), ], ) logger.debug("I am ignored") logger.info("I am a breadcrumb") logger.error("I am an event", extra=dict(bar=43)) logger.exception("An exception happened") ``` - An error event with the message `"I am an event"` will be created. - `"I am a breadcrumb"` will be attached as a breadcrumb to that event. - `bar` will end up in the `extra` attributes of that event. - `"An exception happened"` will send the current exception from `sys.exc_info()` with the stack trace to Sentry. If there's no exception, the current stack will be attached. - The debug message `"I am ignored"` will not be captured by Sentry. To capture it, set `level` to `DEBUG` or lower in `LoguruIntegration`. - Do not truncate request body if `request_bodies` is `"always"` (#2092) by @sentrivana - Fixed Celery headers for Beat auto-instrumentation (#2102) by @antonpirker - Add `db.operation` to Redis and MongoDB spans (#2089) by @antonpirker - Make sure we're importing `redis` the library (#2106) by @sentrivana - Add `include_source_context` option (#2020) by @farhat-nawaz and @sentrivana - Import `Markup` from `markupsafe` (#2047) by @rco-ableton - Fix `__qualname__` missing attribute in asyncio integration (#2105) by @sl0thentr0py - Remove relay extension from AWS Layer (#2068) by @sl0thentr0py - Add a note about `pip freeze` to the bug template (#2103) by @sentrivana ## 1.22.2 ### Various fixes & improvements - Fix: Django caching spans when using keyword arguments (#2086) by @antonpirker - Fix: Duration in Celery Beat tasks monitoring (#2087) by @antonpirker - Fix: Docstrings of SPANDATA (#2084) by @antonpirker ## 1.22.1 ### Various fixes & improvements - Fix: Handle a list of keys (not just a single key) in Django cache spans (#2082) by @antonpirker ## 1.22.0 ### Various fixes & improvements - Add `cache.hit` and `cache.item_size` to Django (#2057) by @antonpirker _Note:_ This will add spans for all requests to the caches configured in Django. This will probably add some overhead to your server and also add multiple spans to your performance waterfall diagrams. If you do not want this, you can disable this feature in the DjangoIntegration: ```python sentry_sdk.init( dsn="...", integrations=[ DjangoIntegration(cache_spans=False), ] ) ``` - Use `http.method` instead of `method` (#2054) by @AbhiPrasad - Handle non-int `exc.status_code` in Starlette (#2075) by @sentrivana - Handle SQLAlchemy `engine.name` being bytes (#2074) by @sentrivana - Fix `KeyError` in `capture_checkin` if SDK is not initialized (#2073) by @antonpirker - Use `functools.wraps` for `ThreadingIntegration` patches to fix attributes (#2080) by @EpicWink - Pin `urllib3` to <2.0.0 for now (#2069) by @sl0thentr0py ## 1.21.1 ### Various fixes & improvements - Do not send monitor_config when unset (#2058) by @evanpurkhiser - Add `db.system` span data (#2040, #2042) by @antonpirker - Fix memory leak in profiling (#2049) by @Zylphrex - Fix crash loop when returning `None` in `before_send` (#2045) by @sentrivana ## 1.21.0 ### Various fixes & improvements - Better handling of redis span/breadcrumb data (#2033) by @antonpirker _Note:_ With this release we will limit the description of redis db spans and the data in breadcrumbs representing redis db operations to 1024 characters. This can lead to truncated data.
If you do not want this, there is a new parameter `max_data_size` in `RedisIntegration`. You can set this to `None` to disable trimming. Example for **disabling** trimming of redis commands in spans or breadcrumbs: ```python sentry_sdk.init( integrations=[ RedisIntegration(max_data_size=None), ] ) ``` Example for custom trim size of redis commands in spans or breadcrumbs: ```python sentry_sdk.init( integrations=[ RedisIntegration(max_data_size=50), ] ) ``` - Add `db.system` to redis and SQLAlchemy db spans (#2037, #2038, #2039) (#2037) by @AbhiPrasad - Upgraded linting tooling (#2026) by @antonpirker - Made code more resilient. (#2031) by @antonpirker ## 1.20.0 ### Various fixes & improvements - Send all events to /envelope endpoint when tracing is enabled (#2009) by @antonpirker _Note:_ If you’re self-hosting Sentry 9, you need to stay on the previous version of the SDK or update your self-hosted to at least 20.6.0 - Profiling: Remove profile context from SDK (#2013) by @Zylphrex - Profiling: Additional performance improvements to the profiler (#1991) by @Zylphrex - Fix: Celery Beat monitoring without restarting the Beat process (#2001) by @antonpirker - Fix: Using the Codecov uploader instead of deprecated python package (#2011) by @antonpirker - Fix: Support for Quart (#2003) by @antonpirker ## 1.19.1 ### Various fixes & improvements - Make auto monitoring beat update support Celery 4 and 5 (#1989) by @antonpirker ## 1.19.0 ### Various fixes & improvements - **New:** [Celery Beat](https://docs.celeryq.dev/en/stable/userguide/periodic-tasks.html) auto monitoring (#1967) by @antonpirker The CeleryIntegration can now also monitor your Celery Beat scheduled tasks automatically using the new [Crons](https://blog.sentry.io/2023/01/04/cron-job-monitoring-beta-because-scheduled-jobs-fail-too/) feature of Sentry. To learn more see our [Celery Beat Auto Discovery](https://docs.sentry.io/platforms/python/guides/celery/crons/) documentation. Usage: ```python from celery import Celery, signals from celery.schedules import crontab import sentry_sdk from sentry_sdk.integrations.celery import CeleryIntegration app = Celery('tasks', broker='...') app.conf.beat_schedule = { 'set-in-beat-schedule': { 'task': 'tasks.some_important_task', 'schedule': crontab(...), }, } @signals.celeryd_init.connect def init_sentry(**kwargs): sentry_sdk.init( dsn='...', integrations=[CeleryIntegration(monitor_beat_tasks=True)], # 👈 here environment="local.dev.grace", release="v1.0", ) ``` This will auto-detect all scheduled tasks in your `beat_schedule` and will monitor them with Sentry [Crons](https://blog.sentry.io/2023/01/04/cron-job-monitoring-beta-because-scheduled-jobs-fail-too/). - **New:** [gRPC](https://grpc.io/) integration (#1911) by @hossein-raeisi The [gRPC](https://grpc.io/) integration instruments all incoming requests and outgoing unary-unary, unary-stream grpc requests using grpcio channels. To learn more see our [gRPC Integration](https://docs.sentry.io/platforms/python/configuration/integrations/grpc/) documentation.
On the server: ```python import grpc from sentry_sdk.integrations.grpc.server import ServerInterceptor server = grpc.server( thread_pool=..., interceptors=[ServerInterceptor()], ) ``` On the client: ```python import grpc from sentry_sdk.integrations.grpc.client import ClientInterceptor with grpc.insecure_channel("example.com:12345") as channel: channel = grpc.intercept_channel(channel, *[ClientInterceptor()]) ``` - **New:** socket integration (#1911) by @hossein-raeisi Use this integration to create spans for DNS resolution (`socket.getaddrinfo()`) and connection creation (`socket.create_connection()`). To learn more see our [Socket Integration](https://docs.sentry.io/platforms/python/configuration/integrations/socket/) documentation. Usage: ```python import sentry_sdk from sentry_sdk.integrations.socket import SocketIntegration sentry_sdk.init( dsn="___PUBLIC_DSN___", integrations=[ SocketIntegration(), ], ) ``` - Fix: Do not trim span descriptions. (#1983) by @antonpirker ## 1.18.0 ### Various fixes & improvements - **New:** Implement `EventScrubber` (#1943) by @sl0thentr0py To learn more see our [Scrubbing Sensitive Data](https://docs.sentry.io/platforms/python/data-management/sensitive-data/#event-scrubber) documentation. Add a new `EventScrubber` class that scrubs certain potentially sensitive interfaces with a `DEFAULT_DENYLIST`. The default scrubber is automatically run if `send_default_pii = False`: ```python import sentry_sdk from sentry_sdk.scrubber import EventScrubber sentry_sdk.init( # ... send_default_pii=False, event_scrubber=EventScrubber(), # this is set by default ) ``` You can also pass in a custom `denylist` to the `EventScrubber` class and filter additional fields that you want. ```python from sentry_sdk.scrubber import EventScrubber, DEFAULT_DENYLIST # custom denylist denylist = DEFAULT_DENYLIST + ["my_sensitive_var"] sentry_sdk.init( # ... send_default_pii=False, event_scrubber=EventScrubber(denylist=denylist), ) ``` - **New:** Added new `functions_to_trace` option as a central way of setting up performance instrumentation (#1960) by @antonpirker To learn more see our [Tracing Options](https://docs.sentry.io/platforms/python/configuration/options/#functions-to-trace) documentation. An optional list of functions that should be set up for performance monitoring. For each function in the list, a span will be created when the function is executed. ```python functions_to_trace = [ {"qualified_name": "tests.test_basics._hello_world_counter"}, {"qualified_name": "time.sleep"}, {"qualified_name": "collections.Counter.most_common"}, ] sentry_sdk.init( # ... traces_sample_rate=1.0, functions_to_trace=functions_to_trace, ) ``` - Updated denylist to include other widely used cookies/headers (#1972) by @antonpirker - Forward all `sentry-` baggage items (#1970) by @cleptric - Update OSS licensing (#1973) by @antonpirker - Profiling: Handle non-frame types in profiler (#1965) by @Zylphrex - Tests: Bad arq dependency in tests (#1966) by @Zylphrex - Better naming (#1962) by @antonpirker ## 1.17.0 ### Various fixes & improvements - **New:** Monitor Celery Beat tasks with Sentry [Cron Monitoring](https://docs.sentry.io/product/crons/). With this feature you can make sure that your Celery beat tasks run at the right time and see if they were successful or not. > **Warning** > Cron Monitoring is currently in beta. Beta features are still in progress and may have bugs. We recognize the irony.
> If you have any questions or feedback, please email us at crons-feedback@sentry.io, reach out via Discord (#cronjobs), or open an issue. Usage: ```python # File: tasks.py from celery import Celery, signals from celery.schedules import crontab import sentry_sdk from sentry_sdk.crons import monitor from sentry_sdk.integrations.celery import CeleryIntegration # 1. Setup your Celery beat configuration app = Celery('mytasks', broker='redis://localhost:6379/0') app.conf.beat_schedule = { 'set-in-beat-schedule': { 'task': 'tasks.tell_the_world', 'schedule': crontab(hour='10', minute='15'), 'args': ("in beat_schedule set", ), }, } # 2. Initialize Sentry either in `celeryd_init` or `beat_init` signal. #@signals.celeryd_init.connect @signals.beat_init.connect def init_sentry(**kwargs): sentry_sdk.init( dsn='...', integrations=[CeleryIntegration()], environment="local.dev.grace", release="v1.0.7-a1", ) # 3. Link your Celery task to a Sentry Cron Monitor @app.task @monitor(monitor_slug='3b861d62-ff82-4aa0-9cd6-b2b6403bd0cf') def tell_the_world(msg): print(msg) ``` - **New:** Add decorator for Sentry tracing (#1089) by @ynouri This allows you to use a decorator to set up custom performance instrumentation. To learn more see [Custom Instrumentation](https://docs.sentry.io/platforms/python/performance/instrumentation/custom-instrumentation/). Usage: Just add the new decorator to your function, and a span will be created for it: ```python import sentry_sdk @sentry_sdk.trace def my_complex_function(): # do stuff ... ``` - Make Django signals tracing optional (#1929) by @antonpirker See the [Django Guide](https://docs.sentry.io/platforms/python/guides/django) to learn more. - Deprecated `with_locals` in favor of `include_local_variables` (#1924) by @antonpirker - Added top level API to get current span (#1954) by @antonpirker - Profiling: Add profiler options to init (#1947) by @Zylphrex - Profiling: Set active thread id for quart (#1830) by @Zylphrex - Fix: Update `get_json` function call for werkzeug 2.1.0+ (#1939) by @michielderoos - Fix: Returning the task's result. (#1931) by @antonpirker - Fix: Rename MYPY to TYPE_CHECKING (#1934) by @untitaker - Fix: Fix type annotation for ignore_errors in sentry_sdk.init() (#1928) by @tiangolo - Tests: Start a real http server instead of mocking libs (#1938) by @antonpirker ## 1.16.0 ### Various fixes & improvements - **New:** Add [arq](https://arq-docs.helpmanual.io/) Integration (#1872) by @Zhenay This integration will create performance spans when arq jobs are enqueued and when they are run. It will also capture errors in jobs and link them to the performance spans.
Usage: ```python import asyncio from httpx import AsyncClient from arq import create_pool from arq.connections import RedisSettings import sentry_sdk from sentry_sdk.integrations.arq import ArqIntegration from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT sentry_sdk.init( dsn="...", integrations=[ArqIntegration()], ) async def download_content(ctx, url): session: AsyncClient = ctx['session'] response = await session.get(url) print(f'{url}: {response.text:.80}...') return len(response.text) async def startup(ctx): ctx['session'] = AsyncClient() async def shutdown(ctx): await ctx['session'].aclose() async def main(): with sentry_sdk.start_transaction(name="testing_arq_tasks", source=TRANSACTION_SOURCE_COMPONENT): redis = await create_pool(RedisSettings()) for url in ('https://facebook.com', 'https://microsoft.com', 'https://github.com', "asdf" ): await redis.enqueue_job('download_content', url) class WorkerSettings: functions = [download_content] on_startup = startup on_shutdown = shutdown if __name__ == '__main__': asyncio.run(main()) ``` - Update of [Falcon](https://falconframework.org/) Integration (#1733) by @bartolootrit - Adding [Cloud Resource Context](https://docs.sentry.io/platforms/python/configuration/integrations/cloudresourcecontext/) integration (#1882) by @antonpirker - Profiling: Use the transaction timestamps to anchor the profile (#1898) by @Zylphrex - Profiling: Add debug logs to profiling (#1883) by @Zylphrex - Profiling: Start profiler thread lazily (#1903) by @Zylphrex - Fixed checks for structured http data (#1905) by @antonpirker - Make `set_measurement` public API and remove experimental status (#1909) by @sl0thentr0py - Add `trace_propagation_targets` option (#1916) by @antonpirker - Add `enable_tracing` option to default `traces_sample_rate` to 1.0 (#1900) by @sl0thentr0py - Remove deprecated `tracestate` (#1907) by @sl0thentr0py - Sanitize URLs in Span description and breadcrumbs (#1876) by @antonpirker - Mechanism should default to true unless set explicitly (#1889) by @sl0thentr0py - Better setting of in-app in stack frames (#1894) by @antonpirker - Add workflow to test gevent (#1870) by @Zylphrex - Updated outdated HTTPX test matrix (#1917) by @antonpirker - Switch to MIT license (#1908) by @cleptric ## 1.15.0 ### Various fixes & improvements - New: Add [Huey](https://huey.readthedocs.io/en/latest/) Integration (#1555) by @Zhenay This integration will create performance spans when Huey tasks are enqueued and when they are executed.
Usage:

Task definition in `demo.py`:

```python
import time

from huey import SqliteHuey, crontab

import sentry_sdk
from sentry_sdk.integrations.huey import HueyIntegration

sentry_sdk.init(
    dsn="...",
    integrations=[
        HueyIntegration(),
    ],
    traces_sample_rate=1.0,
)

huey = SqliteHuey(filename='/tmp/demo.db')


@huey.task()
def add_numbers(a, b):
    return a + b
```

Running the tasks in `run.py`:

```python
from demo import add_numbers

import sentry_sdk
from sentry_sdk.integrations.huey import HueyIntegration
from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT


def main():
    sentry_sdk.init(
        dsn="...",
        integrations=[
            HueyIntegration(),
        ],
        traces_sample_rate=1.0,
    )

    with sentry_sdk.start_transaction(name="testing_huey_tasks", source=TRANSACTION_SOURCE_COMPONENT):
        r = add_numbers(1, 2)


if __name__ == "__main__":
    main()
```

- Profiling: Do not send single sample profiles (#1879) by @Zylphrex
- Profiling: Add additional test coverage for profiler (#1877) by @Zylphrex
- Profiling: Always use builtin time.sleep (#1869) by @Zylphrex
- Profiling: Default in_app decision to None (#1855) by @Zylphrex
- Profiling: Remove use of threading.Event (#1864) by @Zylphrex
- Profiling: Enable profiling on all transactions (#1797) by @Zylphrex
- FastAPI: Fix check for Starlette in FastAPI integration (#1868) by @antonpirker
- Flask: Do not overwrite default for username with email address in FlaskIntegration (#1873) by @homeworkprod
- Tests: Add py3.11 to test-common (#1871) by @Zylphrex
- Fix: Don't log whole event in before_send / event_processor drops (#1863) by @sl0thentr0py

## 1.14.0

### Various fixes & improvements

- Add `before_send_transaction` (#1840) by @antonpirker

Adds a hook (similar to `before_send`) that is called for all transaction events (performance related data).

Usage:

```python
import sentry_sdk


def strip_sensitive_data(event, hint):
    # modify event here (or return `None` if you want to drop the event entirely)
    return event


sentry_sdk.init(
    # ...
    before_send_transaction=strip_sensitive_data,
)
```

See also: https://docs.sentry.io/platforms/python/configuration/filtering/#using-platformidentifier-namebefore-send-transaction-

- Django: Always remove values of Django session related cookies. (#1842) by @antonpirker
- Profiling: Enable profiling for ASGI frameworks (#1824) by @Zylphrex
- Profiling: Better gevent support (#1822) by @Zylphrex
- Profiling: Add profile context to transaction (#1860) by @Zylphrex
- Profiling: Use co_qualname in python 3.11 (#1831) by @Zylphrex
- OpenTelemetry: fix Use dict for sentry-trace context instead of tuple (#1847) by @AbhiPrasad
- OpenTelemetry: fix extra dependency (#1825) by @bernardotorres
- OpenTelemetry: fix NoOpSpan updates scope (#1834) by @Zylphrex
- OpenTelemetry: Make sure to noop when there is no DSN (#1852) by @antonpirker
- FastAPI: Fix middleware being patched multiple times (#1841) by @JohnnyDeuss
- Starlette: Avoid import of pkg_resource with Starlette integration (#1836) by @mgu
- Removed code coverage target (#1862) by @antonpirker

## 1.13.0

### Various fixes & improvements

- Add Starlite integration (#1748) by @gazorby

Adding support for the [Starlite](https://starlite-api.github.io/starlite/1.48/) framework. Unhandled errors are captured. Performance spans for Starlite middleware are also captured. Thanks @gazorby for the great work!
Usage:

```python
from starlite import Starlite, get

import sentry_sdk
from sentry_sdk.integrations.starlite import StarliteIntegration

sentry_sdk.init(
    dsn="...",
    traces_sample_rate=1.0,
    integrations=[
        StarliteIntegration(),
    ],
)


@get("/")
def hello_world() -> dict[str, str]:
    """Keeping the tradition alive with hello world."""
    bla = 1 / 0  # causing an error
    return {"hello": "world"}


app = Starlite(route_handlers=[hello_world])
```

- Profiling: Remove sample buffer from profiler (#1791) by @Zylphrex
- Profiling: Performance tweaks to profile sampler (#1789) by @Zylphrex
- Add span for Django SimpleTemplateResponse rendering (#1818) by @chdsbd
- Use @wraps for Django Signal receivers (#1815) by @meanmail
- Add enqueued_at and started_at to rq job extra (#1024) by @kruvasyan
- Remove sanic v22 pin (#1819) by @sl0thentr0py
- Add support for `bytearray` and `memoryview` built-in types (#1833) by @Tarty
- Handle `"rc"` in SQLAlchemy version. (#1812) by @peterschutt
- Doc: Use .venv (not .env) as a virtual env location in CONTRIBUTING.md (#1790) by @tonyo
- Auto publish to internal pypi on release (#1823) by @asottile-sentry
- Added Python 3.11 to test suite (#1795) by @antonpirker
- Update test/linting dependencies (#1801) by @antonpirker
- Deps: bump sphinx from 5.2.3 to 5.3.0 (#1686) by @dependabot

## 1.12.1

### Various fixes & improvements

- Link errors to OTel spans (#1787) by @antonpirker

## 1.12.0

### Basic OTel support

This adds support to automatically integrate OpenTelemetry performance tracing with Sentry.

See the documentation on how to set it up:
https://docs.sentry.io/platforms/python/performance/instrumentation/opentelemetry/

Give it a try and let us know if you have any feedback or problems with using it.

By: @antonpirker (#1772, #1766, #1765)

### Various fixes & improvements

- Tox Cleanup (#1749) by @antonpirker
- CI: Fix Github action checks (#1780) by @Zylphrex
- Profiling: Introduce active thread id on scope (#1764) by @Zylphrex
- Profiling: Eagerly hash stack for profiles (#1755) by @Zylphrex
- Profiling: Resolve inherited method class names (#1756) by @Zylphrex

## 1.11.1

### Various fixes & improvements

- Move set_transaction_name out of event processor in fastapi/starlette (#1751) by @sl0thentr0py
- Expose proxy_headers as top level config and use in ProxyManager: https://docs.sentry.io/platforms/python/configuration/options/#proxy-headers (#1746) by @sl0thentr0py

## 1.11.0

### Various fixes & improvements

- Fix signals problem on sentry.io (#1732) by @antonpirker
- Fix reading FastAPI request body twice. (#1724) by @antonpirker
- ref(profiling): Do not error if already setup (#1731) by @Zylphrex
- ref(profiling): Use sleep scheduler by default (#1729) by @Zylphrex
- feat(profiling): Extract more frame info (#1702) by @Zylphrex
- Update actions/upload-artifact to v3.1.1 (#1718) by @mattgauntseo-sentry
- Performance optimizations (#1725) by @antonpirker
- feat(pymongo): add PyMongo integration (#1590) by @Agalin
- Move relay to port 5333 to avoid collisions (#1716) by @sl0thentr0py
- fix(utils): strip_string() checks text length counting bytes not chars (#1711) by @mgaligniana
- chore: remove jira workflow (#1707) by @vladanpaunovic
- build(deps): bump checkouts/data-schemas from `a214fbc` to `20ff3b9` (#1703) by @dependabot
- perf(profiling): Tune the sample profile generation code for performance (#1694) by @Zylphrex

## 1.10.1

### Various fixes & improvements

- Bug fixes for FastAPI and Sentry SDK 1.10.0 (#1699) by @antonpirker
- The wrapped receive() did not return anything.
  (#1698) by @antonpirker

## 1.10.0

### Various fixes & improvements

- Unified naming for span ops (#1661) by @antonpirker

We have unified the strings of our span operations. See https://develop.sentry.dev/sdk/performance/span-operations/

**WARNING**: If you have Sentry Dashboards or Sentry Discover queries that use `transaction.op` in their fields, conditions, aggregates or columns this change could potentially break your Dashboards/Discover setup.

Here is a list of the changes we made to the `op`s. Please adjust your dashboards and Discover queries accordingly:

| Old operation (`op`)     | New Operation (`op`)   |
| ------------------------ | ---------------------- |
| `asgi.server`            | `http.server`          |
| `aws.request`            | `http.client`          |
| `aws.request.stream`     | `http.client.stream`   |
| `celery.submit`          | `queue.submit.celery`  |
| `celery.task`            | `queue.task.celery`    |
| `django.middleware`      | `middleware.django`    |
| `django.signals`         | `event.django`         |
| `django.template.render` | `template.render`      |
| `django.view`            | `view.render`          |
| `http`                   | `http.client`          |
| `redis`                  | `db.redis`             |
| `rq.task`                | `queue.task.rq`        |
| `serverless.function`    | `function.aws`         |
| `serverless.function`    | `function.gcp`         |
| `starlette.middleware`   | `middleware.starlette` |

- Include framework in SDK name (#1662) by @antonpirker
- Asyncio integration (#1671) by @antonpirker
- Add exception handling to Asyncio Integration (#1695) by @antonpirker
- Fix asyncio task factory (#1689) by @antonpirker
- Have instrumentation for ASGI middleware receive/send callbacks. (#1673) by @antonpirker
- Use Django internal ASGI handling from Channels version 4.0.0. (#1688) by @antonpirker
- fix(integrations): Fix http putrequest when url is None (#1693) by @MattFlower
- build(deps): bump checkouts/data-schemas from `f0a57f2` to `a214fbc` (#1627) by @dependabot
- build(deps): bump flake8-bugbear from 22.9.11 to 22.9.23 (#1637) by @dependabot
- build(deps): bump sphinx from 5.1.1 to 5.2.3 (#1653) by @dependabot
- build(deps): bump actions/stale from 5 to 6 (#1638) by @dependabot
- build(deps): bump black from 22.8.0 to 22.10.0 (#1670) by @dependabot
- Remove unused node setup from ci. (#1681) by @antonpirker
- Check for Decimal is in_valid_sample_rate (#1672) by @Arvind2222
- Add session for aiohttp integration (#1605) by @denys-pidlisnyi
- feat(profiling): Extract qualified name for each frame (#1669) by @Zylphrex
- feat(profiling): Attach thread metadata to profiles (#1660) by @Zylphrex
- ref(profiling): Rename profiling frame keys (#1680) by @Zylphrex
- fix(profiling): get_frame_name only look at arguments (#1684) by @Zylphrex
- fix(profiling): Need to sample profile correctly (#1679) by @Zylphrex
- fix(profiling): Race condition spawning multiple profiling threads (#1676) by @Zylphrex
- tests(profiling): Add basic profiling tests (#1677) by @Zylphrex
- tests(profiling): Add tests for thread schedulers (#1683) by @Zylphrex

## 1.9.10

### Various fixes & improvements

- Use content-length header in ASGI instead of reading request body (#1646, #1631, #1595, #1573) (#1649) by @antonpirker
- Added newer Celery versions to test suite (#1655) by @antonpirker
- Django 4.x support (#1632) by @antonpirker
- Cancel old CI runs when new one is started. (#1651) by @antonpirker
- Increase max string size for desc (#1647) by @k-fish
- Pin Sanic version for CI (#1650) by @antonpirker
- Fix for partial signals in old Django and old Python versions.
  (#1641) by @antonpirker
- Convert profile output to the sample format (#1611) by @phacops
- Dynamically adjust profiler sleep time (#1634) by @Zylphrex

## 1.9.9

### Django update (ongoing)

- Instrument Django Signals so they show up in "Performance" view (#1526) by @BeryJu
- include other Django enhancements brought up by the community

### Various fixes & improvements

- fix(profiling): Profiler mode type hints (#1633) by @Zylphrex
- New ASGIMiddleware tests (#1600) by @antonpirker
- build(deps): bump mypy from 0.961 to 0.971 (#1517) by @dependabot
- build(deps): bump black from 22.3.0 to 22.8.0 (#1596) by @dependabot
- build(deps): bump sphinx from 5.0.2 to 5.1.1 (#1524) by @dependabot
- ref: upgrade linters to flake8 5.x (#1610) by @asottile-sentry
- feat(profiling): Introduce different profiler schedulers (#1616) by @Zylphrex
- fix(profiling): Check transaction sampled status before profiling (#1624) by @Zylphrex
- Wrap Baggage ser/deser in capture_internal_exceptions (#1630) by @sl0thentr0py
- Faster Tests (DjangoCon) (#1602) by @antonpirker
- feat(profiling): Add support for profiles_sample_rate (#1613) by @Zylphrex
- feat(profiling): Support for multithreaded profiles (#1570) by @Zylphrex

## 1.9.8

### Various fixes & improvements

- Baggage creation for head of trace (#1589) by @sl0thentr0py
  - The SDK now also generates new baggage entries for dynamic sampling when it is the first (head) SDK in the pipeline.

## 1.9.7

### Various fixes & improvements

- Let SentryAsgiMiddleware work with Starlette and FastAPI integrations (#1594) by @antonpirker

**Note:** Version 1.9.6 introduced a breaking change: projects that used Starlette or FastAPI and had manually set up `SentryAsgiMiddleware` could not start. This version fixes that behaviour. With this version, if you have a manual `SentryAsgiMiddleware` setup and are using Starlette or FastAPI, everything just works out of the box.

Sorry for any inconvenience the last version might have brought to you. We can do better, and in the future we will do our best to not break your code again.
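For illustration, a manual setup along the following lines now works together with the auto-enabled Starlette/FastAPI integrations (a minimal sketch, not taken from the release itself; the DSN is a placeholder):

```python
from fastapi import FastAPI

import sentry_sdk
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

sentry_sdk.init(dsn="...")

app = FastAPI()

# Wrapping the app manually used to clash with the auto-enabled
# Starlette/FastAPI integrations; with this release both can coexist.
app = SentryAsgiMiddleware(app)
```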
## 1.9.6

### Various fixes & improvements

- Auto-enable Starlette and FastAPI (#1533) by @antonpirker
- Add more version constraints (#1574) by @isra17
- Fix typo in starlette attribute check (#1566) by @sl0thentr0py

## 1.9.5

### Various fixes & improvements

- fix(redis): import redis pipeline using full path (#1565) by @olksdr
- Fix side effects for parallel tests (#1554) by @sl0thentr0py

## 1.9.4

### Various fixes & improvements

- Remove TRANSACTION_SOURCE_UNKNOWN and default to CUSTOM (#1558) by @sl0thentr0py
- feat(redis): Add instrumentation for redis pipeline (#1543) by @jjbayer
- Handle no release when uploading profiles (#1548) by @szokeasaurusrex

## 1.9.3

### Various fixes & improvements

- Wrap StarletteRequestExtractor in capture_internal_exceptions (#1551) by @sl0thentr0py

## 1.9.2

### Various fixes & improvements

- chore: remove quotes (#1545) by @vladanpaunovic

## 1.9.1

### Various fixes & improvements

- Fix FastAPI issues (#1532) (#1514) by @antonpirker
- Add deprecation warning for 3.4, 3.5 (#1541) by @sl0thentr0py
- Fast tests (#1504) by @antonpirker
- Replace Travis CI badge with GitHub Actions badge (#1538) by @153957
- chore(deps): update urllib3 minimum version with environment markers (#1312) by @miketheman
- Update Flask and Quart integrations (#1520) by @pgjones
- chore: Remove ancient examples from tracing prototype (#1528) by @sl0thentr0py
- fix(django): Send correct "url" transaction source if Django resolver fails to resolve (#1525) by @sl0thentr0py

## 1.9.0

### Various fixes & improvements

- feat(profiler): Add experimental profiler under experiments.enable_profiling (#1481) by @szokeasaurusrex
- Fixed problem with broken response and python-multipart (#1516) by @antonpirker

## 1.8.0

### Various fixes & improvements

- feat(starlette): add Starlette integration (#1441) by @sl0thentr0py

**Important:** Remove manual usage of `SentryAsgiMiddleware`! This is now done by the Starlette integration.

Usage:

```python
from starlette.applications import Starlette

import sentry_sdk
from sentry_sdk.integrations.starlette import StarletteIntegration

sentry_sdk.init(
    dsn="...",
    integrations=[StarletteIntegration()],
)

app = Starlette(debug=True, routes=[...])
```

- feat(fastapi): add FastAPI integration (#829) by @antonpirker

**Important:** Remove manual usage of `SentryAsgiMiddleware`! This is now done by the FastAPI integration.

Usage:

```python
from fastapi import FastAPI

import sentry_sdk
from sentry_sdk.integrations.starlette import StarletteIntegration
from sentry_sdk.integrations.fastapi import FastApiIntegration

sentry_sdk.init(
    dsn="...",
    integrations=[StarletteIntegration(), FastApiIntegration()],
)

app = FastAPI()
```

Yes, you have to add both, the `StarletteIntegration` **AND** the `FastApiIntegration`!

- fix: avoid sending empty Baggage header (#1507) by @intgr
- fix: properly freeze Baggage object (#1508) by @intgr
- docs: fix simple typo, collecter | collector (#1505) by @timgates42

## 1.7.2

### Various fixes & improvements

- feat(transactions): Transaction Source (#1490) by @antonpirker
- Removed (unused) sentry_timestamp header (#1494) by @antonpirker

## 1.7.1

### Various fixes & improvements

- Skip malformed baggage items (#1491) by @robyoung

## 1.7.0

### Various fixes & improvements

- feat(tracing): Dynamic Sampling Context / Baggage continuation (#1485) by @sl0thentr0py

The SDK now propagates the [W3C Baggage Header](https://www.w3.org/TR/baggage/) from incoming transactions to outgoing requests.
It also extracts Sentry specific [sampling information](https://develop.sentry.dev/sdk/performance/dynamic-sampling-context/) and adds it to the transaction headers to enable Dynamic Sampling in the product.

## 1.6.0

### Various fixes & improvements

- Fix Deployment (#1474) by @antonpirker
- Serverless V2 (#1450) by @antonpirker
- Use logging levelno instead of levelname. Levelnames can be overridden (#1449) by @rrauenza

## 1.5.12

### Various fixes & improvements

- feat(measurements): Add experimental set_measurement api on transaction (#1359) by @sl0thentr0py
- fix: Remove incorrect usage from flask helper example (#1434) by @BYK

## 1.5.11

### Various fixes & improvements

- chore: Bump mypy and fix abstract ContextManager typing (#1421) by @sl0thentr0py
- chore(issues): add link to Sentry support (#1420) by @vladanpaunovic
- fix: replace git.io links with redirect targets (#1412) by @asottile-sentry
- ref: Update error verbose for sentry init (#1361) by @targhs
- fix(sessions): Update session also for non sampled events and change filter order (#1394) by @adinauer

## 1.5.10

### Various fixes & improvements

- Remove Flask version constraint (#1395) by @antonpirker
- Change ordering of event drop mechanisms (#1390) by @adinauer

## 1.5.9

### Various fixes & improvements

- fix(sqlalchemy): Use context instead of connection in sqlalchemy integration (#1388) by @sl0thentr0py
- Update correct test command in contributing docs (#1377) by @targhs
- Update black (#1379) by @antonpirker
- build(deps): bump sphinx from 4.1.1 to 4.5.0 (#1376) by @dependabot
- fix: Auto-enabling Redis and Pyramid integration (#737) by @untitaker
- feat(testing): Add pytest-watch (#853) by @lobsterkatie
- Treat x-api-key header as sensitive (#1236) by @simonschmidt
- fix: Remove obsolete MAX_FORMAT_PARAM_LENGTH (#1375) by @blueyed

## 1.5.8

### Various fixes & improvements

- feat(asgi): Add support for setting transaction name to path in FastAPI (#1349) by @tiangolo
- fix(sqlalchemy): Change context manager type to avoid race in threads (#1368) by @Fofanko
- fix(perf): Fix transaction setter on scope to use containing_transaction to match with getter (#1366) by @sl0thentr0py
- chore(ci): Change stale GitHub workflow to run once a day (#1367) by @kamilogorek
- feat(django): Make django middleware expose more wrapped attributes (#1202) by @MattFisher

## 1.5.7

### Various fixes & improvements

- fix(serializer): Make sentry_repr dunder method to avoid mock problems (#1364) by @sl0thentr0py

## 1.5.6

### Various fixes & improvements

- Create feature.yml (#1350) by @vladanpaunovic
- Update contribution guide (#1346) by @antonpirker
- chore: add bug issue template (#1345) by @vladanpaunovic
- Added default value for auto_session_tracking (#1337) by @antonpirker
- docs(readme): reordered content (#1343) by @antonpirker
- fix(tests): Removed unsupported Django 1.6 from tests to avoid confusion (#1338) by @antonpirker
- Group captured warnings under separate issues (#1324) by @mnito
- build(changelogs): Use automated changelogs from Craft (#1340) by @BYK
- fix(aiohttp): AioHttpIntegration sentry_app_handle() now ignores ConnectionResetError (#1331) by @cmalek
- meta: Remove black GH action (#1339) by @sl0thentr0py
- feat(flask): Add `sentry_trace()` template helper (#1336) by @BYK

## 1.5.5

- Add session tracking to ASGI integration (#1329)
- Pinning test requirements versions (#1330)
- Allow classes to short circuit serializer with `sentry_repr` (#1322)
- Set default on json.dumps in compute_tracestate_value to ensure string
  conversion (#1318)

Work in this release contributed by @tomchuk. Thank you for your contribution!

## 1.5.4

- Add Python 3.10 to test suite (#1309)
- Capture only 5xx HTTP errors in Falcon Integration (#1314)
- Attempt custom urlconf resolve in `got_request_exception` as well (#1317)

## 1.5.3

- Pick up custom urlconf set by Django middlewares from request if any (#1308)

## 1.5.2

- Record event_processor client reports #1281
- Add a Quart integration #1248
- Sanic v21.12 support #1292
- Support Celery abstract tasks #1287

Work in this release contributed by @johnzeringue, @pgjones and @ahopkins. Thank you for your contribution!

## 1.5.1

- Fix django legacy url resolver regex substitution due to upstream CVE-2021-44420 fix #1272
- Record lost `sample_rate` events only if tracing is enabled #1268
- Fix gevent version parsing for non-numeric parts #1243
- Record span and breadcrumb when Django opens db connection #1250

## 1.5.0

- Also record client outcomes for before send #1211
- Add support for implicitly sized envelope items #1229
- Fix integration with Apache Beam 2.32, 2.33 #1233
- Remove Python 2.7 support for AWS Lambda layers in craft config #1241
- Refactor Sanic integration for v21.9 support #1212
- AWS Lambda Python 3.9 runtime support #1239
- Fix "shutdown_timeout" typing #1256

Work in this release contributed by @galuszkak, @kianmeng, @ahopkins, @razumeiko, @tomscytale, and @seedofjoy. Thank you for your contribution!

## 1.4.3

- Turned client reports on by default.

## 1.4.2

- Made envelope modifications in the HTTP transport non observable #1206

## 1.4.1

- Fix race condition between `finish` and `start_child` in tracing #1203

## 1.4.0

- No longer set the last event id for transactions #1186
- Added support for client reports (disabled by default for now) #1181
- Added `tracestate` header handling #1179
- Added real ip detection to asgi integration #1199

## 1.3.1

- Fix detection of contextvars compatibility with Gevent versions >=20.9.0 #1157

## 1.3.0

- Add support for Sanic versions 20 and 21 #1146

## 1.2.0

- Fix for `AWSLambda` Integration to handle other path formats for function initial handler #1139
- Fix for worker to set daemon attribute instead of deprecated setDaemon method #1093
- Fix for `bottle` Integration that discards `-dev` for version extraction #1085
- Fix for transport that adds a unified hook for capturing metrics about dropped events #1100
- Add `Httpx` Integration #1119
- Add support for china domains in `AWSLambda` Integration #1051

## 1.1.0

- Fix for `AWSLambda` integration returns value of original handler #1106
- Fix for `RQ` integration that only captures exception if RQ job has failed and ignore retries #1076
- Feature that supports Tracing for the `Tornado` integration #1060
- Feature that supports wild cards in `ignore_logger` in the `Logging` Integration #1053
- Fix for django that deals with template span description names that are either lists or tuples #1054

## 1.0.0

This release contains a breaking change

- **BREAKING CHANGE**: Feat: Moved `auto_session_tracking` experimental flag to a proper option and removed explicitly setting experimental `session_mode` in favor of auto detecting its value, hence enabling release health by default #994
- Fixed Django transaction name by setting the name to `request.path_info` rather than `request.path`
- Fix for tracing by getting HTTP headers from span rather than transaction when possible #1035
- Fix for Flask transactions missing request body in non errored transactions #1034
- Fix for honoring the
  `X-Forwarded-For` header #1037
- Fix for worker that logs data dropping of events with level error #1032

## 0.20.3

- Added scripts to support auto instrumentation of no-code AWS Lambda Python functions

## 0.20.2

- Fix incorrect regex in craft to include wheel file in pypi release

## 0.20.1

- Fix for error that occurs with Async Middlewares when the middleware is a function rather than a class

## 0.20.0

- Fix for header extraction for AWS lambda/API extraction
- Fix multiple \*\*kwargs type hints # 967
- Fix that corrects AWS lambda integration failure to detect the aws-lambda-ric 1.0 bootstrap #976
- Fix AWSLambda integration: variable "timeout_thread" referenced before assignment #977
- Use full git sha as release name #960
- **BREAKING CHANGE**: The default environment is now production, not based on release
- Django integration now creates transaction spans for template rendering
- Fix headers not parsed correctly in ASGI middleware, Decode headers before creating transaction #984
- Restored ability to have tracing disabled #991
- Fix Django async views not behaving asynchronously
- Performance improvement: supported pre-aggregated sessions

## 0.19.5

- Fix two regressions added in 0.19.2 with regard to sampling behavior when reading the sampling decision from headers.
- Increase internal transport queue size and make it configurable.

## 0.19.4

- Fix a bug that would make applications crash if an old version of `boto3` was installed.

## 0.19.3

- Automatically pass integration-relevant data to `traces_sampler` for AWS, AIOHTTP, ASGI, Bottle, Celery, Django, Falcon, Flask, GCP, Pyramid, Tryton, RQ, and WSGI integrations
- Fix a bug where the AWS integration would crash if event was anything besides a dictionary
- Fix the Django integration's ASGI handler for Channels 3.0. Thanks Luke Pomfrey!

## 0.19.2

- Add `traces_sampler` option.
- The SDK now attempts to infer a default release from various environment variables and the current git repo.
- Fix a crash with async views in Django 3.1.
- Fix a bug where complex URL patterns in Django would create malformed transaction names.
- Add options for transaction styling in AIOHTTP.
- Add basic attachment support (documentation tbd).
- fix a crash in the `pure_eval` integration.
- Integration for creating spans from `boto3`.

## 0.19.1

- Fix dependency check for `blinker` fixes #858
- Fix incorrect timeout warnings in AWS Lambda and GCP integrations #854

## 0.19.0

- Removed `_experiments.auto_enabling_integrations` in favor of just `auto_enabling_integrations` which is now enabled by default.

## 0.18.0

- **Breaking change**: The `no_proxy` environment variable is now honored when inferring proxy settings from the system. Thanks Xavier Fernandez!
- Added Performance/Tracing support for AWS and GCP functions.
- Fix an issue with Django instrumentation where the SDK modified `resolver_match.callback` and broke user code.

## 0.17.8

- Fix yet another bug with disjoint traces in Celery.
- Added support for Chalice 1.20. Thanks again to the folks at Cuenca MX!

## 0.17.7

- Internal: Change data category for transaction envelopes.
- Fix a bug under Celery 4.2+ that may have caused disjoint traces or missing transactions.

## 0.17.6

- Support for Flask 0.10 (only relaxing version check)

## 0.17.5

- Work around an issue in the Python stdlib that makes the entire process deadlock during garbage collection if events are sent from a `__del__` implementation.
- Add possibility to wrap ASGI application twice in middleware to enable split up of request scope data and exception catching.

## 0.17.4

- New integration for the Chalice web framework for AWS Lambda. Thanks to the folks at Cuenca MX!

## 0.17.3

- Fix an issue with the `pure_eval` integration in interaction with trimming where `pure_eval` would create a lot of useless local variables that then drown out the useful ones in trimming.

## 0.17.2

- Fix timezone bugs in GCP integration.

## 0.17.1

- Fix timezone bugs in AWS Lambda integration.
- Fix crash on GCP integration because of missing parameter `timeout_warning`.

## 0.17.0

- Fix a bug where class-based callables used as Django views (without using Django's regular class-based views) would not have `csrf_exempt` applied.
- New integration for Google Cloud Functions.
- Fix a bug where a recently released version of `urllib3` would cause the SDK to enter an infinite loop on networking and SSL errors.
- **Breaking change**: Remove the `traceparent_v2` option. The option has been ignored since 0.16.3, just remove it from your code.

## 0.16.5

- Fix a bug that caused Django apps to crash if the view didn't have a `__name__` attribute.

## 0.16.4

- Add experiment to avoid truncating span descriptions. Initialize with `init(_experiments={"smart_transaction_trimming": True})`.
- Add a span around the Django view in transactions to distinguish its operations from middleware operations.

## 0.16.3

- Fix AWS Lambda support for Python 3.8.
- The AWS Lambda integration now captures initialization/import errors for Python 3.
- The AWS Lambda integration now supports an option to warn about functions likely to time out.
- Testing for RQ 1.5
- Flip default of `traceparent_v2`. This change should have zero impact. The flag will be removed in 0.17.
- Fix compatibility bug with Django 3.1.

## 0.16.2

- New (optional) integrations for richer stacktraces: `pure_eval` for additional variables, `executing` for better function names.

## 0.16.1

- Flask integration: Fix a bug that prevented custom tags from being attached to transactions.

## 0.16.0

- Redis integration: add tags for more commands
- Redis integration: Patch rediscluster package if installed.
- Session tracking: A session is no longer considered crashed if there has been a fatal log message (only unhandled exceptions count).
- **Breaking change**: Revamping of the tracing API.
- **Breaking change**: `before_send` is no longer called for transactions.

## 0.15.1

- Fix fatal crash in Pyramid integration on 404.

## 0.15.0

- **Breaking change:** The ASGI middleware will now raise an exception if contextvars are not available, like it is already the case for other asyncio integrations.
- Contextvars are now used in more circumstances following a bugfix release of `gevent`. This will fix a few instances of wrong request data being attached to events while using an asyncio-based web framework.
- APM: Fix a bug in the SQLAlchemy integration where a span was left open if the database transaction had to be rolled back. This could have led to deeply nested span trees under that db query span.
- Fix a bug in the Pyramid integration where the transaction name could not be overridden at all.
- Fix a broken type annotation on `capture_exception`.
- Basic support for Django 3.1. More work is required for async middlewares to be instrumented properly for APM.

## 0.14.4

- Fix bugs in transport rate limit enforcement for specific data categories.
  The bug should not have affected anybody because we do not yet emit rate limits for specific event types/data categories.
- Fix a bug in `capture_event` where it would crash if given additional kwargs. Thanks to Tatiana Vasilevskaya!
- Fix a bug where contextvars from the request handler were inaccessible in AIOHTTP error handlers.
- Fix a bug where the Celery integration would crash if newrelic instrumented Celery as well.

## 0.14.3

- Attempt to use a monotonic clock to measure span durations in Performance/APM.
- Avoid overwriting explicitly set user data in web framework integrations.
- Allow passing keyword arguments to `capture_event` instead of configuring the scope.
- Feature development for session tracking.

## 0.14.2

- Fix a crash in Django Channels instrumentation when SDK is reinitialized.
- More contextual data for AWS Lambda (cloudwatch logs link).

## 0.14.1

- Fix a crash in the Django integration when used in combination with Django Rest Framework's test utilities for request.
- Fix high memory consumption when sending a lot of errors in the same process. Particularly noticeable in async environments.

## 0.14.0

- Show ASGI request data in Django 3.0
- New integration for the Trytond ERP framework. Thanks n1ngu!

## 0.13.5

- Fix trace continuation bugs in APM.
- No longer report `asyncio.CancelledError` as part of AIOHTTP integration.

## 0.13.4

- Fix package classifiers to mark this package as supporting Python 3.8. The SDK supported 3.8 before though.
- Update schema sent for transaction events (transaction status).
- Fix a bug where `None` inside request data was skipped/omitted.

## 0.13.3

- Fix an issue with the ASGI middleware that would cause Uvicorn to infer the wrong ASGI versions and call the wrapped application with the wrong argument count.
- Do not ignore the `tornado.application` logger.
- The Redis integration now instruments Redis blaster for breadcrumbs and transaction spans.

## 0.13.2

- Fix a bug in APM that would cause wrong durations to be displayed on non-UTC servers.

## 0.13.1

- Add new global functions for setting scope/context data.
- Fix a bug that would make Django 1.11+ apps crash when using function-based middleware.

## 0.13.0

- Remove an old deprecation warning (the behavior itself already changed a long time ago).
- The AIOHTTP integration now attaches the request body to crash reports. Thanks to Vitali Rebkavets!
- Add an experimental PySpark integration.
- First release to be tested under Python 3.8. No code changes were necessary though, so previous releases also might have worked.

## 0.12.3

- Various performance improvements to event sending.
- Avoid crashes when scope or hub is racy.
- Revert a change that broke applications using gevent and channels (in the same virtualenv, but different processes).
- Fix a bug that made the SDK crash on unicode in SQL.

## 0.12.2

- Fix a crash with ASGI (Django Channels) when the ASGI request type is neither HTTP nor Websockets.

## 0.12.1

- Temporarily remove sending of SQL parameters (as part of breadcrumbs or spans for APM) to Sentry to avoid memory consumption issues.

## 0.12.0

- Sentry now has a [Discord server](https://discord.gg/cWnMQeA)! Join the server to get involved in SDK development and ask questions.
- Fix a bug where the response object for httplib (or requests) was held onto for an unnecessarily long amount of time.
- APM: Add spans for more methods on `subprocess.Popen` objects.
- APM: Add spans for Django middlewares.
- APM: Add spans for ASGI requests.
- Automatically inject the ASGI middleware for Django Channels 2.0. This will **break your Channels 2.0 application if it is running on Python 3.5 or 3.6** (while previously it would "only" leak a lot of memory for each ASGI request). **Install `aiocontextvars` from PyPI to make it work again.**

## 0.11.2

- Fix a bug where the SDK would throw an exception on shutdown when running under eventlet.
- Add missing data to Redis breadcrumbs.

## 0.11.1

- Remove a faulty assertion (observed in environment with Django Channels and ASGI).

## 0.11.0

- Fix type hints for the logging integration. Thanks Steven Dignam!
- Fix an issue where scope/context data would leak in applications that use `gevent` with its threading monkeypatch. The fix is to avoid usage of contextvars in such environments. Thanks Ran Benita!
- Fix a reference cycle in the `ThreadingIntegration` that led to exceptions on interpreter shutdown. Thanks Guang Tian Li!
- Fix a series of bugs in the stdlib integration that broke usage of `subprocess`.
- More instrumentation for APM.
- New integration for SQLAlchemy (creates breadcrumbs from queries).
- New (experimental) integration for Apache Beam.
- Fix a bug in the `LoggingIntegration` that would send breadcrumbs timestamps in the wrong timezone.
- The `AiohttpIntegration` now sets the event's transaction name.
- Fix a bug that caused infinite recursion when serializing local variables that logged errors or otherwise created Sentry events.

## 0.10.2

- Fix a bug where a log record with non-strings as `extra` keys would make the SDK crash.
- Added ASGI integration for better hub propagation, request data for your events and capturing uncaught exceptions. Using this middleware explicitly in your code will also fix a few issues with Django Channels.
- Fix a bug where `celery-once` was deadlocking when used in combination with the celery integration.
- Fix a memory leak in the new tracing feature when it is not enabled.

## 0.10.1

- Fix bug where the SDK would yield a deprecation warning about `collections.abc` vs `collections`.
- Fix bug in stdlib integration that would cause spawned subprocesses to not inherit the environment variables from the parent process.

## 0.10.0

- Massive refactor in preparation for tracing. There are no intentional breaking changes, but there is a risk of breakage (hence the minor version bump). Two new client options `traces_sample_rate` and `traceparent_v2` have been added. Do not change the defaults in production, they will bring your application down or at least fill your Sentry project up with nonsense events.

## 0.9.5

- Do not use `getargspec` on Python 3 to evade deprecation warning.

## 0.9.4

- Revert a change in 0.9.3 that prevented passing a `unicode` string as DSN to `init()`.

## 0.9.3

- Add type hints for `init()`.
- Include user agent header when sending events.

## 0.9.2

- Fix a bug in the Django integration that would prevent the user from initializing the SDK at the top of `settings.py`. This bug was introduced in 0.9.1 for all Django versions, but has been there for much longer for Django 1.6 in particular.

## 0.9.1

- Fix a bug on Python 3.7 where gunicorn with gevent would cause the SDK to leak event data between requests.
- Fix a bug where the GNU backtrace integration would not parse certain frames.
- Fix a bug where the SDK would not pick up request bodies for Django Rest Framework based apps.
- Remove a few more headers containing sensitive data per default.
- Various improvements to type hints. Thanks Ran Benita!
- Add an event hint to access the log record from `before_send`.
- Fix a bug that would ignore `__tracebackhide__`. Thanks Matt Millican!
- Fix distribution information for mypy support (add `py.typed` file). Thanks Ran Benita!

## 0.9.0

- The SDK now captures `SystemExit` and other `BaseException`s when coming from within a WSGI app (Flask, Django, ...)
- Pyramid: No longer report an exception if there exists an exception view for it.

## 0.8.1

- Fix infinite recursion bug in Celery integration.

## 0.8.0

- Add the `always_run` option in excepthook integration.
- Fix performance issues when attaching large data to events. This is not really intended to be a breaking change, but this release does include a rewrite of a larger chunk of code, therefore the minor version bump.

## 0.7.14

- Fix crash when using Celery integration (`TypeError` when using `apply_async`).

## 0.7.13

- Fix a bug where `Ignore` raised in a Celery task would be reported to Sentry.
- Add experimental support for tracing PoC.

## 0.7.12

- Read from `X-Real-IP` for user IP address.
- Fix a bug that would not apply in-app rules for attached callstacks.
- It's now possible to disable automatic proxy support by passing `http_proxy=""`. Thanks Marco Neumann!

## 0.7.11

- Fix a bug that would send `errno` in an invalid format to the server.
- Fix import-time crash when running Python with `-O` flag.
- Fix a bug that would prevent the logging integration from attaching `extra` keys called `data`.
- Fix order in which exception chains are reported to match Raven behavior.
- New integration for the Falcon web framework. Thanks to Jacob Magnusson!

## 0.7.10

- Add more event trimming.
- Log Sentry's response body in debug mode.
- Fix a few bad typehints causing issues in IDEs.
- Fix a bug in the Bottle integration that would report HTTP exceptions (e.g. redirects) as errors.
- Fix a bug that would prevent use of `in_app_exclude` without setting `in_app_include`.
- Fix a bug where request bodies of Django Rest Framework apps were not captured.
- Suppress errors during SQL breadcrumb capturing in Django integration. Also change order in which formatting strategies are tried.

## 0.7.9

- New integration for the Bottle web framework. Thanks to Stepan Henek!
- Self-protect against broken mapping implementations and other broken reprs instead of dropping all local vars from a stacktrace. Thanks to Marco Neumann!

## 0.7.8

- Add support for Sanic versions 18 and 19.
- Fix a bug that causes an SDK crash when using composed SQL from psycopg2.

## 0.7.7

- Fix a bug that would not capture request bodies if they were empty JSON arrays, objects or strings.
- New GNU backtrace integration parses stacktraces from exception messages and appends them to existing stacktrace.
- Capture Tornado formdata.
- Support Python 3.6 in Sanic and AIOHTTP integration.
- Clear breadcrumbs before starting a new request.
- Fix a bug in the Celery integration that would drop pending events during worker shutdown (particularly an issue when running with `max_tasks_per_child = 1`)
- Fix a bug with `repr`ing locals whose `__repr__` simultaneously changes the WSGI environment or other data that we're also trying to serialize at the same time.

## 0.7.6

- Fix a bug where artificial frames for Django templates would not be marked as in-app and would always appear as the innermost frame. Implement a heuristic to show template frame closer to `render` or `parse` invocation.

## 0.7.5

- Fix bug in Tornado integration that would send broken cookies to the server.
- Fix a bug in the logging integration that would ignore the client option `with_locals`.

## 0.7.4

- Read release and environment from process environment like the Raven SDK does. The keys are called `SENTRY_RELEASE` and `SENTRY_ENVIRONMENT`.
- Fix a bug in the `serverless` integration where it would not push a new scope for each function call (leaking tags and other things across calls).
- Experimental support for type hints.

## 0.7.3

- Fix crash in AIOHTTP integration when integration was set up but disabled.
- Flask integration now adds usernames, email addresses based on the protocol Flask-User defines on top of Flask-Login.
- New threading integration catches exceptions from crashing threads.
- New method `flush` on hubs and clients. New global `flush` function.
- Add decorator for serverless functions to fix common problems in those environments.
- Fix a bug in the logging integration where using explicit handlers required enabling the integration.

## 0.7.2

- Fix `celery.exceptions.Retry` spamming in Celery integration.

## 0.7.1

- Fix `UnboundLocalError` crash in Celery integration.

## 0.7.0

- Properly display chained exceptions (PEP-3134).
- Rewrite celery integration to monkeypatch instead of using signals due to bugs in Celery 3's signal handling. The Celery scope is also now available in prerun and postrun signals.
- Fix Tornado integration to work with Tornado 6.
- Do not evaluate Django `QuerySet` when trying to capture local variables. Also an internal hook was added to overwrite `repr` for local vars.

## 0.6.9

- Second attempt at fixing the bug that was supposed to be fixed in 0.6.8.

  > No longer access arbitrary sequences in local vars due to possible side effects.

## 0.6.8

- No longer access arbitrary sequences in local vars due to possible side effects.

## 0.6.7

- Source code of Django templates is now displayed in stackframes, like Jinja templates in Flask already were.
- Updates to AWS Lambda integration for changes Amazon did to their Python 3.7 runtime.
- Fix a bug in the AIOHTTP integration that would report 300s and other HTTP status codes as errors.
- Fix a bug where a crashing `before_send` would crash the SDK and app.
- Fix a bug where cyclic references in e.g. local variables or `extra` data would crash the SDK.

## 0.6.6

- Un-break API of internal `Auth` object that we use in Sentry itself.

## 0.6.5

- Capture WSGI request data eagerly to save memory and avoid issues with uWSGI.
- Ability to use subpaths in DSN.
- Ignore `django.request` logger.

## 0.6.4

- Fix bug that would lead to an `AssertionError: stack must have at least one layer`, at least in testsuites for Flask apps.

## 0.6.3

- New integration for Tornado
- Fix request data in Django, Flask and other WSGI frameworks leaking between events.
- Fix infinite recursion when sending more events in `before_send`.

## 0.6.2

- Fix crash in AWS Lambda integration when using Zappa. This only silences the error, the underlying bug is still in Zappa.

## 0.6.1

- New integration for aiohttp-server.
- Fix crash when reading hostname in broken WSGI environments.

## 0.6.0

- Fix bug where a 429 without Retry-After would not be honored.
- Fix bug where proxy setting would not fall back to `http_proxy` for HTTPS traffic.
- A WSGI middleware is now available for catching errors and adding context about the current request to them.
- Using `logging.debug("test", exc_info=True)` will now attach the current stacktrace if no `sys.exc_info` is available.
- The Python 3.7 runtime for AWS Lambda is now supported.
- Fix a bug that would drop an event or parts of it when it contained bytes that were not UTF-8 encoded.
- Logging an exception will no longer add the exception as breadcrumb to the exception's own event.

## 0.5.5

- New client option `ca_certs`.
- Fix crash with Django and psycopg2.

## 0.5.4

- Fix deprecation warning in relation to the `collections` stdlib module.
- Fix bug that would crash Django and Flask when streaming responses are failing halfway through.

## 0.5.3

- Fix bug where using `push_scope` with a callback would not pop the scope.
- Fix crash when initializing the SDK in `push_scope`.
- Fix bug where IP addresses were sent when `send_default_pii=False`.

## 0.5.2

- Fix bug where events sent through the RQ integration were sometimes lost.
- Remove a deprecation warning about usage of `logger.warn`.
- Fix bug where large frame local variables would lead to the event being rejected by Sentry.

## 0.5.1

- Integration for Redis Queue (RQ)

## 0.5.0

- Fix a bug that would omit several debug logs during SDK initialization.
- Fix issue that sent an event key `""` Sentry wouldn't understand.
- **Breaking change:** The `level` and `event_level` options in the logging integration now work separately from each other.
- Fix a bug in the Sanic integration that would report the exception behind any HTTP error code.
- Fix a bug that would spam breadcrumbs in the Celery integration. Ignore logger `celery.worker.job`.
- Additional attributes on log records are now put into `extra`.
- Integration for Pyramid.
- `sys.argv` is put into extra automatically.

## 0.4.3

- Fix a bug that would leak WSGI responses.

## 0.4.2

- Fix a bug in the Sanic integration that would leak data between requests.
- Fix a bug that would hide all debug logging happening inside of the built-in transport.
- Fix a bug that would report errors for typos in Django's shell.

## 0.4.1

- Fix bug that would only show filenames in stacktraces but not the parent directories.

## 0.4.0

- Changed how integrations are initialized. Integrations are now configured and enabled per-client.

## 0.3.11

- Fix issue with certain deployment tools and the AWS Lambda integration.

## 0.3.10

- Set transactions for Django like in Raven. Which transaction behavior is used can be configured.
- Fix a bug which would omit frame local variables from stacktraces in Celery.
- New option: `attach_stacktrace`

## 0.3.9

- Bugfixes for AWS Lambda integration: Using Zappa did not catch any exceptions.

## 0.3.8

- Nicer log level for internal errors.

## 0.3.7

- Remove `repos` configuration option. There was never a way to make use of this feature.
- Fix a bug in `last_event_id`.
- Add Django SQL queries to breadcrumbs.
- Django integration won't set user attributes if they were already set.
- Report correct SDK version to Sentry.

## 0.3.6

- Integration for Sanic

## 0.3.5

- Integration for AWS Lambda
- Fix mojibake when encoding local variable values

## 0.3.4

- Performance improvement when storing breadcrumbs

## 0.3.3

- Fix crash when breadcrumbs had to be truncated

## 0.3.2

- Fixed an issue where some paths were not properly sent as absolute paths

sentry-python-1.39.2/CONTRIBUTING-aws-lambda.md

# Contributing to Sentry AWS Lambda Layer

All the general terms of the [CONTRIBUTING.md](CONTRIBUTING.md) apply.

## Development environment

You need to have an AWS account and the AWS CLI installed and set up.
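A quick way to confirm the CLI is set up correctly (a suggested sanity check, not part of the original guide; assumes your credentials are configured for the default profile):

```bash
# Prints the account id, user id and ARN of the configured identity.
aws sts get-caller-identity
```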
We put together two helper scripts that can help you with development:

- `./scripts/aws-deploy-local-layer.sh`

  This script [scripts/aws-deploy-local-layer.sh](scripts/aws-deploy-local-layer.sh) will take the code you have checked out locally, create a Lambda layer out of it and deploy it to the `eu-central-1` region of your configured AWS account using the `aws` CLI. The Lambda layer will have the name `SentryPythonServerlessSDK-local-dev`.

- `./scripts/aws-attach-layer-to-lambda-function.sh`

  You can use this script [scripts/aws-attach-layer-to-lambda-function.sh](scripts/aws-attach-layer-to-lambda-function.sh) to attach the Lambda layer you just deployed (using the first script) to one of your existing Lambda functions. You will have to give the name of the Lambda function to attach to as an argument. (See the script for details.)

With these two helper scripts it should be easy to rapidly iterate on your Lambda layer development.

sentry-python-1.39.2/CONTRIBUTING.md

# Contributing to Sentry SDK for Python

We welcome contributions to `sentry-python` by the community.

This file outlines the process to contribute to the SDK itself. For contributing to the documentation, please see the [Contributing to Docs](https://docs.sentry.io/contributing/) page.

## How to Report a Problem

Please search the [issue tracker](https://github.com/getsentry/sentry-python/issues) before creating a new issue (a problem or an improvement request). Please also ask in our [Sentry Community on Discord](https://discord.com/invite/Ww9hbqr) before submitting a new issue. There are a ton of great people in our Discord community ready to help you!

## Submitting Changes

- Fork the `sentry-python` repo and prepare your changes.
- Add tests for your changes to `tests/`.
- Run tests and make sure all of them pass.
- Submit a pull request, referencing any issues your changes address. Please follow our [commit message format](https://develop.sentry.dev/commit-messages/#commit-message-format) when naming your pull request.

We will review your pull request as soon as possible. Thank you for contributing!

## Development Environment

### Set up Python

Make sure that you have Python 3 installed. Version 3.7 or higher is required to run style checkers on pre-commit.

On macOS, we recommend using `brew` to install Python. For Windows, we recommend an official [python.org](https://www.python.org/downloads/) release.

### Fork and Clone the Repo

Before you can contribute, you will need to [fork the `sentry-python` repository](https://github.com/getsentry/sentry-python/fork). Then, clone the forked repository to your local development environment.

### Create a Virtual Environment

To keep your Python development environment and packages separate from the ones used by your operating system, create a [virtual environment](https://docs.python.org/3/tutorial/venv.html):

```bash
cd sentry-python

python -m venv .venv
```

Then, activate your virtual environment with the following command. You will need to repeat this step every time you wish to work on your changes for `sentry-python`.

```bash
source .venv/bin/activate
```

### Install `sentry-python` in Editable Mode

Install `sentry-python` in [editable mode](https://pip.pypa.io/en/latest/topics/local-project-installs/#editable-installs). This will make any changes you make to the SDK code locally immediately effective without you having to reinstall or copy anything.

```bash
pip install -e .
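# Optional sanity check (a suggested addition, not part of the original
# guide): the import should now resolve to your local checkout.
python -c "import sentry_sdk; print(sentry_sdk.VERSION)"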
```

**Hint:** Sometimes you need a sample project to run your new changes to `sentry-python`. In this case install the sample project in the same virtualenv and you should be good to go.

### Install Coding Style Pre-commit Hooks

This will make sure that your commits will have the correct coding style.

```bash
cd sentry-python

pip install -r linter-requirements.txt

pip install pre-commit

pre-commit install
```

That's it. You should be ready to make changes, run tests, and make commits! If you experience any problems, please don't hesitate to ping us in our [Discord Community](https://discord.com/invite/Ww9hbqr).

## Running Tests

To run the tests, first set up your development environment according to the instructions above. Then, install the required packages for running tests with the following command:

```bash
pip install -r test-requirements.txt
```

Once the requirements are installed, you can run all tests with the following command:

```bash
pytest tests/
```

If you would like to run the tests for a specific integration, use a command similar to the one below:

```bash
pytest -rs tests/integrations/flask/  # Replace "flask" with the specific integration you wish to test
```

**Hint:** Tests of integrations need additional dependencies. The switch `-rs` will show you why tests were skipped and what dependencies you need to install for the tests to run. (You can also consult the [tox.ini](tox.ini) file to see what dependencies are installed for each integration)

## Adding a New Integration

1. Write the integration.

   - Instrument all application instances by default. Prefer global signals/patches instead of configuring a specific instance. Don't make the user pass anything to your integration for anything to work. Aim for zero configuration.
   - Everybody monkeypatches. That means:
     - Make sure to think about conflicts with other monkeypatches when monkeypatching.
     - You don't need to feel bad about it.
   - Make sure your changes don't break end user contracts. The SDK should never alter the expected behavior of the underlying library or framework from the user's perspective and it shouldn't have any side effects.
   - Avoid modifying the hub, registering a new client or the like. The user drives the client, and the client owns integrations.
   - Allow the user to turn off the integration by changing the client. Check `Hub.current.get_integration(MyIntegration)` from within your signal handlers to see if your integration is still active before you do anything impactful (such as sending an event).

2. Write tests.

   - Consider the minimum versions supported, and test each version in a separate env in `tox.ini`.
   - Create a new folder in `tests/integrations/`, with an `__init__` file that skips the entire suite if the package is not installed.

3. Update package metadata.

   - We use `extras_require` in `setup.py` to communicate minimum version requirements for integrations. People can use this in combination with tools like Poetry or Pipenv to detect conflicts between our supported versions and their used versions programmatically.

     Do not set upper bounds on version requirements as people are often faster in adopting new versions of a web framework than we are in adding them to the test matrix or our package metadata.

4. Write the [docs](https://github.com/getsentry/sentry-docs). Follow the structure of [existing integration docs](https://docs.sentry.io/platforms/python/integrations/). And, please **make sure to add your integration to the table in `python/integrations/index.md`** (people often forget this step 🙂).
5. Merge docs after new version has been released. The docs are built and deployed after each merge, so your changes should go live in a few minutes.

6. (optional, if possible) Update data in [`sdk_updates.py`](https://github.com/getsentry/sentry/blob/master/src/sentry/sdk_updates.py) to give users in-app suggestions to use your integration. This step will only apply to some integrations.

## Releasing a New Version

_(only relevant for Sentry employees)_

### Prerequisites

- All the changes that should be released must be on the `master` branch.
- Every commit should follow the [Commit Message Format](https://develop.sentry.dev/commit-messages/#commit-message-format) convention.
- CHANGELOG.md is updated automatically. No human intervention is necessary, but you might want to consider polishing the changelog by hand to make it more user-friendly by grouping related things together, adding small code snippets and links to docs, etc.

### Manual Process

- On GitHub in the `sentry-python` repository, go to "Actions" and select the "Release" workflow.
- Click on "Run workflow" on the right side, and make sure the `master` branch is selected.
- Set the "Version to release" input field. Here you decide if it is a major, minor or patch release. (See "Versioning Policy" below)
- Click "Run Workflow".

This will trigger [Craft](https://github.com/getsentry/craft) to prepare everything needed for a release. (For more information, see [craft prepare](https://github.com/getsentry/craft#craft-prepare-preparing-a-new-release).) At the end of this process a release issue is created in the [Publish](https://github.com/getsentry/publish) repository. (Example release issue: https://github.com/getsentry/publish/issues/815)

Now one of the people with release privileges (most probably your engineering manager) will review this issue and then add the `accepted` label to the issue. There are always two people involved in a release.

If you are in a hurry and the release should be out immediately, there is a Slack channel called `#proj-release-approval` where you can see your release issue and where you can ping people to please have a look immediately.

When the release issue is labeled `accepted`, [Craft](https://github.com/getsentry/craft) is triggered again to publish the release to all the right platforms. (See [craft publish](https://github.com/getsentry/craft#craft-publish-publishing-the-release) for more information.) At the end of this process the release issue on GitHub will be closed and the release is completed! Congratulations!

There is a sequence diagram visualizing all this in the [README.md](https://github.com/getsentry/publish) of the `Publish` repository.

### Versioning Policy

This project follows [semver](https://semver.org/), with three additions:

- Semver says that major version `0` can include breaking changes at any time. Still, it is common practice to assume that only `0.x` releases (minor versions) can contain breaking changes while `0.x.y` releases (patch versions) are used for backwards-compatible changes (bugfixes and features). This project also follows that practice.
- All undocumented APIs are considered internal. They are not part of this contract.
- Certain features (e.g. integrations) may be explicitly called out as "experimental" or "unstable" in the documentation. They come with their own versioning policy described in the documentation.

We recommend pinning your version requirements against `1.x.*` or `1.x.y`.
Either one of the following is fine: ``` sentry-sdk>=1.0.0,<2.0.0 sentry-sdk==1.5.0 ``` A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. sentry-python-1.39.2/LICENSE 0000664 0000000 0000000 00000002105 14547447232 0015470 0 ustar 00root root 0000000 0000000 MIT License Copyright (c) 2018 Functional Software, Inc. dba Sentry Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. sentry-python-1.39.2/MANIFEST.in 0000664 0000000 0000000 00000000054 14547447232 0016222 0 ustar 00root root 0000000 0000000 include LICENSE include sentry_sdk/py.typed sentry-python-1.39.2/Makefile 0000664 0000000 0000000 00000003303 14547447232 0016124 0 ustar 00root root 0000000 0000000 SHELL = /bin/bash VENV_PATH = .venv help: @echo "Thanks for your interest in the Sentry Python SDK!" @echo @echo "make lint: Run linters" @echo "make test: Run basic tests (not testing most integrations)" @echo "make test-all: Run ALL tests (slow, closest to CI)" @echo "make format: Run code formatters (destructive)" @echo "make aws-lambda-layer: Build AWS Lambda layer directory for serverless integration" @echo @echo "Also make sure to read ./CONTRIBUTING.md" @false .venv: virtualenv -ppython3 $(VENV_PATH) $(VENV_PATH)/bin/pip install tox dist: .venv rm -rf dist dist-serverless build $(VENV_PATH)/bin/pip install wheel setuptools $(VENV_PATH)/bin/python setup.py sdist bdist_wheel .PHONY: dist format: .venv $(VENV_PATH)/bin/tox -e linters --notest .tox/linters/bin/black . .PHONY: format test: .venv @$(VENV_PATH)/bin/tox -e py3.9 .PHONY: test test-all: .venv @TOXPATH=$(VENV_PATH)/bin/tox sh ./scripts/runtox.sh .PHONY: test-all check: lint test .PHONY: check lint: .venv @set -e && $(VENV_PATH)/bin/tox -e linters || ( \ echo "================================"; \ echo "Bad formatting? Run: make format"; \ echo "================================"; \ false) .PHONY: lint apidocs: .venv @$(VENV_PATH)/bin/pip install --editable . 
@$(VENV_PATH)/bin/pip install -U -r ./docs-requirements.txt @$(VENV_PATH)/bin/sphinx-build -vv -W -b html docs/ docs/_build .PHONY: apidocs apidocs-hotfix: apidocs @$(VENV_PATH)/bin/pip install ghp-import @$(VENV_PATH)/bin/ghp-import -pf docs/_build .PHONY: apidocs-hotfix aws-lambda-layer: dist $(VENV_PATH)/bin/pip install -r aws-lambda-layer-requirements.txt $(VENV_PATH)/bin/python -m scripts.build_aws_lambda_layer .PHONY: aws-lambda-layer sentry-python-1.39.2/README.md 0000664 0000000 0000000 00000011556 14547447232 0015754 0 ustar 00root root 0000000 0000000
_Bad software is everywhere, and we're tired of it. Sentry is on a mission to help developers write better software faster, so we can get back to enjoying technology. If you want to join us [**Check out our open positions**](https://sentry.io/careers/)_ # Official Sentry SDK for Python [](https://github.com/getsentry/sentry-python/actions/workflows/ci.yml) [](https://pypi.python.org/pypi/sentry-sdk) [](https://discord.gg/cWnMQeA) This is the official Python SDK for [Sentry](http://sentry.io/) --- ## Getting Started ### Install ```bash pip install --upgrade sentry-sdk ``` ### Configuration ```python import sentry_sdk sentry_sdk.init( "https://12927b5f211046b575ee51fd8b1ac34f@o1.ingest.sentry.io/1", # Set traces_sample_rate to 1.0 to capture 100% # of transactions for performance monitoring. traces_sample_rate=1.0, ) ``` ### Usage ```python from sentry_sdk import capture_message capture_message("Hello World") # Will create an event in Sentry. raise ValueError() # Will also create an event in Sentry. ``` - To learn more about how to use the SDK [refer to our docs](https://docs.sentry.io/platforms/python/). - Are you coming from `raven-python`? [Use this migration guide](https://docs.sentry.io/platforms/python/migration/). - To learn about internals use the [API Reference](https://getsentry.github.io/sentry-python/). ## Integrations (If you want to create a new integration, have a look at the [Adding a new integration checklist](https://github.com/getsentry/sentry-python/blob/master/CONTRIBUTING.md#adding-a-new-integration).) See [the documentation](https://docs.sentry.io/platforms/python/integrations/) for an up-to-date list of libraries and frameworks we support. Here are some examples: - [Django](https://docs.sentry.io/platforms/python/integrations/django/) - [Flask](https://docs.sentry.io/platforms/python/integrations/flask/) - [FastAPI](https://docs.sentry.io/platforms/python/integrations/fastapi/) - [AIOHTTP](https://docs.sentry.io/platforms/python/integrations/aiohttp/) - [SQLAlchemy](https://docs.sentry.io/platforms/python/integrations/sqlalchemy/) - [asyncpg](https://docs.sentry.io/platforms/python/integrations/asyncpg/) - [Redis](https://docs.sentry.io/platforms/python/integrations/redis/) - [Celery](https://docs.sentry.io/platforms/python/integrations/celery/) - [Apache Airflow](https://docs.sentry.io/platforms/python/integrations/airflow/) - [Apache Spark](https://docs.sentry.io/platforms/python/integrations/pyspark/) - [asyncio](https://docs.sentry.io/platforms/python/integrations/asyncio/) - [Graphene](https://docs.sentry.io/platforms/python/integrations/graphene/) - [Logging](https://docs.sentry.io/platforms/python/integrations/logging/) - [Loguru](https://docs.sentry.io/platforms/python/integrations/loguru/) - [HTTPX](https://docs.sentry.io/platforms/python/integrations/httpx/) - [AWS Lambda](https://docs.sentry.io/platforms/python/integrations/aws-lambda/) - [Google Cloud Functions](https://docs.sentry.io/platforms/python/integrations/gcp-functions/) ## Migrating From `raven-python` The old `raven-python` client has entered maintenance mode and was moved [here](https://github.com/getsentry/raven-python). If you're using `raven-python`, we recommend you to migrate to this new SDK. You can find the benefits of migrating and how to do it in our [migration guide](https://docs.sentry.io/platforms/python/migration/). ## Contributing to the SDK Please refer to [CONTRIBUTING.md](CONTRIBUTING.md). 
## Getting Help/Support If you need help setting up or configuring the Python SDK (or anything else in the Sentry universe) please head over to the [Sentry Community on Discord](https://discord.com/invite/Ww9hbqr). There is a ton of great people in our Discord community ready to help you! ## Resources - [](https://docs.sentry.io/quickstart/) - [](https://forum.sentry.io/c/sdks) - [](https://discord.gg/Ww9hbqr) - [](http://stackoverflow.com/questions/tagged/sentry) - [](https://twitter.com/intent/follow?screen_name=getsentry) ## License Licensed under the MIT license, see [`LICENSE`](LICENSE) sentry-python-1.39.2/aws-lambda-layer-requirements.txt 0000664 0000000 0000000 00000000336 14547447232 0023073 0 ustar 00root root 0000000 0000000 certifi # In Lambda functions botocore is used, and botocore is not # yet supporting urllib3 1.27.0 never mind 2+. # So we pin this here to make our Lambda layer work with # Lambda Function using Python 3.7+ urllib3<1.27 sentry-python-1.39.2/checkouts/ 0000775 0000000 0000000 00000000000 14547447232 0016455 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/checkouts/data-schemas/ 0000775 0000000 0000000 00000000000 14547447232 0021007 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/codecov.yml 0000664 0000000 0000000 00000000455 14547447232 0016636 0 ustar 00root root 0000000 0000000 comment: false coverage: status: project: default: target: auto # auto compares coverage to the previous base commit threshold: 10% # this allows a 10% drop from the previous base commit coverage informational: true ignore: - "tests" - "sentry_sdk/_types.py" sentry-python-1.39.2/docs-requirements.txt 0000664 0000000 0000000 00000000127 14547447232 0020677 0 ustar 00root root 0000000 0000000 shibuya sphinx==7.2.6 sphinx-autodoc-typehints[type_comments]>=1.8.0 typing-extensions sentry-python-1.39.2/docs/ 0000775 0000000 0000000 00000000000 14547447232 0015415 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/docs/.gitignore 0000664 0000000 0000000 00000000007 14547447232 0017402 0 ustar 00root root 0000000 0000000 _build sentry-python-1.39.2/docs/_static/ 0000775 0000000 0000000 00000000000 14547447232 0017043 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/docs/_static/.gitkeep 0000664 0000000 0000000 00000000000 14547447232 0020462 0 ustar 00root root 0000000 0000000 sentry-python-1.39.2/docs/api.rst 0000664 0000000 0000000 00000002442 14547447232 0016722 0 ustar 00root root 0000000 0000000 ============= Top Level API ============= This is the user facing API of the SDK. It's exposed as ``sentry_sdk``. With this API you can implement a custom performance monitoring or error reporting solution. Capturing Data ============== .. autofunction:: sentry_sdk.api.capture_event .. autofunction:: sentry_sdk.api.capture_exception .. autofunction:: sentry_sdk.api.capture_message Enriching Events ================ .. autofunction:: sentry_sdk.api.add_breadcrumb .. autofunction:: sentry_sdk.api.set_context .. autofunction:: sentry_sdk.api.set_extra .. autofunction:: sentry_sdk.api.set_level .. autofunction:: sentry_sdk.api.set_tag .. autofunction:: sentry_sdk.api.set_user Performance Monitoring ====================== .. autofunction:: sentry_sdk.api.continue_trace .. autofunction:: sentry_sdk.api.get_current_span .. autofunction:: sentry_sdk.api.start_span .. autofunction:: sentry_sdk.api.start_transaction Distributed Tracing =================== .. autofunction:: sentry_sdk.api.get_baggage .. 
autofunction:: sentry_sdk.api.get_traceparent Managing Scope (advanced) ========================= .. autofunction:: sentry_sdk.api.configure_scope .. autofunction:: sentry_sdk.api.push_scope .. Not documented (On purpose. Not sure if anyone should use those) .. last_event_id() .. flush() sentry-python-1.39.2/docs/apidocs.rst 0000664 0000000 0000000 00000001431 14547447232 0017570 0 ustar 00root root 0000000 0000000 ======== API Docs ======== .. autoclass:: sentry_sdk.Hub :members: .. autoclass:: sentry_sdk.Scope :members: .. autoclass:: sentry_sdk.Client :members: .. autoclass:: sentry_sdk.Transport :members: .. autoclass:: sentry_sdk.HttpTransport :members: .. autoclass:: sentry_sdk.tracing.Transaction :members: .. autoclass:: sentry_sdk.tracing.Span :members: .. autoclass:: sentry_sdk.profiler.Profile :members: .. autoclass:: sentry_sdk.session.Session :members: .. autoclass:: sentry_sdk.attachments.Attachment :members: .. autoclass:: sentry_sdk.scrubber.EventScrubber :members: .. autoclass:: sentry_sdk.monitor.Monitor :members: .. autoclass:: sentry_sdk.envelope.Envelope :members: .. autoclass:: sentry_sdk.envelope.Item :members: sentry-python-1.39.2/docs/conf.py 0000664 0000000 0000000 00000013006 14547447232 0016714 0 ustar 00root root 0000000 0000000 # -*- coding: utf-8 -*- import os import sys import typing from datetime import datetime # prevent circular imports import sphinx.builders.html import sphinx.builders.latex import sphinx.builders.texinfo import sphinx.builders.text import sphinx.ext.autodoc # noqa: F401 import urllib3.exceptions # noqa: F401 typing.TYPE_CHECKING = True # # Configuration file for the Sphinx documentation builder. # # This file does only contain a selection of the most common options. For a # full list see the documentation: # http://www.sphinx-doc.org/en/master/config sys.path.insert(0, os.path.abspath("..")) # -- Project information ----------------------------------------------------- project = "sentry-python" copyright = "2019-{}, Sentry Team and Contributors".format(datetime.now().year) author = "Sentry Team and Contributors" release = "1.39.2" version = ".".join(release.split(".")[:2]) # The short X.Y version. # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ "sphinx.ext.autodoc", "sphinx_autodoc_typehints", "sphinx.ext.viewcode", "sphinx.ext.githubpages", "sphinx.ext.intersphinx", ] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = ".rst" # The master toctree document. master_doc = "index" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The name of the Pygments (syntax highlighting) style to use. 
pygments_style = None # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # on_rtd = os.environ.get("READTHEDOCS", None) == "True" html_theme = "shibuya" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = { "github_url": "https://github.com/getsentry/sentry-python", } # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # # The default sidebars (for documents that don't match any pattern) are # defined by theme itself. Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. # # html_sidebars = {} # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. htmlhelp_basename = "sentry-pythondoc" # -- Options for LaTeX output ------------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ( master_doc, "sentry-python.tex", "sentry-python Documentation", "Sentry Team and Contributors", "manual", ) ] # -- Options for manual page output ------------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [(master_doc, "sentry-python", "sentry-python Documentation", [author], 1)] # -- Options for Texinfo output ---------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( master_doc, "sentry-python", "sentry-python Documentation", author, "sentry-python", "The official Sentry SDK for Python.", "Miscellaneous", ) ] # -- Options for Epub output ------------------------------------------------- # Bibliographic Dublin Core info. epub_title = project # The unique identifier of the text. This can be a ISBN number # or the project homepage. # # epub_identifier = '' # A unique identification for the text. # # epub_uid = '' # A list of files that should not be packed into the epub file. epub_exclude_files = ["search.html"] intersphinx_mapping = {"python": ("https://docs.python.org/3", None)} sentry-python-1.39.2/docs/index.rst 0000664 0000000 0000000 00000000570 14547447232 0017260 0 ustar 00root root 0000000 0000000 ===================================== sentry-python - Sentry SDK for Python ===================================== This is the API documentation for `Sentry's Python SDK # pre-release
[-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""
pattern = re.compile(
r"^\s*" + VERSION_PATTERN + r"\s*$",
re.VERBOSE | re.IGNORECASE,
)
try:
release = pattern.match(version).groupdict()["release"] # type: ignore
release_tuple = tuple(map(int, release.split(".")[:3])) # type: Tuple[int, ...]
except (TypeError, ValueError, AttributeError):
return None
return release_tuple
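# A minimal usage sketch (hypothetical inputs) of the kinds of values
# parse_version is expected to produce under the PEP 440 pattern above:
#
#     parse_version("1.39.2")        -> (1, 39, 2)
#     parse_version("2.0.0rc1")      -> (2, 0, 0)   # pre-release suffix ignored
#     parse_version("20.9")          -> (20, 9)     # at most three components
#     parse_version("not-a-version") -> None        # unparsable input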
def _is_contextvars_broken():
# type: () -> bool
"""
Returns whether gevent/eventlet have patched the stdlib in a way where thread locals are now more "correct" than contextvars.
"""
try:
import gevent # type: ignore
from gevent.monkey import is_object_patched # type: ignore
# Get the MAJOR and MINOR version numbers of Gevent
version_tuple = tuple(
[int(part) for part in re.split(r"a|b|rc|\.", gevent.__version__)[:2]]
)
if is_object_patched("threading", "local"):
# Gevent 20.9.0 depends on Greenlet 0.4.17 which natively handles switching
# context vars when greenlets are switched, so, Gevent 20.9.0+ is all fine.
# Ref: https://github.com/gevent/gevent/blob/83c9e2ae5b0834b8f84233760aabe82c3ba065b4/src/gevent/monkey.py#L604-L609
# Gevent 20.5, that doesn't depend on Greenlet 0.4.17 with native support
# for contextvars, is able to patch both thread locals and contextvars, in
# that case, check if contextvars are effectively patched.
if (
# Gevent 20.9.0+
(sys.version_info >= (3, 7) and version_tuple >= (20, 9))
# Gevent 20.5.0+ or Python < 3.7
or (is_object_patched("contextvars", "ContextVar"))
):
return False
return True
except ImportError:
pass
try:
import greenlet # type: ignore
from eventlet.patcher import is_monkey_patched # type: ignore
greenlet_version = parse_version(greenlet.__version__)
if greenlet_version is None:
logger.error(
"Internal error in Sentry SDK: Could not parse Greenlet version from greenlet.__version__."
)
return False
if is_monkey_patched("thread") and greenlet_version < (0, 5):
return True
except ImportError:
pass
return False
def _make_threadlocal_contextvars(local):
# type: (type) -> type
class ContextVar(object):
# Super-limited impl of ContextVar
def __init__(self, name):
# type: (str) -> None
self._name = name
self._local = local()
def get(self, default):
# type: (Any) -> Any
return getattr(self._local, "value", default)
def set(self, value):
# type: (Any) -> None
self._local.value = value
return ContextVar
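# A minimal usage sketch (hypothetical) of the shim above. It only supports
# get()/set(), mirroring the subset of the ContextVar API the SDK relies on:
#
#     from threading import local
#     ShimVar = _make_threadlocal_contextvars(local)
#     var = ShimVar("my_var")
#     var.set(42)
#     assert var.get(None) == 42   # falls back to the default when unset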
def _get_contextvars():
# type: () -> Tuple[bool, type]
"""
Figure out the "right" contextvars installation to use. Returns a
`contextvars.ContextVar`-like class with a limited API.
See https://docs.sentry.io/platforms/python/contextvars/ for more information.
"""
if not _is_contextvars_broken():
# aiocontextvars is a PyPI package that ensures that the contextvars
# backport (also a PyPI package) works with asyncio under Python 3.6
#
# Import it if available.
if sys.version_info < (3, 7):
# `aiocontextvars` is absolutely required for functional
# contextvars on Python 3.6.
try:
from aiocontextvars import ContextVar
return True, ContextVar
except ImportError:
pass
else:
# On Python 3.7 contextvars are functional.
try:
from contextvars import ContextVar
return True, ContextVar
except ImportError:
pass
# Fall back to basic thread-local usage.
from threading import local
return False, _make_threadlocal_contextvars(local)
HAS_REAL_CONTEXTVARS, ContextVar = _get_contextvars()
CONTEXTVARS_ERROR_MESSAGE = """
With asyncio/ASGI applications, the Sentry SDK requires a functional
installation of `contextvars` to avoid leaking scope/context data across
requests.
Please refer to https://docs.sentry.io/platforms/python/contextvars/ for more information.
"""
def qualname_from_function(func):
# type: (Callable[..., Any]) -> Optional[str]
"""Return the qualified name of func. Works with regular function, lambda, partial and partialmethod."""
func_qualname = None # type: Optional[str]
# Python 2
try:
return "%s.%s.%s" % (
func.im_class.__module__, # type: ignore
func.im_class.__name__, # type: ignore
func.__name__,
)
except Exception:
pass
prefix, suffix = "", ""
if (
_PARTIALMETHOD_AVAILABLE
and hasattr(func, "_partialmethod")
and isinstance(func._partialmethod, partialmethod)
):
prefix, suffix = "partialmethod()"
func = func._partialmethod.func
elif isinstance(func, partial) and hasattr(func.func, "__name__"):
prefix, suffix = "partial()"
func = func.func
if hasattr(func, "__qualname__"):
func_qualname = func.__qualname__
elif hasattr(func, "__name__"): # Python 2.7 has no __qualname__
func_qualname = func.__name__
# Python 3: methods, functions, classes
if func_qualname is not None:
if hasattr(func, "__module__"):
func_qualname = func.__module__ + "." + func_qualname
func_qualname = prefix + func_qualname + suffix
return func_qualname
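# A minimal sketch (hypothetical module and function names) of the shapes
# this helper is expected to return:
#
#     def handler(): ...
#     qualname_from_function(handler)           -> "mymodule.handler"
#     qualname_from_function(partial(handler))  -> "partial(<function mymodule.handler>)"
#     qualname_from_function(lambda: None)      -> "mymodule.<lambda>"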
def transaction_from_function(func):
# type: (Callable[..., Any]) -> Optional[str]
return qualname_from_function(func)
disable_capture_event = ContextVar("disable_capture_event")
class ServerlessTimeoutWarning(Exception): # noqa: N818
"""Raised when a serverless method is about to reach its timeout."""
pass
class TimeoutThread(threading.Thread):
"""Creates a Thread which runs (sleeps) for a time duration equal to
    waiting_time and raises a custom ServerlessTimeoutWarning exception.
"""
def __init__(self, waiting_time, configured_timeout):
# type: (float, int) -> None
threading.Thread.__init__(self)
self.waiting_time = waiting_time
self.configured_timeout = configured_timeout
self._stop_event = threading.Event()
def stop(self):
# type: () -> None
self._stop_event.set()
def run(self):
# type: () -> None
self._stop_event.wait(self.waiting_time)
if self._stop_event.is_set():
return
integer_configured_timeout = int(self.configured_timeout)
        # Round the configured timeout (in seconds) up to the next whole second
if integer_configured_timeout < self.configured_timeout:
integer_configured_timeout = integer_configured_timeout + 1
        # Raise the warning once the timeout duration has been reached
        raise ServerlessTimeoutWarning(
            "WARNING: Function is expected to time out. Configured timeout duration = {} seconds.".format(
integer_configured_timeout
)
)
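# A minimal usage sketch (hypothetical timings) of how a serverless
# integration is expected to drive TimeoutThread:
#
#     timeout_thread = TimeoutThread(waiting_time=9.5, configured_timeout=10)
#     timeout_thread.start()   # raises ServerlessTimeoutWarning after 9.5s
#     ...                      # run the actual handler code
#     timeout_thread.stop()    # handler finished in time; cancel the warning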
def to_base64(original):
# type: (str) -> Optional[str]
"""
Convert a string to base64, via UTF-8. Returns None on invalid input.
"""
base64_string = None
try:
utf8_bytes = original.encode("UTF-8")
base64_bytes = base64.b64encode(utf8_bytes)
base64_string = base64_bytes.decode("UTF-8")
except Exception as err:
logger.warning("Unable to encode {orig} to base64:".format(orig=original), err)
return base64_string
def from_base64(base64_string):
# type: (str) -> Optional[str]
"""
Convert a string from base64, via UTF-8. Returns None on invalid input.
"""
utf8_string = None
try:
only_valid_chars = BASE64_ALPHABET.match(base64_string)
assert only_valid_chars
base64_bytes = base64_string.encode("UTF-8")
utf8_bytes = base64.b64decode(base64_bytes)
utf8_string = utf8_bytes.decode("UTF-8")
except Exception as err:
        logger.warning(
            "Unable to decode %s from base64: %s", base64_string, err
        )
return utf8_string
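# A minimal sketch (hypothetical inputs): the two helpers above are inverses
# of each other on valid input.
#
#     encoded = to_base64("hello")        # -> "aGVsbG8="
#     assert from_base64(encoded) == "hello"
#     assert from_base64("not base64!") is None   # invalid input -> None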
Components = namedtuple("Components", ["scheme", "netloc", "path", "query", "fragment"])
def sanitize_url(url, remove_authority=True, remove_query_values=True, split=False):
# type: (str, bool, bool, bool) -> Union[str, Components]
"""
Removes the authority and query parameter values from a given URL.
"""
parsed_url = urlsplit(url)
query_params = parse_qs(parsed_url.query, keep_blank_values=True)
# strip username:password (netloc can be usr:pwd@example.com)
if remove_authority:
netloc_parts = parsed_url.netloc.split("@")
if len(netloc_parts) > 1:
netloc = "%s:%s@%s" % (
SENSITIVE_DATA_SUBSTITUTE,
SENSITIVE_DATA_SUBSTITUTE,
netloc_parts[-1],
)
else:
netloc = parsed_url.netloc
else:
netloc = parsed_url.netloc
# strip values from query string
if remove_query_values:
query_string = unquote(
urlencode({key: SENSITIVE_DATA_SUBSTITUTE for key in query_params})
)
else:
query_string = parsed_url.query
components = Components(
scheme=parsed_url.scheme,
netloc=netloc,
query=query_string,
path=parsed_url.path,
fragment=parsed_url.fragment,
)
if split:
return components
else:
return urlunsplit(components)
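# A minimal sketch (hypothetical URL) of sanitize_url with default arguments,
# assuming SENSITIVE_DATA_SUBSTITUTE is "[Filtered]" as defined elsewhere
# in the SDK:
#
#     sanitize_url("https://usr:pwd@example.com/path?secret=abc")
#     # -> "https://[Filtered]:[Filtered]@example.com/path?secret=[Filtered]"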
ParsedUrl = namedtuple("ParsedUrl", ["url", "query", "fragment"])
def parse_url(url, sanitize=True):
# type: (str, bool) -> ParsedUrl
"""
    Splits a URL into a url (including path), query and fragment. If sanitize is True, the query
    parameters will be sanitized to remove sensitive data. The authority (username and password)
    in the URL will always be removed.
"""
parsed_url = sanitize_url(
url, remove_authority=True, remove_query_values=sanitize, split=True
)
base_url = urlunsplit(
Components(
scheme=parsed_url.scheme, # type: ignore
netloc=parsed_url.netloc, # type: ignore
query="",
path=parsed_url.path, # type: ignore
fragment="",
)
)
return ParsedUrl(
url=base_url,
query=parsed_url.query, # type: ignore
fragment=parsed_url.fragment, # type: ignore
)
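# A minimal sketch (hypothetical URL): parse_url splits and, by default,
# sanitizes in one step.
#
#     parsed = parse_url("https://example.com/search?q=secret#frag")
#     # parsed.url      -> "https://example.com/search"
#     # parsed.query    -> "q=[Filtered]"
#     # parsed.fragment -> "frag"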
def is_valid_sample_rate(rate, source):
# type: (Any, str) -> bool
"""
    Checks the given sample rate to make sure it is a valid type and value (a
    boolean or a number between 0 and 1, inclusive).
"""
    # both booleans and NaN are instances of Real, so a) checking for Real
    # also covers booleans, b) NaN has to be checked for separately, and
    # c) Decimal does not derive from Real, so it needs its own check
if not isinstance(rate, (Real, Decimal)) or math.isnan(rate):
logger.warning(
"{source} Given sample rate is invalid. Sample rate must be a boolean or a number between 0 and 1. Got {rate} of type {type}.".format(
source=source, rate=rate, type=type(rate)
)
)
return False
# in case rate is a boolean, it will get cast to 1 if it's True and 0 if it's False
rate = float(rate)
if rate < 0 or rate > 1:
logger.warning(
"{source} Given sample rate is invalid. Sample rate must be between 0 and 1. Got {rate}.".format(
source=source, rate=rate
)
)
return False
return True
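# A minimal sketch (hypothetical values) of accepted and rejected sample rates:
#
#     is_valid_sample_rate(0.5, source="[Tracing]")    -> True
#     is_valid_sample_rate(True, source="[Tracing]")   -> True   (cast to 1.0)
#     is_valid_sample_rate(1.5, source="[Tracing]")    -> False  (out of range)
#     is_valid_sample_rate("0.5", source="[Tracing]")  -> False  (wrong type)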
def match_regex_list(item, regex_list=None, substring_matching=False):
# type: (str, Optional[List[str]], bool) -> bool
if regex_list is None:
return False
for item_matcher in regex_list:
if not substring_matching and item_matcher[-1] != "$":
item_matcher += "$"
matched = re.search(item_matcher, item)
if matched:
return True
return False
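# A minimal sketch (hypothetical patterns): without substring_matching, each
# pattern is implicitly anchored with a trailing "$".
#
#     match_regex_list("api.example.com", ["api.*"])        -> True
#     match_regex_list("api.example.com/v2", ["api.*com"])  -> False  (no "$" match)
#     match_regex_list("api.example.com/v2", ["example"],
#                      substring_matching=True)             -> True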
def is_sentry_url(hub, url):
# type: (sentry_sdk.Hub, str) -> bool
"""
Determines whether the given URL matches the Sentry DSN.
"""
return (
hub.client is not None
and hub.client.transport is not None
and hub.client.transport.parsed_dsn is not None
and hub.client.transport.parsed_dsn.netloc in url
)
def _generate_installed_modules():
# type: () -> Iterator[Tuple[str, str]]
try:
from importlib import metadata
for dist in metadata.distributions():
name = dist.metadata["Name"]
# `metadata` values may be `None`, see:
# https://github.com/python/cpython/issues/91216
# and
# https://github.com/python/importlib_metadata/issues/371
if name is not None:
version = metadata.version(name)
if version is not None:
yield _normalize_module_name(name), version
except ImportError:
# < py3.8
try:
import pkg_resources
except ImportError:
return
for info in pkg_resources.working_set:
yield _normalize_module_name(info.key), info.version
def _normalize_module_name(name):
# type: (str) -> str
return name.lower()
def _get_installed_modules():
# type: () -> Dict[str, str]
global _installed_modules
if _installed_modules is None:
_installed_modules = dict(_generate_installed_modules())
return _installed_modules
def package_version(package):
# type: (str) -> Optional[Tuple[int, ...]]
installed_packages = _get_installed_modules()
version = installed_packages.get(package)
if version is None:
return None
return parse_version(version)
if PY37:
def nanosecond_time():
# type: () -> int
return time.perf_counter_ns()
elif PY33:
def nanosecond_time():
# type: () -> int
return int(time.perf_counter() * 1e9)
else:
def nanosecond_time():
# type: () -> int
return int(time.time() * 1e9)
if PY2:
def now():
# type: () -> float
return time.time()
else:
def now():
# type: () -> float
return time.perf_counter()
sentry-python-1.39.2/sentry_sdk/worker.py 0000664 0000000 0000000 00000010730 14547447232 0020536 0 ustar 00root root 0000000 0000000 import os
import threading
from time import sleep, time
from sentry_sdk._compat import check_thread_support
from sentry_sdk._queue import Queue, FullError
from sentry_sdk.utils import logger
from sentry_sdk.consts import DEFAULT_QUEUE_SIZE
from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Any
from typing import Optional
from typing import Callable
_TERMINATOR = object()
class BackgroundWorker(object):
def __init__(self, queue_size=DEFAULT_QUEUE_SIZE):
# type: (int) -> None
check_thread_support()
self._queue = Queue(queue_size) # type: Queue
self._lock = threading.Lock()
self._thread = None # type: Optional[threading.Thread]
self._thread_for_pid = None # type: Optional[int]
@property
def is_alive(self):
# type: () -> bool
if self._thread_for_pid != os.getpid():
return False
if not self._thread:
return False
return self._thread.is_alive()
def _ensure_thread(self):
# type: () -> None
if not self.is_alive:
self.start()
def _timed_queue_join(self, timeout):
# type: (float) -> bool
deadline = time() + timeout
queue = self._queue
queue.all_tasks_done.acquire()
try:
while queue.unfinished_tasks:
delay = deadline - time()
if delay <= 0:
return False
queue.all_tasks_done.wait(timeout=delay)
return True
finally:
queue.all_tasks_done.release()
def start(self):
# type: () -> None
with self._lock:
if not self.is_alive:
self._thread = threading.Thread(
target=self._target, name="raven-sentry.BackgroundWorker"
)
self._thread.daemon = True
try:
self._thread.start()
self._thread_for_pid = os.getpid()
except RuntimeError:
# At this point we can no longer start because the interpreter
# is already shutting down. Sadly at this point we can no longer
# send out events.
self._thread = None
def kill(self):
# type: () -> None
"""
        Kill the worker thread. Returns immediately. Not useful for
        waiting on shutdown for events; use `flush` for that.
"""
logger.debug("background worker got kill request")
with self._lock:
if self._thread:
try:
self._queue.put_nowait(_TERMINATOR)
except FullError:
logger.debug("background worker queue full, kill failed")
self._thread = None
self._thread_for_pid = None
def flush(self, timeout, callback=None):
# type: (float, Optional[Any]) -> None
logger.debug("background worker got flush request")
with self._lock:
if self.is_alive and timeout > 0.0:
self._wait_flush(timeout, callback)
logger.debug("background worker flushed")
def full(self):
# type: () -> bool
return self._queue.full()
def _wait_flush(self, timeout, callback):
# type: (float, Optional[Any]) -> None
initial_timeout = min(0.1, timeout)
if not self._timed_queue_join(initial_timeout):
pending = self._queue.qsize() + 1
logger.debug("%d event(s) pending on flush", pending)
if callback is not None:
callback(pending, timeout)
if not self._timed_queue_join(timeout - initial_timeout):
pending = self._queue.qsize() + 1
logger.error("flush timed out, dropped %s events", pending)
def submit(self, callback):
# type: (Callable[[], None]) -> bool
self._ensure_thread()
try:
self._queue.put_nowait(callback)
return True
except FullError:
return False
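    # A minimal usage sketch (hypothetical; send_request is not a real helper)
    # of how the transport is expected to drive the worker:
    #
    #     worker = BackgroundWorker()
    #     accepted = worker.submit(lambda: send_request())
    #     if not accepted:
    #         ...  # queue full: the event is dropped
    #     worker.flush(timeout=2.0)  # wait up to 2s for pending work on shutdown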
def _target(self):
# type: () -> None
while True:
callback = self._queue.get()
try:
if callback is _TERMINATOR:
break
try:
callback()
except Exception:
logger.error("Failed processing job", exc_info=True)
finally:
self._queue.task_done()
sleep(0)
sentry-python-1.39.2/setup.py 0000664 0000000 0000000 00000007661 14547447232 0016211 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
"""
Sentry-Python - Sentry SDK for Python
=====================================
**Sentry-Python is an SDK for Sentry.** Check out `GitHub
<https://github.com/getsentry/sentry-python>`_ to find out more.
"""
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
def get_file_text(file_name):
with open(os.path.join(here, file_name)) as in_file:
return in_file.read()
setup(
name="sentry-sdk",
version="1.39.2",
author="Sentry Team and Contributors",
author_email="hello@sentry.io",
url="https://github.com/getsentry/sentry-python",
project_urls={
"Documentation": "https://docs.sentry.io/platforms/python/",
"Changelog": "https://github.com/getsentry/sentry-python/blob/master/CHANGELOG.md",
},
description="Python client for Sentry (https://sentry.io)",
long_description=get_file_text("README.md"),
long_description_content_type="text/markdown",
packages=find_packages(exclude=("tests", "tests.*")),
# PEP 561
package_data={"sentry_sdk": ["py.typed"]},
zip_safe=False,
license="MIT",
install_requires=[
'urllib3>=1.25.7; python_version<="3.4"',
'urllib3>=1.26.9; python_version=="3.5"',
'urllib3>=1.26.11; python_version>="3.6"',
"certifi",
],
extras_require={
"aiohttp": ["aiohttp>=3.5"],
"arq": ["arq>=0.23"],
"asyncpg": ["asyncpg>=0.23"],
"beam": ["apache-beam>=2.12"],
"bottle": ["bottle>=0.12.13"],
"celery": ["celery>=3"],
"chalice": ["chalice>=1.16.0"],
"clickhouse-driver": ["clickhouse-driver>=0.2.0"],
"django": ["django>=1.8"],
"falcon": ["falcon>=1.4"],
"fastapi": ["fastapi>=0.79.0"],
"flask": ["flask>=0.11", "blinker>=1.1", "markupsafe"],
"grpcio": ["grpcio>=1.21.1"],
"httpx": ["httpx>=0.16.0"],
"huey": ["huey>=2"],
"loguru": ["loguru>=0.5"],
"opentelemetry": ["opentelemetry-distro>=0.35b0"],
"opentelemetry-experimental": [
"opentelemetry-distro~=0.40b0",
"opentelemetry-instrumentation-aiohttp-client~=0.40b0",
"opentelemetry-instrumentation-django~=0.40b0",
"opentelemetry-instrumentation-fastapi~=0.40b0",
"opentelemetry-instrumentation-flask~=0.40b0",
"opentelemetry-instrumentation-requests~=0.40b0",
"opentelemetry-instrumentation-sqlite3~=0.40b0",
"opentelemetry-instrumentation-urllib~=0.40b0",
],
"pure_eval": ["pure_eval", "executing", "asttokens"],
"pymongo": ["pymongo>=3.1"],
"pyspark": ["pyspark>=2.4.4"],
"quart": ["quart>=0.16.1", "blinker>=1.1"],
"rq": ["rq>=0.6"],
"sanic": ["sanic>=0.8"],
"sqlalchemy": ["sqlalchemy>=1.2"],
"starlette": ["starlette>=0.19.1"],
"starlite": ["starlite>=1.48"],
"tornado": ["tornado>=5"],
},
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Topic :: Software Development :: Libraries :: Python Modules",
],
options={"bdist_wheel": {"universal": "1"}},
)
sentry-python-1.39.2/test-requirements.txt 0000664 0000000 0000000 00000000740 14547447232 0020727 0 ustar 00root root 0000000 0000000 pip # always use newest pip
mock ; python_version<'3.3'
pytest
pytest-cov==2.8.1
pytest-forked<=1.4.0
pytest-localserver==0.5.1 # TODO(py3): 0.6.0 drops 2.7 support: https://github.com/pytest-dev/pytest-localserver/releases/tag/v0.6.0
pytest-watch==4.2.0
tox==3.7.0
jsonschema==3.2.0
pyrsistent==0.16.0 # TODO(py3): 0.17.0 requires python3, see https://github.com/tobgu/pyrsistent/issues/205
executing<2.0.0 # TODO(py3): 2.0.0 requires python3
asttokens
responses
pysocks
ipdb
sentry-python-1.39.2/tests/ 0000775 0000000 0000000 00000000000 14547447232 0015627 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/__init__.py 0000664 0000000 0000000 00000000636 14547447232 0017745 0 ustar 00root root 0000000 0000000 import sys
import pytest
# This is used in _capture_internal_warnings. We need to run this at import
# time because that's where many deprecation warnings might get thrown.
#
# This lives in tests/__init__.py because apparently even tests/conftest.py
# gets loaded too late.
assert "sentry_sdk" not in sys.modules
_warning_recorder_mgr = pytest.warns(None)
_warning_recorder = _warning_recorder_mgr.__enter__()
sentry-python-1.39.2/tests/conftest.py 0000664 0000000 0000000 00000046202 14547447232 0020032 0 ustar 00root root 0000000 0000000 import json
import os
import socket
from threading import Thread
from contextlib import contextmanager
import pytest
import jsonschema
try:
import gevent
except ImportError:
gevent = None
try:
import eventlet
except ImportError:
eventlet = None
try:
# Python 2
import BaseHTTPServer
HTTPServer = BaseHTTPServer.HTTPServer
BaseHTTPRequestHandler = BaseHTTPServer.BaseHTTPRequestHandler
except Exception:
# Python 3
from http.server import BaseHTTPRequestHandler, HTTPServer
try:
from unittest import mock
except ImportError:
import mock
import sentry_sdk
from sentry_sdk._compat import iteritems, reraise, string_types, PY2
from sentry_sdk.envelope import Envelope
from sentry_sdk.integrations import _processed_integrations # noqa: F401
from sentry_sdk.profiler import teardown_profiler
from sentry_sdk.transport import Transport
from sentry_sdk.utils import capture_internal_exceptions
from tests import _warning_recorder, _warning_recorder_mgr
from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Optional
from collections.abc import Iterator
SENTRY_EVENT_SCHEMA = "./checkouts/data-schemas/relay/event.schema.json"
if not os.path.isfile(SENTRY_EVENT_SCHEMA):
SENTRY_EVENT_SCHEMA = None
else:
with open(SENTRY_EVENT_SCHEMA) as f:
SENTRY_EVENT_SCHEMA = json.load(f)
try:
import pytest_benchmark
except ImportError:
@pytest.fixture
def benchmark():
return lambda x: x()
else:
del pytest_benchmark
@pytest.fixture(autouse=True)
def internal_exceptions(request, monkeypatch):
errors = []
if "tests_internal_exceptions" in request.keywords:
return
def _capture_internal_exception(self, exc_info):
errors.append(exc_info)
@request.addfinalizer
def _():
# reraise the errors so that this just acts as a pass-through (that
# happens to keep track of the errors which pass through it)
for e in errors:
reraise(*e)
monkeypatch.setattr(
sentry_sdk.Hub, "_capture_internal_exception", _capture_internal_exception
)
return errors
@pytest.fixture(autouse=True, scope="session")
def _capture_internal_warnings():
yield
_warning_recorder_mgr.__exit__(None, None, None)
recorder = _warning_recorder
for warning in recorder:
try:
if isinstance(warning.message, ResourceWarning):
continue
except NameError:
pass
if "sentry_sdk" not in str(warning.filename) and "sentry-sdk" not in str(
warning.filename
):
continue
# pytest-django
if "getfuncargvalue" in str(warning.message):
continue
# Happens when re-initializing the SDK
if "but it was only enabled on init()" in str(warning.message):
continue
# sanic's usage of aiohttp for test client
if "verify_ssl is deprecated, use ssl=False instead" in str(warning.message):
continue
if "getargspec" in str(warning.message) and warning.filename.endswith(
("pyramid/config/util.py", "pyramid/config/views.py")
):
continue
if "isAlive() is deprecated" in str(
warning.message
) and warning.filename.endswith("celery/utils/timer2.py"):
continue
if "collections.abc" in str(warning.message) and warning.filename.endswith(
("celery/canvas.py", "werkzeug/datastructures.py", "tornado/httputil.py")
):
continue
# Django 1.7 emits a (seemingly) false-positive warning for our test
# app and suggests to use a middleware that does not exist in later
# Django versions.
if "SessionAuthenticationMiddleware" in str(warning.message):
continue
if "Something has already installed a non-asyncio" in str(warning.message):
continue
if "dns.hash" in str(warning.message) or "dns/namedict" in warning.filename:
continue
raise AssertionError(warning)
@pytest.fixture
def monkeypatch_test_transport(monkeypatch, validate_event_schema):
def check_event(event):
def check_string_keys(map):
for key, value in iteritems(map):
assert isinstance(key, string_types)
if isinstance(value, dict):
check_string_keys(value)
with capture_internal_exceptions():
check_string_keys(event)
validate_event_schema(event)
def check_envelope(envelope):
with capture_internal_exceptions():
# There used to be a check here for errors are not sent in envelopes.
# We changed the behaviour to send errors in envelopes when tracing is enabled.
# This is checked in test_client.py::test_sending_events_with_tracing
# and test_client.py::test_sending_events_with_no_tracing
pass
def inner(client):
monkeypatch.setattr(
client, "transport", TestTransport(check_event, check_envelope)
)
return inner
@pytest.fixture
def validate_event_schema(tmpdir):
def inner(event):
if SENTRY_EVENT_SCHEMA:
jsonschema.validate(instance=event, schema=SENTRY_EVENT_SCHEMA)
return inner
@pytest.fixture
def reset_integrations():
"""
    Use with caution: sometimes we really need to start
with a clean slate to ensure monkeypatching works well,
but this also means some other stuff will be monkeypatched twice.
"""
global _processed_integrations
_processed_integrations.clear()
@pytest.fixture
def sentry_init(monkeypatch_test_transport, request):
def inner(*a, **kw):
hub = sentry_sdk.Hub.current
client = sentry_sdk.Client(*a, **kw)
hub.bind_client(client)
if "transport" not in kw:
monkeypatch_test_transport(sentry_sdk.Hub.current.client)
if request.node.get_closest_marker("forked"):
# Do not run isolation if the test is already running in
# ultimate isolation (seems to be required for celery tests that
# fork)
yield inner
else:
with sentry_sdk.Hub(None):
yield inner
class TestTransport(Transport):
def __init__(self, capture_event_callback, capture_envelope_callback):
Transport.__init__(self)
self.capture_event = capture_event_callback
self.capture_envelope = capture_envelope_callback
self._queue = None
@pytest.fixture
def capture_events(monkeypatch):
def inner():
events = []
test_client = sentry_sdk.Hub.current.client
old_capture_event = test_client.transport.capture_event
old_capture_envelope = test_client.transport.capture_envelope
def append_event(event):
events.append(event)
return old_capture_event(event)
def append_envelope(envelope):
for item in envelope:
if item.headers.get("type") in ("event", "transaction"):
test_client.transport.capture_event(item.payload.json)
return old_capture_envelope(envelope)
monkeypatch.setattr(test_client.transport, "capture_event", append_event)
monkeypatch.setattr(test_client.transport, "capture_envelope", append_envelope)
return events
return inner
@pytest.fixture
def capture_envelopes(monkeypatch):
def inner():
envelopes = []
test_client = sentry_sdk.Hub.current.client
old_capture_event = test_client.transport.capture_event
old_capture_envelope = test_client.transport.capture_envelope
def append_event(event):
envelope = Envelope()
envelope.add_event(event)
envelopes.append(envelope)
return old_capture_event(event)
def append_envelope(envelope):
envelopes.append(envelope)
return old_capture_envelope(envelope)
monkeypatch.setattr(test_client.transport, "capture_event", append_event)
monkeypatch.setattr(test_client.transport, "capture_envelope", append_envelope)
return envelopes
return inner
@pytest.fixture
def capture_client_reports(monkeypatch):
def inner():
reports = []
test_client = sentry_sdk.Hub.current.client
def record_lost_event(reason, data_category=None, item=None):
if data_category is None:
data_category = item.data_category
return reports.append((reason, data_category))
monkeypatch.setattr(
test_client.transport, "record_lost_event", record_lost_event
)
return reports
return inner
@pytest.fixture
def capture_events_forksafe(monkeypatch, capture_events, request):
def inner():
capture_events()
events_r, events_w = os.pipe()
events_r = os.fdopen(events_r, "rb", 0)
events_w = os.fdopen(events_w, "wb", 0)
test_client = sentry_sdk.Hub.current.client
old_capture_event = test_client.transport.capture_event
def append(event):
events_w.write(json.dumps(event).encode("utf-8"))
events_w.write(b"\n")
return old_capture_event(event)
def flush(timeout=None, callback=None):
events_w.write(b"flush\n")
monkeypatch.setattr(test_client.transport, "capture_event", append)
monkeypatch.setattr(test_client, "flush", flush)
return EventStreamReader(events_r, events_w)
return inner
class EventStreamReader(object):
def __init__(self, read_file, write_file):
self.read_file = read_file
self.write_file = write_file
def read_event(self):
return json.loads(self.read_file.readline().decode("utf-8"))
def read_flush(self):
assert self.read_file.readline() == b"flush\n"
# scope=session ensures that fixture is run earlier
@pytest.fixture(
scope="session",
params=[None, "eventlet", "gevent"],
ids=("threads", "eventlet", "greenlet"),
)
def maybe_monkeypatched_threading(request):
if request.param == "eventlet":
if eventlet is None:
pytest.skip("no eventlet installed")
try:
eventlet.monkey_patch()
except AttributeError as e:
if "'thread.RLock' object has no attribute" in str(e):
# https://bitbucket.org/pypy/pypy/issues/2962/gevent-cannot-patch-rlock-under-pypy-27-7
pytest.skip("https://github.com/eventlet/eventlet/issues/546")
else:
raise
elif request.param == "gevent":
if gevent is None:
pytest.skip("no gevent installed")
try:
gevent.monkey.patch_all()
except Exception as e:
if "_RLock__owner" in str(e):
pytest.skip("https://github.com/gevent/gevent/issues/1380")
else:
raise
else:
assert request.param is None
return request.param
@pytest.fixture
def render_span_tree():
def inner(event):
assert event["type"] == "transaction"
by_parent = {}
for span in event["spans"]:
by_parent.setdefault(span["parent_span_id"], []).append(span)
def render_span(span):
yield "- op={}: description={}".format(
json.dumps(span.get("op")), json.dumps(span.get("description"))
)
for subspan in by_parent.get(span["span_id"]) or ():
for line in render_span(subspan):
yield " {}".format(line)
root_span = event["contexts"]["trace"]
        # Render the tree as a single multiline string so failing assertions produce a readable diff
return "\n".join(render_span(root_span))
return inner
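# A minimal sketch (hypothetical span data) of the rendered output for a
# transaction with one nested span:
#
#     - op="http.server": description=null
#       - op="db": description="SELECT * FROM users"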
@pytest.fixture(name="StringContaining")
def string_containing_matcher():
"""
An object which matches any string containing the substring passed to the
object at instantiation time.
Useful for assert_called_with, assert_any_call, etc.
Used like this:
>>> f = mock.Mock()
>>> f("dogs are great")
>>> f.assert_any_call("dogs") # will raise AssertionError
Traceback (most recent call last):
...
AssertionError: mock('dogs') call not found
>>> f.assert_any_call(StringContaining("dogs")) # no AssertionError
"""
class StringContaining(object):
def __init__(self, substring):
self.substring = substring
try:
# the `unicode` type only exists in python 2, so if this blows up,
# we must be in py3 and have the `bytes` type
self.valid_types = (str, unicode)
except NameError:
self.valid_types = (str, bytes)
def __eq__(self, test_string):
if not isinstance(test_string, self.valid_types):
return False
# this is safe even in py2 because as of 2.6, `bytes` exists in py2
# as an alias for `str`
if isinstance(test_string, bytes):
test_string = test_string.decode()
if len(self.substring) > len(test_string):
return False
return self.substring in test_string
def __ne__(self, test_string):
return not self.__eq__(test_string)
return StringContaining
def _safe_is_equal(x, y):
"""
Compares two values, preferring to use the first's __eq__ method if it
exists and is implemented.
Accounts for py2/py3 differences (like ints in py2 not having a __eq__
method), as well as the incomparability of certain types exposed by using
    raw __eq__() rather than ==.
"""
# Prefer using __eq__ directly to ensure that examples like
#
# maisey = Dog()
# maisey.name = "Maisey the Dog"
# maisey == ObjectDescribedBy(attrs={"name": StringContaining("Maisey")})
#
# evaluate to True (in other words, examples where the values in self.attrs
# might also have custom __eq__ methods; this makes sure those methods get
# used if possible)
try:
is_equal = x.__eq__(y)
except AttributeError:
is_equal = NotImplemented
# this can happen on its own, too (i.e. without an AttributeError being
# thrown), which is why this is separate from the except block above
if is_equal == NotImplemented:
# using == smoothes out weird variations exposed by raw __eq__
return x == y
return is_equal
@pytest.fixture(name="DictionaryContaining")
def dictionary_containing_matcher():
"""
An object which matches any dictionary containing all key-value pairs from
the dictionary passed to the object at instantiation time.
Useful for assert_called_with, assert_any_call, etc.
Used like this:
>>> f = mock.Mock()
>>> f({"dogs": "yes", "cats": "maybe"})
>>> f.assert_any_call({"dogs": "yes"}) # will raise AssertionError
Traceback (most recent call last):
...
AssertionError: mock({'dogs': 'yes'}) call not found
>>> f.assert_any_call(DictionaryContaining({"dogs": "yes"})) # no AssertionError
"""
class DictionaryContaining(object):
def __init__(self, subdict):
self.subdict = subdict
def __eq__(self, test_dict):
if not isinstance(test_dict, dict):
return False
if len(self.subdict) > len(test_dict):
return False
for key, value in self.subdict.items():
try:
test_value = test_dict[key]
except KeyError: # missing key
return False
if not _safe_is_equal(value, test_value):
return False
return True
def __ne__(self, test_dict):
return not self.__eq__(test_dict)
return DictionaryContaining
@pytest.fixture(name="ObjectDescribedBy")
def object_described_by_matcher():
"""
An object which matches any other object with the given properties.
Available properties currently are "type" (a type object) and "attrs" (a
dictionary).
Useful for assert_called_with, assert_any_call, etc.
Used like this:
>>> class Dog(object):
... pass
...
>>> maisey = Dog()
>>> maisey.name = "Maisey"
>>> maisey.age = 7
>>> f = mock.Mock()
>>> f(maisey)
>>> f.assert_any_call(ObjectDescribedBy(type=Dog)) # no AssertionError
>>> f.assert_any_call(ObjectDescribedBy(attrs={"name": "Maisey"})) # no AssertionError
"""
class ObjectDescribedBy(object):
def __init__(self, type=None, attrs=None):
self.type = type
self.attrs = attrs
def __eq__(self, test_obj):
if self.type:
if not isinstance(test_obj, self.type):
return False
if self.attrs:
for attr_name, attr_value in self.attrs.items():
try:
test_value = getattr(test_obj, attr_name)
except AttributeError: # missing attribute
return False
if not _safe_is_equal(attr_value, test_value):
return False
return True
def __ne__(self, test_obj):
return not self.__eq__(test_obj)
return ObjectDescribedBy
@pytest.fixture
def teardown_profiling():
yield
teardown_profiler()
class MockServerRequestHandler(BaseHTTPRequestHandler):
def do_GET(self): # noqa: N802
# Process an HTTP GET request and return a response with an HTTP 200 status.
self.send_response(200)
self.end_headers()
return
def get_free_port():
s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM)
s.bind(("localhost", 0))
_, port = s.getsockname()
s.close()
return port
def create_mock_http_server():
# Start a mock server to test outgoing http requests
mock_server_port = get_free_port()
mock_server = HTTPServer(("localhost", mock_server_port), MockServerRequestHandler)
mock_server_thread = Thread(target=mock_server.serve_forever)
    mock_server_thread.daemon = True  # setDaemon() is deprecated since Python 3.10
mock_server_thread.start()
return mock_server_port
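# A minimal usage sketch (hypothetical test code): a test would typically do
#
#     port = create_mock_http_server()
#     urllib.request.urlopen("http://localhost:%s/" % port)  # always answers 200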
def unpack_werkzeug_response(response):
# werkzeug < 2.1 returns a tuple as client response, newer versions return
# an object
try:
return response.get_data(), response.status, response.headers
except AttributeError:
content, status, headers = response
return b"".join(content), status, headers
def werkzeug_set_cookie(client, servername, key, value):
# client.set_cookie has a different signature in different werkzeug versions
try:
client.set_cookie(servername, key, value)
except TypeError:
client.set_cookie(key, value)
@contextmanager
def patch_start_tracing_child(fake_transaction_is_none=False):
# type: (bool) -> Iterator[Optional[mock.MagicMock]]
if not fake_transaction_is_none:
fake_transaction = mock.MagicMock()
fake_start_child = mock.MagicMock()
fake_transaction.start_child = fake_start_child
else:
fake_transaction = None
fake_start_child = None
version = "2" if PY2 else "3"
with mock.patch(
"sentry_sdk.tracing_utils_py%s.get_current_span" % version,
return_value=fake_transaction,
):
yield fake_start_child
sentry-python-1.39.2/tests/integrations/ 0000775 0000000 0000000 00000000000 14547447232 0020335 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/__init__.py 0000664 0000000 0000000 00000000000 14547447232 0022434 0 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/aiohttp/ 0000775 0000000 0000000 00000000000 14547447232 0022005 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/aiohttp/__init__.py 0000664 0000000 0000000 00000000056 14547447232 0024117 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("aiohttp")
sentry-python-1.39.2/tests/integrations/aiohttp/test_aiohttp.py 0000664 0000000 0000000 00000035440 14547447232 0025074 0 ustar 00root root 0000000 0000000 import asyncio
import json
from contextlib import suppress
import pytest
from aiohttp import web
from aiohttp.client import ServerDisconnectedError
from aiohttp.web_request import Request
from sentry_sdk import capture_message, start_transaction
from sentry_sdk.integrations.aiohttp import AioHttpIntegration
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
@pytest.mark.asyncio
async def test_basic(sentry_init, aiohttp_client, capture_events):
sentry_init(integrations=[AioHttpIntegration()])
async def hello(request):
1 / 0
app = web.Application()
app.router.add_get("/", hello)
events = capture_events()
client = await aiohttp_client(app)
resp = await client.get("/")
assert resp.status == 500
(event,) = events
assert (
event["transaction"]
== "tests.integrations.aiohttp.test_aiohttp.test_basic..hello"
)
(exception,) = event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
request = event["request"]
host = request["headers"]["Host"]
assert request["env"] == {"REMOTE_ADDR": "127.0.0.1"}
assert request["method"] == "GET"
assert request["query_string"] == ""
assert request.get("data") is None
assert request["url"] == "http://{host}/".format(host=host)
assert request["headers"] == {
"Accept": "*/*",
"Accept-Encoding": "gzip, deflate",
"Host": host,
"User-Agent": request["headers"]["User-Agent"],
"baggage": mock.ANY,
"sentry-trace": mock.ANY,
}
@pytest.mark.asyncio
async def test_post_body_not_read(sentry_init, aiohttp_client, capture_events):
from sentry_sdk.integrations.aiohttp import BODY_NOT_READ_MESSAGE
sentry_init(integrations=[AioHttpIntegration()])
body = {"some": "value"}
async def hello(request):
1 / 0
app = web.Application()
app.router.add_post("/", hello)
events = capture_events()
client = await aiohttp_client(app)
resp = await client.post("/", json=body)
assert resp.status == 500
(event,) = events
(exception,) = event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
request = event["request"]
assert request["env"] == {"REMOTE_ADDR": "127.0.0.1"}
assert request["method"] == "POST"
assert request["data"] == BODY_NOT_READ_MESSAGE
@pytest.mark.asyncio
async def test_post_body_read(sentry_init, aiohttp_client, capture_events):
sentry_init(integrations=[AioHttpIntegration()])
body = {"some": "value"}
async def hello(request):
await request.json()
1 / 0
app = web.Application()
app.router.add_post("/", hello)
events = capture_events()
client = await aiohttp_client(app)
resp = await client.post("/", json=body)
assert resp.status == 500
(event,) = events
(exception,) = event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
request = event["request"]
assert request["env"] == {"REMOTE_ADDR": "127.0.0.1"}
assert request["method"] == "POST"
assert request["data"] == json.dumps(body)
@pytest.mark.asyncio
async def test_403_not_captured(sentry_init, aiohttp_client, capture_events):
sentry_init(integrations=[AioHttpIntegration()])
async def hello(request):
raise web.HTTPForbidden()
app = web.Application()
app.router.add_get("/", hello)
events = capture_events()
client = await aiohttp_client(app)
resp = await client.get("/")
assert resp.status == 403
assert not events
@pytest.mark.asyncio
async def test_cancelled_error_not_captured(
sentry_init, aiohttp_client, capture_events
):
sentry_init(integrations=[AioHttpIntegration()])
async def hello(request):
raise asyncio.CancelledError()
app = web.Application()
app.router.add_get("/", hello)
events = capture_events()
client = await aiohttp_client(app)
with suppress(ServerDisconnectedError):
# Intended `aiohttp` interaction: server will disconnect if it
# encounters `asyncio.CancelledError`
await client.get("/")
assert not events
@pytest.mark.asyncio
async def test_half_initialized(sentry_init, aiohttp_client, capture_events):
sentry_init(integrations=[AioHttpIntegration()])
sentry_init()
async def hello(request):
return web.Response(text="hello")
app = web.Application()
app.router.add_get("/", hello)
events = capture_events()
client = await aiohttp_client(app)
resp = await client.get("/")
assert resp.status == 200
assert events == []
@pytest.mark.asyncio
async def test_tracing(sentry_init, aiohttp_client, capture_events):
sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)
async def hello(request):
return web.Response(text="hello")
app = web.Application()
app.router.add_get("/", hello)
events = capture_events()
client = await aiohttp_client(app)
resp = await client.get("/")
assert resp.status == 200
(event,) = events
assert event["type"] == "transaction"
assert (
event["transaction"]
== "tests.integrations.aiohttp.test_aiohttp.test_tracing..hello"
)
@pytest.mark.asyncio
@pytest.mark.parametrize(
"url,transaction_style,expected_transaction,expected_source",
[
(
"/message",
"handler_name",
"tests.integrations.aiohttp.test_aiohttp.test_transaction_style..hello",
"component",
),
(
"/message",
"method_and_path_pattern",
"GET /{var}",
"route",
),
],
)
async def test_transaction_style(
sentry_init,
aiohttp_client,
capture_events,
url,
transaction_style,
expected_transaction,
expected_source,
):
sentry_init(
integrations=[AioHttpIntegration(transaction_style=transaction_style)],
traces_sample_rate=1.0,
)
async def hello(request):
return web.Response(text="hello")
app = web.Application()
app.router.add_get(r"/{var}", hello)
events = capture_events()
client = await aiohttp_client(app)
resp = await client.get(url)
assert resp.status == 200
(event,) = events
assert event["type"] == "transaction"
assert event["transaction"] == expected_transaction
assert event["transaction_info"] == {"source": expected_source}
@pytest.mark.asyncio
async def test_traces_sampler_gets_request_object_in_sampling_context(
sentry_init,
aiohttp_client,
DictionaryContaining, # noqa:N803
ObjectDescribedBy,
):
traces_sampler = mock.Mock()
sentry_init(
integrations=[AioHttpIntegration()],
traces_sampler=traces_sampler,
)
async def kangaroo_handler(request):
return web.Response(text="dogs are great")
app = web.Application()
app.router.add_get("/tricks/kangaroo", kangaroo_handler)
client = await aiohttp_client(app)
await client.get("/tricks/kangaroo")
traces_sampler.assert_any_call(
DictionaryContaining(
{
"aiohttp_request": ObjectDescribedBy(
type=Request, attrs={"method": "GET", "path": "/tricks/kangaroo"}
)
}
)
)
@pytest.mark.asyncio
async def test_has_trace_if_performance_enabled(
sentry_init, aiohttp_client, capture_events
):
sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)
async def hello(request):
capture_message("It's a good day to try dividing by 0")
1 / 0
app = web.Application()
app.router.add_get("/", hello)
events = capture_events()
client = await aiohttp_client(app)
resp = await client.get("/")
assert resp.status == 500
msg_event, error_event, transaction_event = events
assert msg_event["contexts"]["trace"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert error_event["contexts"]["trace"]
assert "trace_id" in error_event["contexts"]["trace"]
assert transaction_event["contexts"]["trace"]
assert "trace_id" in transaction_event["contexts"]["trace"]
assert (
error_event["contexts"]["trace"]["trace_id"]
== transaction_event["contexts"]["trace"]["trace_id"]
== msg_event["contexts"]["trace"]["trace_id"]
)
@pytest.mark.asyncio
async def test_has_trace_if_performance_disabled(
sentry_init, aiohttp_client, capture_events
):
sentry_init(integrations=[AioHttpIntegration()])
async def hello(request):
capture_message("It's a good day to try dividing by 0")
1 / 0
app = web.Application()
app.router.add_get("/", hello)
events = capture_events()
client = await aiohttp_client(app)
resp = await client.get("/")
assert resp.status == 500
msg_event, error_event = events
assert msg_event["contexts"]["trace"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert error_event["contexts"]["trace"]
assert "trace_id" in error_event["contexts"]["trace"]
assert (
error_event["contexts"]["trace"]["trace_id"]
== msg_event["contexts"]["trace"]["trace_id"]
)
@pytest.mark.asyncio
async def test_trace_from_headers_if_performance_enabled(
sentry_init, aiohttp_client, capture_events
):
sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)
async def hello(request):
capture_message("It's a good day to try dividing by 0")
1 / 0
app = web.Application()
app.router.add_get("/", hello)
events = capture_events()
# The aiohttp_client is instrumented, so it will generate the sentry-trace header and add it to the request.
# Get the sentry-trace header from the request so we can later compare it with the transaction events.
client = await aiohttp_client(app)
resp = await client.get("/")
sentry_trace_header = resp.request_info.headers.get("sentry-trace")
trace_id = sentry_trace_header.split("-")[0]
assert resp.status == 500
msg_event, error_event, transaction_event = events
assert msg_event["contexts"]["trace"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert error_event["contexts"]["trace"]
assert "trace_id" in error_event["contexts"]["trace"]
assert transaction_event["contexts"]["trace"]
assert "trace_id" in transaction_event["contexts"]["trace"]
assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
assert error_event["contexts"]["trace"]["trace_id"] == trace_id
assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id
@pytest.mark.asyncio
async def test_trace_from_headers_if_performance_disabled(
sentry_init, aiohttp_client, capture_events
):
sentry_init(integrations=[AioHttpIntegration()])
async def hello(request):
capture_message("It's a good day to try dividing by 0")
1 / 0
app = web.Application()
app.router.add_get("/", hello)
events = capture_events()
# The aiohttp_client is instrumented, so it will generate the sentry-trace header and add it to the request.
# Get the sentry-trace header from the request so we can later compare it with the transaction events.
client = await aiohttp_client(app)
resp = await client.get("/")
sentry_trace_header = resp.request_info.headers.get("sentry-trace")
trace_id = sentry_trace_header.split("-")[0]
assert resp.status == 500
msg_event, error_event = events
assert msg_event["contexts"]["trace"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert error_event["contexts"]["trace"]
assert "trace_id" in error_event["contexts"]["trace"]
assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
assert error_event["contexts"]["trace"]["trace_id"] == trace_id
@pytest.mark.asyncio
async def test_crumb_capture(
sentry_init, aiohttp_raw_server, aiohttp_client, loop, capture_events
):
def before_breadcrumb(crumb, hint):
crumb["data"]["extra"] = "foo"
return crumb
sentry_init(
integrations=[AioHttpIntegration()], before_breadcrumb=before_breadcrumb
)
async def handler(request):
return web.Response(text="OK")
raw_server = await aiohttp_raw_server(handler)
with start_transaction():
events = capture_events()
client = await aiohttp_client(raw_server)
resp = await client.get("/")
assert resp.status == 200
capture_message("Testing!")
(event,) = events
crumb = event["breadcrumbs"]["values"][0]
assert crumb["type"] == "http"
assert crumb["category"] == "httplib"
assert crumb["data"] == {
"url": "http://127.0.0.1:{}/".format(raw_server.port),
"http.fragment": "",
"http.method": "GET",
"http.query": "",
"http.response.status_code": 200,
"reason": "OK",
"extra": "foo",
}
@pytest.mark.asyncio
async def test_outgoing_trace_headers(sentry_init, aiohttp_raw_server, aiohttp_client):
sentry_init(
integrations=[AioHttpIntegration()],
traces_sample_rate=1.0,
)
async def handler(request):
return web.Response(text="OK")
raw_server = await aiohttp_raw_server(handler)
with start_transaction(
name="/interactions/other-dogs/new-dog",
op="greeting.sniff",
# make the trace_id different between transactions
trace_id="0123456789012345678901234567890",
) as transaction:
client = await aiohttp_client(raw_server)
resp = await client.get("/")
request_span = transaction._span_recorder.spans[-1]
assert resp.request_info.headers[
"sentry-trace"
] == "{trace_id}-{parent_span_id}-{sampled}".format(
trace_id=transaction.trace_id,
parent_span_id=request_span.span_id,
sampled=1,
)
@pytest.mark.asyncio
async def test_outgoing_trace_headers_append_to_baggage(
sentry_init, aiohttp_raw_server, aiohttp_client
):
sentry_init(
integrations=[AioHttpIntegration()],
traces_sample_rate=1.0,
release="d08ebdb9309e1b004c6f52202de58a09c2268e42",
)
async def handler(request):
return web.Response(text="OK")
raw_server = await aiohttp_raw_server(handler)
with start_transaction(
name="/interactions/other-dogs/new-dog",
op="greeting.sniff",
trace_id="0123456789012345678901234567890",
):
client = await aiohttp_client(raw_server)
resp = await client.get("/", headers={"bagGage": "custom=value"})
assert (
resp.request_info.headers["baggage"]
== "custom=value,sentry-trace_id=0123456789012345678901234567890,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true"
)
sentry-python-1.39.2/tests/integrations/argv/ 0000775 0000000 0000000 00000000000 14547447232 0021274 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/argv/test_argv.py 0000664 0000000 0000000 00000000644 14547447232 0023650 0 ustar 00root root 0000000 0000000 import sys
from sentry_sdk import capture_message
from sentry_sdk.integrations.argv import ArgvIntegration
def test_basic(sentry_init, capture_events, monkeypatch):
sentry_init(integrations=[ArgvIntegration()])
argv = ["foo", "bar", "baz"]
monkeypatch.setattr(sys, "argv", argv)
events = capture_events()
capture_message("hi")
(event,) = events
assert event["extra"]["sys.argv"] == argv
sentry-python-1.39.2/tests/integrations/ariadne/ 0000775 0000000 0000000 00000000000 14547447232 0021740 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/ariadne/__init__.py 0000664 0000000 0000000 00000000152 14547447232 0024047 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("ariadne")
pytest.importorskip("fastapi")
pytest.importorskip("flask")
sentry-python-1.39.2/tests/integrations/ariadne/test_ariadne.py 0000664 0000000 0000000 00000016504 14547447232 0024762 0 ustar 00root root 0000000 0000000 from ariadne import gql, graphql_sync, ObjectType, QueryType, make_executable_schema
from ariadne.asgi import GraphQL
from fastapi import FastAPI
from fastapi.testclient import TestClient
from flask import Flask, request, jsonify
from sentry_sdk.integrations.ariadne import AriadneIntegration
from sentry_sdk.integrations.fastapi import FastApiIntegration
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.starlette import StarletteIntegration
def schema_factory():
type_defs = gql(
"""
type Query {
greeting(name: String): Greeting
error: String
}
type Greeting {
name: String
}
"""
)
query = QueryType()
greeting = ObjectType("Greeting")
@query.field("greeting")
def resolve_greeting(*_, **kwargs):
name = kwargs.pop("name")
return {"name": name}
@query.field("error")
def resolve_error(obj, *_):
raise RuntimeError("resolver failed")
@greeting.field("name")
def resolve_name(obj, *_):
return "Hello, {}!".format(obj["name"])
return make_executable_schema(type_defs, query)
def test_capture_request_and_response_if_send_pii_is_on_async(
sentry_init, capture_events
):
sentry_init(
send_default_pii=True,
integrations=[
AriadneIntegration(),
FastApiIntegration(),
StarletteIntegration(),
],
)
events = capture_events()
schema = schema_factory()
async_app = FastAPI()
async_app.mount("/graphql/", GraphQL(schema))
query = {"query": "query ErrorQuery {error}"}
client = TestClient(async_app)
client.post("/graphql", json=query)
assert len(events) == 1
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
assert event["contexts"]["response"] == {
"data": {
"data": {"error": None},
"errors": [
{
"locations": [{"column": 19, "line": 1}],
"message": "resolver failed",
"path": ["error"],
}
],
}
}
assert event["request"]["api_target"] == "graphql"
assert event["request"]["data"] == query
def test_capture_request_and_response_if_send_pii_is_on_sync(
sentry_init, capture_events
):
sentry_init(
send_default_pii=True,
integrations=[AriadneIntegration(), FlaskIntegration()],
)
events = capture_events()
schema = schema_factory()
sync_app = Flask(__name__)
@sync_app.route("/graphql", methods=["POST"])
def graphql_server():
data = request.get_json()
success, result = graphql_sync(schema, data)
return jsonify(result), 200
query = {"query": "query ErrorQuery {error}"}
client = sync_app.test_client()
client.post("/graphql", json=query)
assert len(events) == 1
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
assert event["contexts"]["response"] == {
"data": {
"data": {"error": None},
"errors": [
{
"locations": [{"column": 19, "line": 1}],
"message": "resolver failed",
"path": ["error"],
}
],
}
}
assert event["request"]["api_target"] == "graphql"
assert event["request"]["data"] == query
def test_do_not_capture_request_and_response_if_send_pii_is_off_async(
sentry_init, capture_events
):
sentry_init(
integrations=[
AriadneIntegration(),
FastApiIntegration(),
StarletteIntegration(),
],
)
events = capture_events()
schema = schema_factory()
async_app = FastAPI()
async_app.mount("/graphql/", GraphQL(schema))
query = {"query": "query ErrorQuery {error}"}
client = TestClient(async_app)
client.post("/graphql", json=query)
assert len(events) == 1
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
assert "data" not in event["request"]
assert "response" not in event["contexts"]
def test_do_not_capture_request_and_response_if_send_pii_is_off_sync(
sentry_init, capture_events
):
sentry_init(
integrations=[AriadneIntegration(), FlaskIntegration()],
)
events = capture_events()
schema = schema_factory()
sync_app = Flask(__name__)
@sync_app.route("/graphql", methods=["POST"])
def graphql_server():
data = request.get_json()
success, result = graphql_sync(schema, data)
return jsonify(result), 200
query = {"query": "query ErrorQuery {error}"}
client = sync_app.test_client()
client.post("/graphql", json=query)
assert len(events) == 1
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
assert "data" not in event["request"]
assert "response" not in event["contexts"]
def test_capture_validation_error(sentry_init, capture_events):
sentry_init(
send_default_pii=True,
integrations=[
AriadneIntegration(),
FastApiIntegration(),
StarletteIntegration(),
],
)
events = capture_events()
schema = schema_factory()
async_app = FastAPI()
async_app.mount("/graphql/", GraphQL(schema))
query = {"query": "query ErrorQuery {doesnt_exist}"}
client = TestClient(async_app)
client.post("/graphql", json=query)
assert len(events) == 1
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
assert event["contexts"]["response"] == {
"data": {
"errors": [
{
"locations": [{"column": 19, "line": 1}],
"message": "Cannot query field 'doesnt_exist' on type 'Query'.",
}
]
}
}
assert event["request"]["api_target"] == "graphql"
assert event["request"]["data"] == query
def test_no_event_if_no_errors_async(sentry_init, capture_events):
sentry_init(
integrations=[
AriadneIntegration(),
FastApiIntegration(),
StarletteIntegration(),
],
)
events = capture_events()
schema = schema_factory()
async_app = FastAPI()
async_app.mount("/graphql/", GraphQL(schema))
query = {
"query": "query GreetingQuery($name: String) { greeting(name: $name) {name} }",
"variables": {"name": "some name"},
}
client = TestClient(async_app)
client.post("/graphql", json=query)
assert len(events) == 0
def test_no_event_if_no_errors_sync(sentry_init, capture_events):
sentry_init(
integrations=[AriadneIntegration(), FlaskIntegration()],
)
events = capture_events()
schema = schema_factory()
sync_app = Flask(__name__)
@sync_app.route("/graphql", methods=["POST"])
def graphql_server():
data = request.get_json()
success, result = graphql_sync(schema, data)
return jsonify(result), 200
query = {
"query": "query GreetingQuery($name: String) { greeting(name: $name) {name} }",
"variables": {"name": "some name"},
}
client = sync_app.test_client()
client.post("/graphql", json=query)
assert len(events) == 0
sentry-python-1.39.2/tests/integrations/arq/ 0000775 0000000 0000000 00000000000 14547447232 0021120 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/arq/__init__.py 0000664 0000000 0000000 00000000052 14547447232 0023226 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("arq")
sentry-python-1.39.2/tests/integrations/arq/test_arq.py 0000664 0000000 0000000 00000016421 14547447232 0023320 0 ustar 00root root 0000000 0000000 import asyncio
import pytest
from sentry_sdk import start_transaction, Hub
from sentry_sdk.integrations.arq import ArqIntegration
import arq.worker
from arq import cron
from arq.connections import ArqRedis
from arq.jobs import Job
from arq.utils import timestamp_ms
from fakeredis.aioredis import FakeRedis
def async_partial(async_fn, *args, **kwargs):
# asyncio.iscoroutinefunction (used in the integration code) does not detect
# async functions wrapped in functools.partial objects on Python < 3.8.
# This partial implementation returns a coroutine function instead.
async def wrapped(ctx):
return await async_fn(ctx, *args, **kwargs)
return wrapped
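# Illustrative sketch of the behavior being worked around (hypothetical
# snippet; `division` stands in for any async function such as the one
# defined in the tests below):
#
#     import asyncio, functools
#
#     partial = functools.partial(division, a=1, b=1)
#     asyncio.iscoroutinefunction(partial)  # False on Python < 3.8
#     asyncio.iscoroutinefunction(async_partial(division, a=1, b=1))  # True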
@pytest.fixture(autouse=True)
def patch_fakeredis_info_command():
from fakeredis._fakesocket import FakeSocket
if not hasattr(FakeSocket, "info"):
from fakeredis._commands import command
from fakeredis._helpers import SimpleString
@command((SimpleString,), name="info")
def info(self, section):
return section
FakeSocket.info = info
@pytest.fixture
def init_arq(sentry_init):
def inner(
cls_functions=None,
cls_cron_jobs=None,
kw_functions=None,
kw_cron_jobs=None,
allow_abort_jobs_=False,
):
cls_functions = cls_functions or []
cls_cron_jobs = cls_cron_jobs or []
kwargs = {}
if kw_functions is not None:
kwargs["functions"] = kw_functions
if kw_cron_jobs is not None:
kwargs["cron_jobs"] = kw_cron_jobs
sentry_init(
integrations=[ArqIntegration()],
traces_sample_rate=1.0,
send_default_pii=True,
debug=True,
)
server = FakeRedis()
pool = ArqRedis(pool_or_conn=server.connection_pool)
class WorkerSettings:
functions = cls_functions
cron_jobs = cls_cron_jobs
redis_pool = pool
allow_abort_jobs = allow_abort_jobs_
if not WorkerSettings.functions:
del WorkerSettings.functions
if not WorkerSettings.cron_jobs:
del WorkerSettings.cron_jobs
worker = arq.worker.create_worker(WorkerSettings, **kwargs)
return pool, worker
return inner
@pytest.mark.asyncio
async def test_job_result(init_arq):
async def increase(ctx, num):
return num + 1
increase.__qualname__ = increase.__name__
pool, worker = init_arq([increase])
job = await pool.enqueue_job("increase", 3)
assert isinstance(job, Job)
await worker.run_job(job.job_id, timestamp_ms())
result = await job.result()
job_result = await job.result_info()
assert result == 4
assert job_result.result == 4
@pytest.mark.asyncio
async def test_job_retry(capture_events, init_arq):
async def retry_job(ctx):
if ctx["job_try"] < 2:
raise arq.worker.Retry
retry_job.__qualname__ = retry_job.__name__
pool, worker = init_arq([retry_job])
job = await pool.enqueue_job("retry_job")
events = capture_events()
await worker.run_job(job.job_id, timestamp_ms())
event = events.pop(0)
assert event["contexts"]["trace"]["status"] == "aborted"
assert event["transaction"] == "retry_job"
assert event["tags"]["arq_task_id"] == job.job_id
assert event["extra"]["arq-job"]["retry"] == 1
await worker.run_job(job.job_id, timestamp_ms())
event = events.pop(0)
assert event["contexts"]["trace"]["status"] == "ok"
assert event["transaction"] == "retry_job"
assert event["tags"]["arq_task_id"] == job.job_id
assert event["extra"]["arq-job"]["retry"] == 2
@pytest.mark.parametrize(
"source", [("cls_functions", "cls_cron_jobs"), ("kw_functions", "kw_cron_jobs")]
)
@pytest.mark.parametrize("job_fails", [True, False], ids=["error", "success"])
@pytest.mark.asyncio
async def test_job_transaction(capture_events, init_arq, source, job_fails):
async def division(_, a, b=0):
return a / b
division.__qualname__ = division.__name__
cron_func = async_partial(division, a=1, b=int(not job_fails))
cron_func.__qualname__ = division.__name__
cron_job = cron(cron_func, minute=0, run_at_startup=True)
functions_key, cron_jobs_key = source
pool, worker = init_arq(**{functions_key: [division], cron_jobs_key: [cron_job]})
events = capture_events()
job = await pool.enqueue_job("division", 1, b=int(not job_fails))
await worker.run_job(job.job_id, timestamp_ms())
loop = asyncio.get_event_loop()
task = loop.create_task(worker.async_run())
await asyncio.sleep(1)
task.cancel()
await worker.close()
if job_fails:
error_func_event = events.pop(0)
error_cron_event = events.pop(1)
assert error_func_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
assert error_func_event["exception"]["values"][0]["mechanism"]["type"] == "arq"
func_extra = error_func_event["extra"]["arq-job"]
assert func_extra["task"] == "division"
assert error_cron_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
assert error_cron_event["exception"]["values"][0]["mechanism"]["type"] == "arq"
cron_extra = error_cron_event["extra"]["arq-job"]
assert cron_extra["task"] == "cron:division"
[func_event, cron_event] = events
assert func_event["type"] == "transaction"
assert func_event["transaction"] == "division"
assert func_event["transaction_info"] == {"source": "task"}
assert "arq_task_id" in func_event["tags"]
assert "arq_task_retry" in func_event["tags"]
func_extra = func_event["extra"]["arq-job"]
assert func_extra["task"] == "division"
assert func_extra["kwargs"] == {"b": int(not job_fails)}
assert func_extra["retry"] == 1
assert cron_event["type"] == "transaction"
assert cron_event["transaction"] == "cron:division"
assert cron_event["transaction_info"] == {"source": "task"}
assert "arq_task_id" in cron_event["tags"]
assert "arq_task_retry" in cron_event["tags"]
cron_extra = cron_event["extra"]["arq-job"]
assert cron_extra["task"] == "cron:division"
assert cron_extra["kwargs"] == {}
assert cron_extra["retry"] == 1
@pytest.mark.parametrize("source", ["cls_functions", "kw_functions"])
@pytest.mark.asyncio
async def test_enqueue_job(capture_events, init_arq, source):
async def dummy_job(_):
pass
pool, _ = init_arq(**{source: [dummy_job]})
events = capture_events()
with start_transaction() as transaction:
await pool.enqueue_job("dummy_job")
(event,) = events
assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
assert event["contexts"]["trace"]["span_id"] == transaction.span_id
assert len(event["spans"])
assert event["spans"][0]["op"] == "queue.submit.arq"
assert event["spans"][0]["description"] == "dummy_job"
@pytest.mark.asyncio
async def test_execute_job_without_integration(init_arq):
async def dummy_job(_ctx):
pass
dummy_job.__qualname__ = dummy_job.__name__
pool, worker = init_arq([dummy_job])
# remove the integration to trigger the edge case
Hub.current.client.integrations.pop("arq")
job = await pool.enqueue_job("dummy_job")
await worker.run_job(job.job_id, timestamp_ms())
assert await job.result() is None
sentry-python-1.39.2/tests/integrations/asgi/ 0000775 0000000 0000000 00000000000 14547447232 0021260 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/asgi/__init__.py 0000664 0000000 0000000 00000000201 14547447232 0023362 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("asyncio")
pytest.importorskip("pytest_asyncio")
pytest.importorskip("async_asgi_testclient")
sentry-python-1.39.2/tests/integrations/asgi/test_asgi.py 0000664 0000000 0000000 00000045505 14547447232 0023625 0 ustar 00root root 0000000 0000000 import sys
from collections import Counter
import pytest
import sentry_sdk
from sentry_sdk import capture_message
from sentry_sdk.integrations._asgi_common import _get_ip, _get_headers
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware, _looks_like_asgi3
from async_asgi_testclient import TestClient
minimum_python_36 = pytest.mark.skipif(
sys.version_info < (3, 6), reason="ASGI is only supported in Python >= 3.6"
)
@pytest.fixture
def asgi3_app():
async def app(scope, receive, send):
if scope["type"] == "lifespan":
while True:
message = await receive()
if message["type"] == "lifespan.startup":
await send({"type": "lifespan.startup.complete"})
elif message["type"] == "lifespan.shutdown":
await send({"type": "lifespan.shutdown.complete"})
return
elif (
scope["type"] == "http"
and "route" in scope
and scope["route"] == "/trigger/error"
):
1 / 0
await send(
{
"type": "http.response.start",
"status": 200,
"headers": [
[b"content-type", b"text/plain"],
],
}
)
await send(
{
"type": "http.response.body",
"body": b"Hello, world!",
}
)
return app
@pytest.fixture
def asgi3_app_with_error():
async def send_with_error(event):
1 / 0
async def app(scope, receive, send):
if scope["type"] == "lifespan":
while True:
message = await receive()
if message["type"] == "lifespan.startup":
... # Do some startup here!
await send({"type": "lifespan.startup.complete"})
elif message["type"] == "lifespan.shutdown":
... # Do some shutdown here!
await send({"type": "lifespan.shutdown.complete"})
return
else:
await send_with_error(
{
"type": "http.response.start",
"status": 200,
"headers": [
[b"content-type", b"text/plain"],
],
}
)
await send_with_error(
{
"type": "http.response.body",
"body": b"Hello, world!",
}
)
return app
@pytest.fixture
def asgi3_app_with_error_and_msg():
async def app(scope, receive, send):
await send(
{
"type": "http.response.start",
"status": 200,
"headers": [
[b"content-type", b"text/plain"],
],
}
)
capture_message("Let's try dividing by 0")
1 / 0
await send(
{
"type": "http.response.body",
"body": b"Hello, world!",
}
)
return app
@pytest.fixture
def asgi3_ws_app():
def message():
capture_message("Some message to the world!")
raise ValueError("Oh no")
async def app(scope, receive, send):
await send(
{
"type": "websocket.send",
"text": message(),
}
)
return app
@minimum_python_36
def test_invalid_transaction_style(asgi3_app):
with pytest.raises(ValueError) as exp:
SentryAsgiMiddleware(asgi3_app, transaction_style="URL")
assert (
str(exp.value)
== "Invalid value for transaction_style: URL (must be in ('endpoint', 'url'))"
)
@minimum_python_36
@pytest.mark.asyncio
async def test_capture_transaction(
sentry_init,
asgi3_app,
capture_events,
):
sentry_init(send_default_pii=True, traces_sample_rate=1.0)
app = SentryAsgiMiddleware(asgi3_app)
async with TestClient(app) as client:
events = capture_events()
await client.get("/some_url?somevalue=123")
(transaction_event,) = events
assert transaction_event["type"] == "transaction"
assert transaction_event["transaction"] == "/some_url"
assert transaction_event["transaction_info"] == {"source": "url"}
assert transaction_event["contexts"]["trace"]["op"] == "http.server"
assert transaction_event["request"] == {
"headers": {
"host": "localhost",
"remote-addr": "127.0.0.1",
"user-agent": "ASGI-Test-Client",
},
"method": "GET",
"query_string": "somevalue=123",
"url": "http://localhost/some_url",
}
@minimum_python_36
@pytest.mark.asyncio
async def test_capture_transaction_with_error(
sentry_init,
asgi3_app_with_error,
capture_events,
DictionaryContaining, # noqa: N803
):
sentry_init(send_default_pii=True, traces_sample_rate=1.0)
app = SentryAsgiMiddleware(asgi3_app_with_error)
events = capture_events()
with pytest.raises(ZeroDivisionError):
async with TestClient(app) as client:
await client.get("/some_url")
(
error_event,
transaction_event,
) = events
assert error_event["transaction"] == "/some_url"
assert error_event["transaction_info"] == {"source": "url"}
assert error_event["contexts"]["trace"]["op"] == "http.server"
assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
assert error_event["exception"]["values"][0]["value"] == "division by zero"
assert error_event["exception"]["values"][0]["mechanism"]["handled"] is False
assert error_event["exception"]["values"][0]["mechanism"]["type"] == "asgi"
assert transaction_event["type"] == "transaction"
assert transaction_event["contexts"]["trace"] == DictionaryContaining(
error_event["contexts"]["trace"]
)
assert transaction_event["contexts"]["trace"]["status"] == "internal_error"
assert transaction_event["transaction"] == error_event["transaction"]
assert transaction_event["request"] == error_event["request"]
@minimum_python_36
@pytest.mark.asyncio
async def test_has_trace_if_performance_enabled(
sentry_init,
asgi3_app_with_error_and_msg,
capture_events,
):
sentry_init(traces_sample_rate=1.0)
app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)
with pytest.raises(ZeroDivisionError):
async with TestClient(app) as client:
events = capture_events()
await client.get("/")
msg_event, error_event, transaction_event = events
assert msg_event["contexts"]["trace"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert error_event["contexts"]["trace"]
assert "trace_id" in error_event["contexts"]["trace"]
assert transaction_event["contexts"]["trace"]
assert "trace_id" in transaction_event["contexts"]["trace"]
assert (
error_event["contexts"]["trace"]["trace_id"]
== transaction_event["contexts"]["trace"]["trace_id"]
== msg_event["contexts"]["trace"]["trace_id"]
)
@minimum_python_36
@pytest.mark.asyncio
async def test_has_trace_if_performance_disabled(
sentry_init,
asgi3_app_with_error_and_msg,
capture_events,
):
sentry_init()
app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)
with pytest.raises(ZeroDivisionError):
async with TestClient(app) as client:
events = capture_events()
await client.get("/")
msg_event, error_event = events
assert msg_event["contexts"]["trace"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert error_event["contexts"]["trace"]
assert "trace_id" in error_event["contexts"]["trace"]
@minimum_python_36
@pytest.mark.asyncio
async def test_trace_from_headers_if_performance_enabled(
sentry_init,
asgi3_app_with_error_and_msg,
capture_events,
):
sentry_init(traces_sample_rate=1.0)
app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)
trace_id = "582b43a4192642f0b136d5159a501701"
sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
with pytest.raises(ZeroDivisionError):
async with TestClient(app) as client:
events = capture_events()
await client.get("/", headers={"sentry-trace": sentry_trace_header})
msg_event, error_event, transaction_event = events
assert msg_event["contexts"]["trace"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert error_event["contexts"]["trace"]
assert "trace_id" in error_event["contexts"]["trace"]
assert transaction_event["contexts"]["trace"]
assert "trace_id" in transaction_event["contexts"]["trace"]
assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
assert error_event["contexts"]["trace"]["trace_id"] == trace_id
assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id
@minimum_python_36
@pytest.mark.asyncio
async def test_trace_from_headers_if_performance_disabled(
sentry_init,
asgi3_app_with_error_and_msg,
capture_events,
):
sentry_init()
app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)
trace_id = "582b43a4192642f0b136d5159a501701"
sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
with pytest.raises(ZeroDivisionError):
async with TestClient(app) as client:
events = capture_events()
await client.get("/", headers={"sentry-trace": sentry_trace_header})
msg_event, error_event = events
assert msg_event["contexts"]["trace"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
assert error_event["contexts"]["trace"]
assert "trace_id" in error_event["contexts"]["trace"]
assert error_event["contexts"]["trace"]["trace_id"] == trace_id
@minimum_python_36
@pytest.mark.asyncio
async def test_websocket(sentry_init, asgi3_ws_app, capture_events, request):
sentry_init(debug=True, send_default_pii=True)
events = capture_events()
asgi3_ws_app = SentryAsgiMiddleware(asgi3_ws_app)
scope = {
"type": "websocket",
"endpoint": asgi3_app,
"client": ("127.0.0.1", 60457),
"route": "some_url",
"headers": [
("accept", "*/*"),
],
}
with pytest.raises(ValueError):
async with TestClient(asgi3_ws_app, scope=scope) as client:
async with client.websocket_connect("/ws") as ws:
await ws.receive_text()
msg_event, error_event = events
assert msg_event["message"] == "Some message to the world!"
(exc,) = error_event["exception"]["values"]
assert exc["type"] == "ValueError"
assert exc["value"] == "Oh no"
@minimum_python_36
@pytest.mark.asyncio
async def test_auto_session_tracking_with_aggregates(
sentry_init, asgi3_app, capture_envelopes
):
sentry_init(send_default_pii=True, traces_sample_rate=1.0)
app = SentryAsgiMiddleware(asgi3_app)
scope = {
"endpoint": asgi3_app,
"client": ("127.0.0.1", 60457),
}
with pytest.raises(ZeroDivisionError):
envelopes = capture_envelopes()
async with TestClient(app, scope=scope) as client:
scope["route"] = "/some/fine/url"
await client.get("/some/fine/url")
scope["route"] = "/some/fine/url"
await client.get("/some/fine/url")
scope["route"] = "/trigger/error"
await client.get("/trigger/error")
sentry_sdk.flush()
count_item_types = Counter()
for envelope in envelopes:
count_item_types[envelope.items[0].type] += 1
assert count_item_types["transaction"] == 3
assert count_item_types["event"] == 1
assert count_item_types["sessions"] == 1
assert len(envelopes) == 5
session_aggregates = envelopes[-1].items[0].payload.json["aggregates"]
assert session_aggregates[0]["exited"] == 2
assert session_aggregates[0]["crashed"] == 1
assert len(session_aggregates) == 1
@minimum_python_36
@pytest.mark.parametrize(
"url,transaction_style,expected_transaction,expected_source",
[
(
"/message",
"url",
"generic ASGI request",
"route",
),
(
"/message",
"endpoint",
"tests.integrations.asgi.test_asgi.asgi3_app..app",
"component",
),
],
)
@pytest.mark.asyncio
async def test_transaction_style(
sentry_init,
asgi3_app,
capture_events,
url,
transaction_style,
expected_transaction,
expected_source,
):
sentry_init(send_default_pii=True, traces_sample_rate=1.0)
app = SentryAsgiMiddleware(asgi3_app, transaction_style=transaction_style)
scope = {
"endpoint": asgi3_app,
"route": url,
"client": ("127.0.0.1", 60457),
}
async with TestClient(app, scope=scope) as client:
events = capture_events()
await client.get(url)
(transaction_event,) = events
assert transaction_event["transaction"] == expected_transaction
assert transaction_event["transaction_info"] == {"source": expected_source}
def mock_asgi2_app():
pass
class MockAsgi2App:
def __call__():
pass
class MockAsgi3App(MockAsgi2App):
def __await__():
pass
async def __call__():
pass
@minimum_python_36
def test_looks_like_asgi3(asgi3_app):
# branch: inspect.isclass(app)
assert _looks_like_asgi3(MockAsgi3App)
assert not _looks_like_asgi3(MockAsgi2App)
# branch: inspect.isfunction(app)
assert _looks_like_asgi3(asgi3_app)
assert not _looks_like_asgi3(mock_asgi2_app)
# branch: else
asgi3 = MockAsgi3App()
assert _looks_like_asgi3(asgi3)
asgi2 = MockAsgi2App()
assert not _looks_like_asgi3(asgi2)
@minimum_python_36
def test_get_ip_x_forwarded_for():
headers = [
(b"x-forwarded-for", b"8.8.8.8"),
]
scope = {
"client": ("127.0.0.1", 60457),
"headers": headers,
}
ip = _get_ip(scope)
assert ip == "8.8.8.8"
# x-forwarded-for overrides x-real-ip
headers = [
(b"x-forwarded-for", b"8.8.8.8"),
(b"x-real-ip", b"10.10.10.10"),
]
scope = {
"client": ("127.0.0.1", 60457),
"headers": headers,
}
ip = _get_ip(scope)
assert ip == "8.8.8.8"
# when multiple x-forwarded-for headers are provided, the first one is taken
headers = [
(b"x-forwarded-for", b"5.5.5.5"),
(b"x-forwarded-for", b"6.6.6.6"),
(b"x-forwarded-for", b"7.7.7.7"),
]
scope = {
"client": ("127.0.0.1", 60457),
"headers": headers,
}
ip = _get_ip(scope)
assert ip == "5.5.5.5"
@minimum_python_36
def test_get_ip_x_real_ip():
headers = [
(b"x-real-ip", b"10.10.10.10"),
]
scope = {
"client": ("127.0.0.1", 60457),
"headers": headers,
}
ip = _get_ip(scope)
assert ip == "10.10.10.10"
# x-forwarded-for overrides x-real-ip
headers = [
(b"x-forwarded-for", b"8.8.8.8"),
(b"x-real-ip", b"10.10.10.10"),
]
scope = {
"client": ("127.0.0.1", 60457),
"headers": headers,
}
ip = _get_ip(scope)
assert ip == "8.8.8.8"
@minimum_python_36
def test_get_ip():
# if no headers are provided, the IP is taken from the client.
headers = []
scope = {
"client": ("127.0.0.1", 60457),
"headers": headers,
}
ip = _get_ip(scope)
assert ip == "127.0.0.1"
# the x-forwarded-for header overrides the IP from the client
headers = [
(b"x-forwarded-for", b"8.8.8.8"),
]
scope = {
"client": ("127.0.0.1", 60457),
"headers": headers,
}
ip = _get_ip(scope)
assert ip == "8.8.8.8"
# the x-real-ip header overrides the IP from the client
headers = [
(b"x-real-ip", b"10.10.10.10"),
]
scope = {
"client": ("127.0.0.1", 60457),
"headers": headers,
}
ip = _get_ip(scope)
assert ip == "10.10.10.10"
@minimum_python_36
def test_get_headers():
headers = [
(b"x-real-ip", b"10.10.10.10"),
(b"some_header", b"123"),
(b"some_header", b"abc"),
]
scope = {
"client": ("127.0.0.1", 60457),
"headers": headers,
}
headers = _get_headers(scope)
assert headers == {
"x-real-ip": "10.10.10.10",
"some_header": "123, abc",
}
@minimum_python_36
@pytest.mark.asyncio
@pytest.mark.parametrize(
"request_url,transaction_style,expected_transaction_name,expected_transaction_source",
[
(
"/message/123456",
"endpoint",
"/message/123456",
"url",
),
(
"/message/123456",
"url",
"/message/123456",
"url",
),
],
)
async def test_transaction_name(
sentry_init,
request_url,
transaction_style,
expected_transaction_name,
expected_transaction_source,
asgi3_app,
capture_envelopes,
):
"""
Tests that the transaction name is something meaningful.
"""
sentry_init(
traces_sample_rate=1.0,
debug=True,
)
envelopes = capture_envelopes()
app = SentryAsgiMiddleware(asgi3_app, transaction_style=transaction_style)
async with TestClient(app) as client:
await client.get(request_url)
(transaction_envelope,) = envelopes
transaction_event = transaction_envelope.get_transaction_event()
assert transaction_event["transaction"] == expected_transaction_name
assert (
transaction_event["transaction_info"]["source"] == expected_transaction_source
)
@minimum_python_36
@pytest.mark.asyncio
@pytest.mark.parametrize(
"request_url, transaction_style,expected_transaction_name,expected_transaction_source",
[
(
"/message/123456",
"endpoint",
"/message/123456",
"url",
),
(
"/message/123456",
"url",
"/message/123456",
"url",
),
],
)
async def test_transaction_name_in_traces_sampler(
sentry_init,
request_url,
transaction_style,
expected_transaction_name,
expected_transaction_source,
asgi3_app,
):
"""
Tests that a custom traces_sampler has a meaningful transaction name.
In this case the URL or endpoint, because we do not have the route yet.
"""
def dummy_traces_sampler(sampling_context):
assert (
sampling_context["transaction_context"]["name"] == expected_transaction_name
)
assert (
sampling_context["transaction_context"]["source"]
== expected_transaction_source
)
sentry_init(
traces_sampler=dummy_traces_sampler,
traces_sample_rate=1.0,
debug=True,
)
app = SentryAsgiMiddleware(asgi3_app, transaction_style=transaction_style)
async with TestClient(app) as client:
await client.get(request_url)
sentry-python-1.39.2/tests/integrations/asyncio/ 0000775 0000000 0000000 00000000000 14547447232 0022002 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/asyncio/__init__.py 0000664 0000000 0000000 00000000000 14547447232 0024101 0 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/asyncio/test_asyncio_py3.py 0000664 0000000 0000000 00000024721 14547447232 0025661 0 ustar 00root root 0000000 0000000 import asyncio
import inspect
import sys
import pytest
import sentry_sdk
from sentry_sdk.consts import OP
from sentry_sdk.integrations.asyncio import AsyncioIntegration, patch_asyncio
try:
from unittest.mock import MagicMock, patch
except ImportError:
from mock import MagicMock, patch
try:
from contextvars import Context, ContextVar
except ImportError:
pass # All tests will be skipped with incompatible versions
minimum_python_37 = pytest.mark.skipif(
sys.version_info < (3, 7), reason="Asyncio tests need Python >= 3.7"
)
minimum_python_311 = pytest.mark.skipif(
sys.version_info < (3, 11),
reason="Asyncio task context parameter was introduced in Python 3.11",
)
async def foo():
await asyncio.sleep(0.01)
async def bar():
await asyncio.sleep(0.01)
async def boom():
1 / 0
@pytest.fixture(scope="session")
def event_loop(request):
"""Create an instance of the default event loop for each test case."""
loop = asyncio.get_event_loop_policy().new_event_loop()
yield loop
loop.close()
def get_sentry_task_factory(mock_get_running_loop):
"""
Patches (mocked) asyncio and gets the sentry_task_factory.
"""
mock_loop = mock_get_running_loop.return_value
patch_asyncio()
patched_factory = mock_loop.set_task_factory.call_args[0][0]
return patched_factory
@minimum_python_37
@pytest.mark.asyncio
async def test_create_task(
sentry_init,
capture_events,
event_loop,
):
sentry_init(
traces_sample_rate=1.0,
send_default_pii=True,
debug=True,
integrations=[
AsyncioIntegration(),
],
)
events = capture_events()
with sentry_sdk.start_transaction(name="test_transaction_for_create_task"):
with sentry_sdk.start_span(op="root", description="not so important"):
tasks = [event_loop.create_task(foo()), event_loop.create_task(bar())]
await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION)
sentry_sdk.flush()
(transaction_event,) = events
assert transaction_event["spans"][0]["op"] == "root"
assert transaction_event["spans"][0]["description"] == "not so important"
assert transaction_event["spans"][1]["op"] == OP.FUNCTION
assert transaction_event["spans"][1]["description"] == "foo"
assert (
transaction_event["spans"][1]["parent_span_id"]
== transaction_event["spans"][0]["span_id"]
)
assert transaction_event["spans"][2]["op"] == OP.FUNCTION
assert transaction_event["spans"][2]["description"] == "bar"
assert (
transaction_event["spans"][2]["parent_span_id"]
== transaction_event["spans"][0]["span_id"]
)
@minimum_python_37
@pytest.mark.asyncio
async def test_gather(
sentry_init,
capture_events,
):
sentry_init(
traces_sample_rate=1.0,
send_default_pii=True,
debug=True,
integrations=[
AsyncioIntegration(),
],
)
events = capture_events()
with sentry_sdk.start_transaction(name="test_transaction_for_gather"):
with sentry_sdk.start_span(op="root", description="not so important"):
await asyncio.gather(foo(), bar(), return_exceptions=True)
sentry_sdk.flush()
(transaction_event,) = events
assert transaction_event["spans"][0]["op"] == "root"
assert transaction_event["spans"][0]["description"] == "not so important"
assert transaction_event["spans"][1]["op"] == OP.FUNCTION
assert transaction_event["spans"][1]["description"] == "foo"
assert (
transaction_event["spans"][1]["parent_span_id"]
== transaction_event["spans"][0]["span_id"]
)
assert transaction_event["spans"][2]["op"] == OP.FUNCTION
assert transaction_event["spans"][2]["description"] == "bar"
assert (
transaction_event["spans"][2]["parent_span_id"]
== transaction_event["spans"][0]["span_id"]
)
@minimum_python_37
@pytest.mark.asyncio
async def test_exception(
sentry_init,
capture_events,
event_loop,
):
sentry_init(
traces_sample_rate=1.0,
send_default_pii=True,
debug=True,
integrations=[
AsyncioIntegration(),
],
)
events = capture_events()
with sentry_sdk.start_transaction(name="test_exception"):
with sentry_sdk.start_span(op="root", description="not so important"):
tasks = [event_loop.create_task(boom()), event_loop.create_task(bar())]
await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION)
sentry_sdk.flush()
(error_event, _) = events
assert error_event["transaction"] == "test_exception"
assert error_event["contexts"]["trace"]["op"] == "function"
assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
assert error_event["exception"]["values"][0]["value"] == "division by zero"
assert error_event["exception"]["values"][0]["mechanism"]["handled"] is False
assert error_event["exception"]["values"][0]["mechanism"]["type"] == "asyncio"
@minimum_python_37
@pytest.mark.asyncio
async def test_task_result(sentry_init):
sentry_init(
integrations=[
AsyncioIntegration(),
],
)
async def add(a, b):
return a + b
result = await asyncio.create_task(add(1, 2))
assert result == 3, result
@minimum_python_311
@pytest.mark.asyncio
async def test_task_with_context(sentry_init):
"""
Integration test to ensure working context parameter in Python 3.11+
"""
sentry_init(
integrations=[
AsyncioIntegration(),
],
)
var = ContextVar("var")
var.set("original value")
async def change_value():
var.set("changed value")
async def retrieve_value():
return var.get()
# Create a context and run both tasks within the context
ctx = Context()
async with asyncio.TaskGroup() as tg:
tg.create_task(change_value(), context=ctx)
retrieve_task = tg.create_task(retrieve_value(), context=ctx)
assert retrieve_task.result() == "changed value"
@minimum_python_37
@patch("asyncio.get_running_loop")
def test_patch_asyncio(mock_get_running_loop):
"""
Test that the patch_asyncio function will patch the task factory.
"""
mock_loop = mock_get_running_loop.return_value
patch_asyncio()
assert mock_loop.set_task_factory.called
set_task_factory_args, _ = mock_loop.set_task_factory.call_args
assert len(set_task_factory_args) == 1
sentry_task_factory, *_ = set_task_factory_args
assert callable(sentry_task_factory)
@minimum_python_37
@pytest.mark.forked
@patch("asyncio.get_running_loop")
@patch("sentry_sdk.integrations.asyncio.Task")
def test_sentry_task_factory_no_factory(MockTask, mock_get_running_loop): # noqa: N803
mock_loop = mock_get_running_loop.return_value
mock_coro = MagicMock()
# Set the original task factory to None
mock_loop.get_task_factory.return_value = None
# Retrieve the sentry task factory (since it is an inner function within patch_asyncio)
sentry_task_factory = get_sentry_task_factory(mock_get_running_loop)
# The call we are testing
ret_val = sentry_task_factory(mock_loop, mock_coro)
assert MockTask.called
assert ret_val == MockTask.return_value
task_args, task_kwargs = MockTask.call_args
assert len(task_args) == 1
coro_param, *_ = task_args
assert inspect.iscoroutine(coro_param)
assert "loop" in task_kwargs
assert task_kwargs["loop"] == mock_loop
@minimum_python_37
@pytest.mark.forked
@patch("asyncio.get_running_loop")
def test_sentry_task_factory_with_factory(mock_get_running_loop):
mock_loop = mock_get_running_loop.return_value
mock_coro = MagicMock()
# The original task factory will be mocked out here, let's retrieve the value for later
orig_task_factory = mock_loop.get_task_factory.return_value
# Retrieve the sentry task factory (since it is an inner function within patch_asyncio)
sentry_task_factory = get_sentry_task_factory(mock_get_running_loop)
# The call we are testing
ret_val = sentry_task_factory(mock_loop, mock_coro)
assert orig_task_factory.called
assert ret_val == orig_task_factory.return_value
task_factory_args, _ = orig_task_factory.call_args
assert len(task_factory_args) == 2
loop_arg, coro_arg = task_factory_args
assert loop_arg == mock_loop
assert inspect.iscoroutine(coro_arg)
@minimum_python_311
@patch("asyncio.get_running_loop")
@patch("sentry_sdk.integrations.asyncio.Task")
def test_sentry_task_factory_context_no_factory(
MockTask, mock_get_running_loop # noqa: N803
):
mock_loop = mock_get_running_loop.return_value
mock_coro = MagicMock()
mock_context = MagicMock()
# Set the original task factory to None
mock_loop.get_task_factory.return_value = None
# Retrieve the sentry task factory (since it is an inner function within patch_asyncio)
sentry_task_factory = get_sentry_task_factory(mock_get_running_loop)
# The call we are testing
ret_val = sentry_task_factory(mock_loop, mock_coro, context=mock_context)
assert MockTask.called
assert ret_val == MockTask.return_value
task_args, task_kwargs = MockTask.call_args
assert len(task_args) == 1
coro_param, *_ = task_args
assert inspect.iscoroutine(coro_param)
assert "loop" in task_kwargs
assert task_kwargs["loop"] == mock_loop
assert "context" in task_kwargs
assert task_kwargs["context"] == mock_context
@minimum_python_311
@patch("asyncio.get_running_loop")
def test_sentry_task_factory_context_with_factory(mock_get_running_loop):
mock_loop = mock_get_running_loop.return_value
mock_coro = MagicMock()
mock_context = MagicMock()
# The original task factory will be mocked out here, let's retrieve the value for later
orig_task_factory = mock_loop.get_task_factory.return_value
# Retrieve the sentry task factory (since it is an inner function within patch_asyncio)
sentry_task_factory = get_sentry_task_factory(mock_get_running_loop)
# The call we are testing
ret_val = sentry_task_factory(mock_loop, mock_coro, context=mock_context)
assert orig_task_factory.called
assert ret_val == orig_task_factory.return_value
task_factory_args, task_factory_kwargs = orig_task_factory.call_args
assert len(task_factory_args) == 2
loop_arg, coro_arg = task_factory_args
assert loop_arg == mock_loop
assert inspect.iscoroutine(coro_arg)
assert "context" in task_factory_kwargs
assert task_factory_kwargs["context"] == mock_context
sentry-python-1.39.2/tests/integrations/asyncpg/ 0000775 0000000 0000000 00000000000 14547447232 0022001 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/asyncpg/__init__.py 0000664 0000000 0000000 00000000124 14547447232 0024107 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("asyncpg")
pytest.importorskip("pytest_asyncio")
sentry-python-1.39.2/tests/integrations/asyncpg/test_asyncpg.py 0000664 0000000 0000000 00000035631 14547447232 0025066 0 ustar 00root root 0000000 0000000 """
Tests need pytest-asyncio installed.
Tests need a local postgresql instance running, this can best be done using
```sh
docker run --rm --name some-postgres -e POSTGRES_USER=foo -e POSTGRES_PASSWORD=bar -d -p 5432:5432 postgres
```
The tests use the following credentials to establish a database connection.
"""
import os
PG_NAME = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_NAME", "postgres")
PG_USER = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_USER", "foo")
PG_PASSWORD = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PASSWORD", "bar")
PG_HOST = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost")
PG_PORT = 5432
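# The connection defaults above can be overridden through the environment,
# e.g. (hypothetical local setup pointing at the docker container from the
# docstring):
#
#     export SENTRY_PYTHON_TEST_POSTGRES_USER=foo
#     export SENTRY_PYTHON_TEST_POSTGRES_PASSWORD=bar
#     export SENTRY_PYTHON_TEST_POSTGRES_HOST=127.0.0.1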
import datetime
import asyncpg
import pytest
import pytest_asyncio
from asyncpg import connect, Connection
from sentry_sdk import capture_message, start_transaction
from sentry_sdk.integrations.asyncpg import AsyncPGIntegration
from sentry_sdk.consts import SPANDATA
PG_CONNECTION_URI = "postgresql://{}:{}@{}/{}".format(
PG_USER, PG_PASSWORD, PG_HOST, PG_NAME
)
CRUMBS_CONNECT = {
"category": "query",
"data": {
"db.name": PG_NAME,
"db.system": "postgresql",
"db.user": PG_USER,
"server.address": PG_HOST,
"server.port": PG_PORT,
},
"message": "connect",
"type": "default",
}
@pytest_asyncio.fixture(autouse=True)
async def _clean_pg():
conn = await connect(PG_CONNECTION_URI)
await conn.execute("DROP TABLE IF EXISTS users")
await conn.execute(
"""
CREATE TABLE users(
id serial PRIMARY KEY,
name text,
password text,
dob date
)
"""
)
await conn.close()
@pytest.mark.asyncio
async def test_connect(sentry_init, capture_events) -> None:
sentry_init(
integrations=[AsyncPGIntegration()],
_experiments={"record_sql_params": True},
)
events = capture_events()
conn: Connection = await connect(PG_CONNECTION_URI)
await conn.close()
capture_message("hi")
(event,) = events
for crumb in event["breadcrumbs"]["values"]:
del crumb["timestamp"]
assert event["breadcrumbs"]["values"] == [CRUMBS_CONNECT]
@pytest.mark.asyncio
async def test_execute(sentry_init, capture_events) -> None:
sentry_init(
integrations=[AsyncPGIntegration()],
_experiments={"record_sql_params": True},
)
events = capture_events()
conn: Connection = await connect(PG_CONNECTION_URI)
await conn.execute(
"INSERT INTO users(name, password, dob) VALUES ('Alice', 'pw', '1990-12-25')",
)
await conn.execute(
"INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
"Bob",
"secret_pw",
datetime.date(1984, 3, 1),
)
row = await conn.fetchrow("SELECT * FROM users WHERE name = $1", "Bob")
assert row == (2, "Bob", "secret_pw", datetime.date(1984, 3, 1))
row = await conn.fetchrow("SELECT * FROM users WHERE name = 'Bob'")
assert row == (2, "Bob", "secret_pw", datetime.date(1984, 3, 1))
await conn.close()
capture_message("hi")
(event,) = events
for crumb in event["breadcrumbs"]["values"]:
del crumb["timestamp"]
assert event["breadcrumbs"]["values"] == [
CRUMBS_CONNECT,
{
"category": "query",
"data": {},
"message": "INSERT INTO users(name, password, dob) VALUES ('Alice', 'pw', '1990-12-25')",
"type": "default",
},
{
"category": "query",
"data": {},
"message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
"type": "default",
},
{
"category": "query",
"data": {},
"message": "SELECT * FROM users WHERE name = $1",
"type": "default",
},
{
"category": "query",
"data": {},
"message": "SELECT * FROM users WHERE name = 'Bob'",
"type": "default",
},
]
@pytest.mark.asyncio
async def test_execute_many(sentry_init, capture_events) -> None:
sentry_init(
integrations=[AsyncPGIntegration()],
_experiments={"record_sql_params": True},
)
events = capture_events()
conn: Connection = await connect(PG_CONNECTION_URI)
await conn.executemany(
"INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
[
("Bob", "secret_pw", datetime.date(1984, 3, 1)),
("Alice", "pw", datetime.date(1990, 12, 25)),
],
)
await conn.close()
capture_message("hi")
(event,) = events
for crumb in event["breadcrumbs"]["values"]:
del crumb["timestamp"]
assert event["breadcrumbs"]["values"] == [
CRUMBS_CONNECT,
{
"category": "query",
"data": {"db.executemany": True},
"message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
"type": "default",
},
]
@pytest.mark.asyncio
async def test_record_params(sentry_init, capture_events) -> None:
sentry_init(
integrations=[AsyncPGIntegration(record_params=True)],
_experiments={"record_sql_params": True},
)
events = capture_events()
conn: Connection = await connect(PG_CONNECTION_URI)
await conn.execute(
"INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
"Bob",
"secret_pw",
datetime.date(1984, 3, 1),
)
await conn.close()
capture_message("hi")
(event,) = events
for crumb in event["breadcrumbs"]["values"]:
del crumb["timestamp"]
assert event["breadcrumbs"]["values"] == [
CRUMBS_CONNECT,
{
"category": "query",
"data": {
"db.params": ["Bob", "secret_pw", "datetime.date(1984, 3, 1)"],
"db.paramstyle": "format",
},
"message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
"type": "default",
},
]
@pytest.mark.asyncio
async def test_cursor(sentry_init, capture_events) -> None:
sentry_init(
integrations=[AsyncPGIntegration()],
_experiments={"record_sql_params": True},
)
events = capture_events()
conn: Connection = await connect(PG_CONNECTION_URI)
await conn.executemany(
"INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
[
("Bob", "secret_pw", datetime.date(1984, 3, 1)),
("Alice", "pw", datetime.date(1990, 12, 25)),
],
)
async with conn.transaction():
# Postgres requires non-scrollable cursors to be created
# and used in a transaction.
async for record in conn.cursor(
"SELECT * FROM users WHERE dob > $1", datetime.date(1970, 1, 1)
):
print(record)
await conn.close()
capture_message("hi")
(event,) = events
for crumb in event["breadcrumbs"]["values"]:
del crumb["timestamp"]
assert event["breadcrumbs"]["values"] == [
CRUMBS_CONNECT,
{
"category": "query",
"data": {"db.executemany": True},
"message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
"type": "default",
},
{"category": "query", "data": {}, "message": "BEGIN;", "type": "default"},
{
"category": "query",
"data": {},
"message": "SELECT * FROM users WHERE dob > $1",
"type": "default",
},
{"category": "query", "data": {}, "message": "COMMIT;", "type": "default"},
]
@pytest.mark.asyncio
async def test_cursor_manual(sentry_init, capture_events) -> None:
sentry_init(
integrations=[AsyncPGIntegration()],
_experiments={"record_sql_params": True},
)
events = capture_events()
conn: Connection = await connect(PG_CONNECTION_URI)
await conn.executemany(
"INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
[
("Bob", "secret_pw", datetime.date(1984, 3, 1)),
("Alice", "pw", datetime.date(1990, 12, 25)),
],
)
async with conn.transaction():
# Postgres requires non-scrollable cursors to be created
# and used in a transaction.
cur = await conn.cursor(
"SELECT * FROM users WHERE dob > $1", datetime.date(1970, 1, 1)
)
record = await cur.fetchrow()
print(record)
while await cur.forward(1):
record = await cur.fetchrow()
print(record)
await conn.close()
capture_message("hi")
(event,) = events
for crumb in event["breadcrumbs"]["values"]:
del crumb["timestamp"]
assert event["breadcrumbs"]["values"] == [
CRUMBS_CONNECT,
{
"category": "query",
"data": {"db.executemany": True},
"message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
"type": "default",
},
{"category": "query", "data": {}, "message": "BEGIN;", "type": "default"},
{
"category": "query",
"data": {},
"message": "SELECT * FROM users WHERE dob > $1",
"type": "default",
},
{"category": "query", "data": {}, "message": "COMMIT;", "type": "default"},
]
@pytest.mark.asyncio
async def test_prepared_stmt(sentry_init, capture_events) -> None:
sentry_init(
integrations=[AsyncPGIntegration()],
_experiments={"record_sql_params": True},
)
events = capture_events()
conn: Connection = await connect(PG_CONNECTION_URI)
await conn.executemany(
"INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
[
("Bob", "secret_pw", datetime.date(1984, 3, 1)),
("Alice", "pw", datetime.date(1990, 12, 25)),
],
)
stmt = await conn.prepare("SELECT * FROM users WHERE name = $1")
print(await stmt.fetchval("Bob"))
print(await stmt.fetchval("Alice"))
await conn.close()
capture_message("hi")
(event,) = events
for crumb in event["breadcrumbs"]["values"]:
del crumb["timestamp"]
assert event["breadcrumbs"]["values"] == [
CRUMBS_CONNECT,
{
"category": "query",
"data": {"db.executemany": True},
"message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
"type": "default",
},
{
"category": "query",
"data": {},
"message": "SELECT * FROM users WHERE name = $1",
"type": "default",
},
]
@pytest.mark.asyncio
async def test_connection_pool(sentry_init, capture_events) -> None:
sentry_init(
integrations=[AsyncPGIntegration()],
_experiments={"record_sql_params": True},
)
events = capture_events()
pool_size = 2
pool = await asyncpg.create_pool(
PG_CONNECTION_URI, min_size=pool_size, max_size=pool_size
)
async with pool.acquire() as conn:
await conn.execute(
"INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
"Bob",
"secret_pw",
datetime.date(1984, 3, 1),
)
async with pool.acquire() as conn:
row = await conn.fetchrow("SELECT * FROM users WHERE name = $1", "Bob")
assert row == (1, "Bob", "secret_pw", datetime.date(1984, 3, 1))
await pool.close()
capture_message("hi")
(event,) = events
for crumb in event["breadcrumbs"]["values"]:
del crumb["timestamp"]
assert event["breadcrumbs"]["values"] == [
# The connection pool opens pool_size connections so we have the crumbs pool_size times
*[CRUMBS_CONNECT] * pool_size,
{
"category": "query",
"data": {},
"message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
"type": "default",
},
{
"category": "query",
"data": {},
"message": "SELECT pg_advisory_unlock_all();\n"
"CLOSE ALL;\n"
"UNLISTEN *;\n"
"RESET ALL;",
"type": "default",
},
{
"category": "query",
"data": {},
"message": "SELECT * FROM users WHERE name = $1",
"type": "default",
},
{
"category": "query",
"data": {},
"message": "SELECT pg_advisory_unlock_all();\n"
"CLOSE ALL;\n"
"UNLISTEN *;\n"
"RESET ALL;",
"type": "default",
},
]
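# Note on the crumbs above: the pg_advisory_unlock_all()/CLOSE ALL/UNLISTEN/
# RESET queries come from the reset asyncpg runs when a pooled connection is
# released back to the pool, which is why they appear once per acquire block.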
@pytest.mark.asyncio
@pytest.mark.parametrize("enable_db_query_source", [None, False])
async def test_query_source_disabled(
sentry_init, capture_events, enable_db_query_source
):
sentry_options = {
"integrations": [AsyncPGIntegration()],
"enable_tracing": True,
}
if enable_db_query_source is not None:
sentry_options["enable_db_query_source"] = enable_db_query_source
sentry_options["db_query_source_threshold_ms"] = 0
sentry_init(**sentry_options)
events = capture_events()
with start_transaction(name="test_transaction", sampled=True):
conn: Connection = await connect(PG_CONNECTION_URI)
await conn.execute(
"INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')",
)
await conn.close()
(event,) = events
span = event["spans"][-1]
assert span["description"].startswith("INSERT INTO")
data = span.get("data", {})
assert SPANDATA.CODE_LINENO not in data
assert SPANDATA.CODE_NAMESPACE not in data
assert SPANDATA.CODE_FILEPATH not in data
assert SPANDATA.CODE_FUNCTION not in data
@pytest.mark.asyncio
async def test_query_source(sentry_init, capture_events):
sentry_init(
integrations=[AsyncPGIntegration()],
enable_tracing=True,
enable_db_query_source=True,
db_query_source_threshold_ms=0,
)
events = capture_events()
with start_transaction(name="test_transaction", sampled=True):
conn: Connection = await connect(PG_CONNECTION_URI)
await conn.execute(
"INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')",
)
await conn.close()
(event,) = events
span = event["spans"][-1]
assert span["description"].startswith("INSERT INTO")
data = span.get("data", {})
assert SPANDATA.CODE_LINENO in data
assert SPANDATA.CODE_NAMESPACE in data
assert SPANDATA.CODE_FILEPATH in data
assert SPANDATA.CODE_FUNCTION in data
assert type(data.get(SPANDATA.CODE_LINENO)) == int
assert data.get(SPANDATA.CODE_LINENO) > 0
assert (
data.get(SPANDATA.CODE_NAMESPACE) == "tests.integrations.asyncpg.test_asyncpg"
)
assert data.get(SPANDATA.CODE_FILEPATH).endswith(
"tests/integrations/asyncpg/test_asyncpg.py"
)
is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
assert is_relative_path
assert data.get(SPANDATA.CODE_FUNCTION) == "test_query_source"
sentry-python-1.39.2/tests/integrations/aws_lambda/ 0000775 0000000 0000000 00000000000 14547447232 0022427 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/aws_lambda/__init__.py 0000664 0000000 0000000 00000000054 14547447232 0024537 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("boto3")
sentry-python-1.39.2/tests/integrations/aws_lambda/client.py 0000664 0000000 0000000 00000030246 14547447232 0024264 0 ustar 00root root 0000000 0000000 import base64
import boto3
import glob
import hashlib
import os
import subprocess
import sys
import tempfile
from sentry_sdk.consts import VERSION as SDK_VERSION
from sentry_sdk.utils import get_git_revision
AWS_REGION_NAME = "us-east-1"
AWS_CREDENTIALS = {
"aws_access_key_id": os.environ["SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID"],
"aws_secret_access_key": os.environ["SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY"],
}
AWS_LAMBDA_EXECUTION_ROLE_NAME = "lambda-ex"
AWS_LAMBDA_EXECUTION_ROLE_ARN = None
def _install_dependencies(base_dir, subprocess_kwargs):
"""
Installs dependencies for AWS Lambda function
"""
setup_cfg = os.path.join(base_dir, "setup.cfg")
with open(setup_cfg, "w") as f:
f.write("[install]\nprefix=")
# Install requirements for Lambda Layer (these are more limited than the SDK requirements,
# because Lambda does not support the newest versions of some packages)
subprocess.check_call(
[
sys.executable,
"-m",
"pip",
"install",
"-r",
"aws-lambda-layer-requirements.txt",
"--target",
base_dir,
],
**subprocess_kwargs,
)
# Install requirements used for testing
subprocess.check_call(
[
sys.executable,
"-m",
"pip",
"install",
"mock==3.0.0",
"funcsigs",
"--target",
base_dir,
],
**subprocess_kwargs,
)
# Create a source distribution of the Sentry SDK (in parent directory of base_dir)
subprocess.check_call(
[
sys.executable,
"setup.py",
"sdist",
"--dist-dir",
os.path.dirname(base_dir),
],
**subprocess_kwargs,
)
# Install the created Sentry SDK source distribution into the target directory
    # Do not install the dependencies of the SDK, because they were already installed via aws-lambda-layer-requirements.txt above
source_distribution_archive = glob.glob(
"{}/*.tar.gz".format(os.path.dirname(base_dir))
)[0]
subprocess.check_call(
[
sys.executable,
"-m",
"pip",
"install",
source_distribution_archive,
"--no-deps",
"--target",
base_dir,
],
**subprocess_kwargs,
)
def _create_lambda_function_zip(base_dir):
"""
Zips the given base_dir omitting Python cache files
"""
subprocess.run(
[
"zip",
"-q",
"-x",
"**/__pycache__/*",
"-r",
"lambda-function-package.zip",
"./",
],
cwd=base_dir,
check=True,
)
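# A roughly equivalent pure-Python sketch of the zip step above, in case the
# `zip` binary is unavailable. This is an illustrative assumption, not what the
# test suite uses; the actual packaging relies on the subprocess call above.
#
#   import zipfile
#
#   def _zip_dir_sketch(base_dir):
#       out = os.path.join(base_dir, "lambda-function-package.zip")
#       with zipfile.ZipFile(out, "w", zipfile.ZIP_DEFLATED) as zf:
#           for root, _, files in os.walk(base_dir):
#               if "__pycache__" in root:
#                   continue  # mirror the `-x **/__pycache__/*` exclusion
#               for name in files:
#                   if name == "lambda-function-package.zip":
#                       continue  # don't zip the archive into itself
#                   path = os.path.join(root, name)
#                   zf.write(path, os.path.relpath(path, base_dir))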
def _create_lambda_package(
base_dir, code, initial_handler, layer, syntax_check, subprocess_kwargs
):
"""
    Creates deployable packages (as zip files) for the AWS Lambda function
    and, optionally, the accompanying Sentry Lambda layer
"""
if initial_handler:
        # If an initial handler value is provided, i.e. it is not the default
        # `test_lambda.test_handler`, create another directory level so that
        # the path becomes test_dir.test_lambda.test_handler
test_dir_path = os.path.join(base_dir, "test_dir")
python_init_file = os.path.join(test_dir_path, "__init__.py")
os.makedirs(test_dir_path)
with open(python_init_file, "w"):
# Create __init__ file to make it a python package
pass
test_lambda_py = os.path.join(base_dir, "test_dir", "test_lambda.py")
else:
test_lambda_py = os.path.join(base_dir, "test_lambda.py")
with open(test_lambda_py, "w") as f:
f.write(code)
if syntax_check:
# Check file for valid syntax first, and that the integration does not
# crash when not running in Lambda (but rather a local deployment tool
# such as chalice's)
subprocess.check_call([sys.executable, test_lambda_py])
if layer is None:
_install_dependencies(base_dir, subprocess_kwargs)
_create_lambda_function_zip(base_dir)
else:
_create_lambda_function_zip(base_dir)
# Create Lambda layer zip package
from scripts.build_aws_lambda_layer import build_packaged_zip
build_packaged_zip(
base_dir=base_dir,
make_dist=True,
out_zip_filename="lambda-layer-package.zip",
)
def _get_or_create_lambda_execution_role():
global AWS_LAMBDA_EXECUTION_ROLE_ARN
policy = """{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Principal": {
"Service": "lambda.amazonaws.com"
},
"Action": "sts:AssumeRole"
}
]
}
"""
iam_client = boto3.client(
"iam",
region_name=AWS_REGION_NAME,
**AWS_CREDENTIALS,
)
try:
response = iam_client.get_role(RoleName=AWS_LAMBDA_EXECUTION_ROLE_NAME)
AWS_LAMBDA_EXECUTION_ROLE_ARN = response["Role"]["Arn"]
except iam_client.exceptions.NoSuchEntityException:
# create role for lambda execution
response = iam_client.create_role(
RoleName=AWS_LAMBDA_EXECUTION_ROLE_NAME,
AssumeRolePolicyDocument=policy,
)
AWS_LAMBDA_EXECUTION_ROLE_ARN = response["Role"]["Arn"]
# attach policy to role
iam_client.attach_role_policy(
RoleName=AWS_LAMBDA_EXECUTION_ROLE_NAME,
PolicyArn="arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole",
)
def get_boto_client():
_get_or_create_lambda_execution_role()
return boto3.client(
"lambda",
region_name=AWS_REGION_NAME,
**AWS_CREDENTIALS,
)
def run_lambda_function(
client,
runtime,
code,
payload,
add_finalizer,
syntax_check=True,
timeout=30,
layer=None,
initial_handler=None,
subprocess_kwargs=(),
):
"""
Creates a Lambda function with the given code, and invokes it.
    If the same code is run multiple times, the function will NOT be
    recreated each time; the existing function will be reused.
"""
subprocess_kwargs = dict(subprocess_kwargs)
# Making a unique function name depending on all the code that is run in it (function code plus SDK version)
# The name needs to be short so the generated event/envelope json blobs are small enough to be output
# in the log result of the Lambda function.
rev = get_git_revision() or SDK_VERSION
function_hash = hashlib.shake_256((code + rev).encode("utf-8")).hexdigest(6)
fn_name = "test_{}".format(function_hash)
full_fn_name = "{}_{}".format(
fn_name, runtime.replace(".", "").replace("python", "py")
)
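    # For example (hypothetical hash): code hashing to "0f3a9c21bd57" on
    # runtime "python3.11" yields fn_name "test_0f3a9c21bd57" and
    # full_fn_name "test_0f3a9c21bd57_py311".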
function_exists_in_aws = True
try:
client.get_function(
FunctionName=full_fn_name,
)
print(
"Lambda function in AWS already existing, taking it (and do not create a local one)"
)
except client.exceptions.ResourceNotFoundException:
function_exists_in_aws = False
if not function_exists_in_aws:
tmp_base_dir = tempfile.gettempdir()
base_dir = os.path.join(tmp_base_dir, fn_name)
dir_already_existing = os.path.isdir(base_dir)
if dir_already_existing:
print("Local Lambda function directory already exists, skipping creation")
if not dir_already_existing:
os.mkdir(base_dir)
_create_lambda_package(
base_dir, code, initial_handler, layer, syntax_check, subprocess_kwargs
)
@add_finalizer
def clean_up():
        # this closes the web socket so we don't get a
        # "ResourceWarning: unclosed <ssl.SSLSocket ...>"
        # warning on every test
# based on https://github.com/boto/botocore/pull/1810
# (if that's ever merged, this can just become client.close())
session = client._endpoint.http_session
managers = [session._manager] + list(session._proxy_managers.values())
for manager in managers:
manager.clear()
layers = []
environment = {}
handler = initial_handler or "test_lambda.test_handler"
if layer is not None:
with open(
os.path.join(base_dir, "lambda-layer-package.zip"), "rb"
) as lambda_layer_zip:
response = client.publish_layer_version(
LayerName="python-serverless-sdk-test",
Description="Created as part of testsuite for getsentry/sentry-python",
Content={"ZipFile": lambda_layer_zip.read()},
)
layers = [response["LayerVersionArn"]]
handler = (
"sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler"
)
environment = {
"Variables": {
"SENTRY_INITIAL_HANDLER": initial_handler
or "test_lambda.test_handler",
"SENTRY_DSN": "https://123abc@example.com/123",
"SENTRY_TRACES_SAMPLE_RATE": "1.0",
}
}
try:
with open(
os.path.join(base_dir, "lambda-function-package.zip"), "rb"
) as lambda_function_zip:
client.create_function(
Description="Created as part of testsuite for getsentry/sentry-python",
FunctionName=full_fn_name,
Runtime=runtime,
Timeout=timeout,
Role=AWS_LAMBDA_EXECUTION_ROLE_ARN,
Handler=handler,
Code={"ZipFile": lambda_function_zip.read()},
Environment=environment,
Layers=layers,
)
waiter = client.get_waiter("function_active_v2")
waiter.wait(FunctionName=full_fn_name)
except client.exceptions.ResourceConflictException:
print(
"Lambda function already exists, this is fine, we will just invoke it."
)
response = client.invoke(
FunctionName=full_fn_name,
InvocationType="RequestResponse",
LogType="Tail",
Payload=payload,
)
assert 200 <= response["StatusCode"] < 300, response
return response
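# Usage sketch (the fixtures in test_aws.py are the real call sites; the
# handler body and runtime below are illustrative assumptions):
#
#   client = get_boto_client()
#   response = run_lambda_function(
#       client,
#       runtime="python3.11",
#       code='def test_handler(event, context):\n    return "ok"\n',
#       payload=b'{"foo": "bar"}',
#       add_finalizer=lambda f: f,  # or request.addfinalizer inside pytest
#   )
#   print(base64.b64decode(response["LogResult"]))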
# This is for inspecting new Python runtime environments in AWS Lambda
# If you need to debug a new runtime, use this REPL to run arbitrary Python or bash commands
# in that runtime in a Lambda function:
#
# pip3 install click
# python3 tests/integrations/aws_lambda/client.py --runtime=python4.0
#
_REPL_CODE = """
import os
def test_handler(event, context):
line = {line!r}
if line.startswith(">>> "):
exec(line[4:])
elif line.startswith("$ "):
os.system(line[2:])
else:
print("Start a line with $ or >>>")
return b""
"""
try:
import click
except ImportError:
pass
else:
@click.command()
@click.option(
"--runtime", required=True, help="name of the runtime to use, eg python3.11"
)
@click.option("--verbose", is_flag=True, default=False)
def repl(runtime, verbose):
"""
Launch a "REPL" against AWS Lambda to inspect their runtime.
"""
cleanup = []
client = get_boto_client()
print("Start a line with `$ ` to run shell commands, or `>>> ` to run Python")
while True:
line = input()
response = run_lambda_function(
client,
runtime,
_REPL_CODE.format(line=line),
b"",
cleanup.append,
subprocess_kwargs={
"stdout": subprocess.DEVNULL,
"stderr": subprocess.DEVNULL,
}
if not verbose
else {},
)
for line in base64.b64decode(response["LogResult"]).splitlines():
print(line.decode("utf8"))
for f in cleanup:
f()
cleanup = []
if __name__ == "__main__":
repl()
sentry-python-1.39.2/tests/integrations/aws_lambda/test_aws.py 0000664 0000000 0000000 00000067075 14547447232 0024651 0 ustar 00root root 0000000 0000000 """
# AWS Lambda System Tests
This test suite uses boto3 to upload actual Lambda functions to AWS Lambda and invoke them.
To run the tests locally, you need to set these env vars:
(You can find the values in the Sentry password manager by searching for "AWS Lambda for Python SDK Tests").
export SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID="..."
export SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY="..."
You can use `scripts/aws-cleanup.sh` to delete all files generated by this test suite.
If you need to debug a new runtime, use this REPL to run arbitrary Python or bash commands
in that runtime in a Lambda function: (see the bottom of client.py for more information.)
pip3 install click
python3 tests/integrations/aws_lambda/client.py --runtime=python4.0
IMPORTANT:
While this test suite runs, temporary folders are created for compiling the Lambda functions.
These temporary folders are not cleaned up, because in CI the generated files have to be
shared between tests and thus the folders cannot be deleted right after use.
If you run the tests locally, you need to clean up the temporary folders manually. The location of
the temporary folders is printed when running a test.
"""
import base64
import json
import re
from textwrap import dedent
import pytest
LAMBDA_PRELUDE = """
from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration, get_lambda_bootstrap
import sentry_sdk
import json
import time
from sentry_sdk.transport import HttpTransport
def truncate_data(data):
# AWS Lambda truncates the log output to 4kb, which is small enough to miss
# parts of even a single error-event/transaction-envelope pair if considered
# in full, so only grab the data we need.
cleaned_data = {}
if data.get("type") is not None:
cleaned_data["type"] = data["type"]
if data.get("contexts") is not None:
cleaned_data["contexts"] = {}
if data["contexts"].get("trace") is not None:
cleaned_data["contexts"]["trace"] = data["contexts"].get("trace")
if data.get("transaction") is not None:
cleaned_data["transaction"] = data.get("transaction")
if data.get("request") is not None:
cleaned_data["request"] = data.get("request")
if data.get("tags") is not None:
cleaned_data["tags"] = data.get("tags")
if data.get("exception") is not None:
cleaned_data["exception"] = data.get("exception")
for value in cleaned_data["exception"]["values"]:
for frame in value.get("stacktrace", {}).get("frames", []):
del frame["vars"]
del frame["pre_context"]
del frame["context_line"]
del frame["post_context"]
if data.get("extra") is not None:
cleaned_data["extra"] = {}
for key in data["extra"].keys():
if key == "lambda":
for lambda_key in data["extra"]["lambda"].keys():
if lambda_key in ["function_name"]:
cleaned_data["extra"].setdefault("lambda", {})[lambda_key] = data["extra"]["lambda"][lambda_key]
elif key == "cloudwatch logs":
for cloudwatch_key in data["extra"]["cloudwatch logs"].keys():
if cloudwatch_key in ["url", "log_group", "log_stream"]:
cleaned_data["extra"].setdefault("cloudwatch logs", {})[cloudwatch_key] = data["extra"]["cloudwatch logs"][cloudwatch_key]
if data.get("level") is not None:
cleaned_data["level"] = data.get("level")
if data.get("message") is not None:
cleaned_data["message"] = data.get("message")
if "contexts" not in cleaned_data:
raise Exception(json.dumps(data))
return cleaned_data
def event_processor(event):
return truncate_data(event)
def envelope_processor(envelope):
(item,) = envelope.items
item_json = json.loads(item.get_bytes())
return truncate_data(item_json)
class TestTransport(HttpTransport):
def _send_event(self, event):
event = event_processor(event)
print("\\nEVENT: {}\\n".format(json.dumps(event)))
def _send_envelope(self, envelope):
envelope = envelope_processor(envelope)
print("\\nENVELOPE: {}\\n".format(json.dumps(envelope)))
def init_sdk(timeout_warning=False, **extra_init_args):
sentry_sdk.init(
dsn="https://123abc@example.com/123",
transport=TestTransport,
integrations=[AwsLambdaIntegration(timeout_warning=timeout_warning)],
shutdown_timeout=10,
**extra_init_args
)
"""
@pytest.fixture
def lambda_client():
from tests.integrations.aws_lambda.client import get_boto_client
return get_boto_client()
@pytest.fixture(
params=[
"python3.7",
"python3.8",
"python3.9",
"python3.10",
"python3.11",
]
)
def lambda_runtime(request):
return request.param
@pytest.fixture
def run_lambda_function(request, lambda_client, lambda_runtime):
def inner(
code, payload, timeout=30, syntax_check=True, layer=None, initial_handler=None
):
from tests.integrations.aws_lambda.client import run_lambda_function
response = run_lambda_function(
client=lambda_client,
runtime=lambda_runtime,
code=code,
payload=payload,
add_finalizer=request.addfinalizer,
timeout=timeout,
syntax_check=syntax_check,
layer=layer,
initial_handler=initial_handler,
)
# Make sure the "ENVELOPE:" and "EVENT:" log entries are always starting a new line. (Sometimes they don't.)
response["LogResult"] = (
base64.b64decode(response["LogResult"])
.replace(b"EVENT:", b"\nEVENT:")
.replace(b"ENVELOPE:", b"\nENVELOPE:")
.splitlines()
)
response["Payload"] = json.loads(response["Payload"].read().decode("utf-8"))
del response["ResponseMetadata"]
events = []
envelopes = []
for line in response["LogResult"]:
print("AWS:", line)
if line.startswith(b"EVENT: "):
line = line[len(b"EVENT: ") :]
events.append(json.loads(line.decode("utf-8")))
elif line.startswith(b"ENVELOPE: "):
line = line[len(b"ENVELOPE: ") :]
envelopes.append(json.loads(line.decode("utf-8")))
else:
continue
return envelopes, events, response
return inner
def test_basic(run_lambda_function):
_, events, response = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
init_sdk()
def test_handler(event, context):
raise Exception("Oh!")
"""
),
b'{"foo": "bar"}',
)
assert response["FunctionError"] == "Unhandled"
(event,) = events
assert event["level"] == "error"
(exception,) = event["exception"]["values"]
assert exception["type"] == "Exception"
assert exception["value"] == "Oh!"
(frame1,) = exception["stacktrace"]["frames"]
assert frame1["filename"] == "test_lambda.py"
assert frame1["abs_path"] == "/var/task/test_lambda.py"
assert frame1["function"] == "test_handler"
assert frame1["in_app"] is True
assert exception["mechanism"]["type"] == "aws_lambda"
assert not exception["mechanism"]["handled"]
assert event["extra"]["lambda"]["function_name"].startswith("test_")
logs_url = event["extra"]["cloudwatch logs"]["url"]
assert logs_url.startswith("https://console.aws.amazon.com/cloudwatch/home?region=")
assert not re.search("(=;|=$)", logs_url)
assert event["extra"]["cloudwatch logs"]["log_group"].startswith(
"/aws/lambda/test_"
)
log_stream_re = "^[0-9]{4}/[0-9]{2}/[0-9]{2}/\\[[^\\]]+][a-f0-9]+$"
log_stream = event["extra"]["cloudwatch logs"]["log_stream"]
assert re.match(log_stream_re, log_stream)
def test_initialization_order(run_lambda_function):
"""Zappa lazily imports our code, so by the time we monkeypatch the handler
as seen by AWS already runs. At this point at least draining the queue
should work."""
_, events, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
def test_handler(event, context):
init_sdk()
sentry_sdk.capture_exception(Exception("Oh!"))
"""
),
b'{"foo": "bar"}',
)
(event,) = events
assert event["level"] == "error"
(exception,) = event["exception"]["values"]
assert exception["type"] == "Exception"
assert exception["value"] == "Oh!"
def test_request_data(run_lambda_function):
_, events, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
init_sdk()
def test_handler(event, context):
sentry_sdk.capture_message("hi")
return "ok"
"""
),
payload=b"""
{
"resource": "/asd",
"path": "/asd",
"httpMethod": "GET",
"headers": {
"Host": "iwsz2c7uwi.execute-api.us-east-1.amazonaws.com",
"User-Agent": "custom",
"X-Forwarded-Proto": "https"
},
"queryStringParameters": {
"bonkers": "true"
},
"pathParameters": null,
"stageVariables": null,
"requestContext": {
"identity": {
"sourceIp": "213.47.147.207",
"userArn": "42"
}
},
"body": null,
"isBase64Encoded": false
}
""",
)
(event,) = events
assert event["request"] == {
"headers": {
"Host": "iwsz2c7uwi.execute-api.us-east-1.amazonaws.com",
"User-Agent": "custom",
"X-Forwarded-Proto": "https",
},
"method": "GET",
"query_string": {"bonkers": "true"},
"url": "https://iwsz2c7uwi.execute-api.us-east-1.amazonaws.com/asd",
}
def test_init_error(run_lambda_function, lambda_runtime):
_, events, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
init_sdk()
func()
"""
),
b'{"foo": "bar"}',
syntax_check=False,
)
(event,) = events
assert event["exception"]["values"][0]["value"] == "name 'func' is not defined"
def test_timeout_error(run_lambda_function):
_, events, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
init_sdk(timeout_warning=True)
def test_handler(event, context):
time.sleep(10)
return 0
"""
),
b'{"foo": "bar"}',
timeout=2,
)
(event,) = events
assert event["level"] == "error"
(exception,) = event["exception"]["values"]
assert exception["type"] == "ServerlessTimeoutWarning"
assert exception["value"] in (
"WARNING : Function is expected to get timed out. Configured timeout duration = 3 seconds.",
"WARNING : Function is expected to get timed out. Configured timeout duration = 2 seconds.",
)
assert exception["mechanism"]["type"] == "threading"
assert not exception["mechanism"]["handled"]
assert event["extra"]["lambda"]["function_name"].startswith("test_")
logs_url = event["extra"]["cloudwatch logs"]["url"]
assert logs_url.startswith("https://console.aws.amazon.com/cloudwatch/home?region=")
assert not re.search("(=;|=$)", logs_url)
assert event["extra"]["cloudwatch logs"]["log_group"].startswith(
"/aws/lambda/test_"
)
log_stream_re = "^[0-9]{4}/[0-9]{2}/[0-9]{2}/\\[[^\\]]+][a-f0-9]+$"
log_stream = event["extra"]["cloudwatch logs"]["log_stream"]
assert re.match(log_stream_re, log_stream)
def test_performance_no_error(run_lambda_function):
envelopes, _, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
init_sdk(traces_sample_rate=1.0)
def test_handler(event, context):
return "test_string"
"""
),
b'{"foo": "bar"}',
)
(envelope,) = envelopes
assert envelope["type"] == "transaction"
assert envelope["contexts"]["trace"]["op"] == "function.aws"
assert envelope["transaction"].startswith("test_")
assert envelope["transaction"] in envelope["request"]["url"]
def test_performance_error(run_lambda_function):
envelopes, _, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
init_sdk(traces_sample_rate=1.0)
def test_handler(event, context):
raise Exception("Oh!")
"""
),
b'{"foo": "bar"}',
)
(
error_event,
transaction_event,
) = envelopes
assert error_event["level"] == "error"
(exception,) = error_event["exception"]["values"]
assert exception["type"] == "Exception"
assert exception["value"] == "Oh!"
assert transaction_event["type"] == "transaction"
assert transaction_event["contexts"]["trace"]["op"] == "function.aws"
assert transaction_event["transaction"].startswith("test_")
assert transaction_event["transaction"] in transaction_event["request"]["url"]
@pytest.mark.parametrize(
"aws_event, has_request_data, batch_size",
[
(b"1231", False, 1),
(b"11.21", False, 1),
(b'"Good dog!"', False, 1),
(b"true", False, 1),
(
b"""
[
{"good dog": "Maisey"},
{"good dog": "Charlie"},
{"good dog": "Cory"},
{"good dog": "Bodhi"}
]
""",
False,
4,
),
(
b"""
[
{
"headers": {
"Host": "x.io",
"X-Forwarded-Proto": "http"
},
"httpMethod": "GET",
"path": "/somepath",
"queryStringParameters": {
"done": "true"
},
"dog": "Maisey"
},
{
"headers": {
"Host": "x.io",
"X-Forwarded-Proto": "http"
},
"httpMethod": "GET",
"path": "/somepath",
"queryStringParameters": {
"done": "true"
},
"dog": "Charlie"
}
]
""",
True,
2,
),
],
)
def test_non_dict_event(
run_lambda_function,
aws_event,
has_request_data,
batch_size,
DictionaryContaining, # noqa:N803
):
envelopes, _, response = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
init_sdk(traces_sample_rate=1.0)
def test_handler(event, context):
raise Exception("Oh?")
"""
),
aws_event,
)
assert response["FunctionError"] == "Unhandled"
(
error_event,
transaction_event,
) = envelopes
assert error_event["level"] == "error"
assert error_event["contexts"]["trace"]["op"] == "function.aws"
function_name = error_event["extra"]["lambda"]["function_name"]
assert function_name.startswith("test_")
assert error_event["transaction"] == function_name
exception = error_event["exception"]["values"][0]
assert exception["type"] == "Exception"
assert exception["value"] == "Oh?"
assert exception["mechanism"]["type"] == "aws_lambda"
assert transaction_event["type"] == "transaction"
assert transaction_event["contexts"]["trace"] == DictionaryContaining(
error_event["contexts"]["trace"]
)
assert transaction_event["contexts"]["trace"]["status"] == "internal_error"
assert transaction_event["transaction"] == error_event["transaction"]
assert transaction_event["request"]["url"] == error_event["request"]["url"]
if has_request_data:
request_data = {
"headers": {"Host": "x.io", "X-Forwarded-Proto": "http"},
"method": "GET",
"url": "http://x.io/somepath",
"query_string": {
"done": "true",
},
}
else:
request_data = {"url": "awslambda:///{}".format(function_name)}
assert error_event["request"] == request_data
assert transaction_event["request"] == request_data
if batch_size > 1:
assert error_event["tags"]["batch_size"] == batch_size
assert error_event["tags"]["batch_request"] is True
assert transaction_event["tags"]["batch_size"] == batch_size
assert transaction_event["tags"]["batch_request"] is True
def test_traces_sampler_gets_correct_values_in_sampling_context(
run_lambda_function,
DictionaryContaining, # noqa:N803
ObjectDescribedBy,
StringContaining,
):
# TODO: This whole thing is a little hacky, specifically around the need to
# get `conftest.py` code into the AWS runtime, which is why there's both
# `inspect.getsource` and a copy of `_safe_is_equal` included directly in
# the code below. Ideas which have been discussed to fix this:
# - Include the test suite as a module installed in the package which is
# shot up to AWS
# - In client.py, copy `conftest.py` (or wherever the necessary code lives)
# from the test suite into the main SDK directory so it gets included as
# "part of the SDK"
# It's also worth noting why it's necessary to run the assertions in the AWS
# runtime rather than asserting on side effects the way we do with events
# and envelopes. The reasons are two-fold:
# - We're testing against the `LambdaContext` class, which only exists in
# the AWS runtime
    # - If we were to transmit call args data the way we transmit event and
# envelope data (through JSON), we'd quickly run into the problem that all
# sorts of stuff isn't serializable by `json.dumps` out of the box, up to
# and including `datetime` objects (so anything with a timestamp is
# automatically out)
# Perhaps these challenges can be solved in a cleaner and more systematic
# way if we ever decide to refactor the entire AWS testing apparatus.
import inspect
_, _, response = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(inspect.getsource(StringContaining))
+ dedent(inspect.getsource(DictionaryContaining))
+ dedent(inspect.getsource(ObjectDescribedBy))
+ dedent(
"""
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
def _safe_is_equal(x, y):
# copied from conftest.py - see docstring and comments there
try:
is_equal = x.__eq__(y)
except AttributeError:
is_equal = NotImplemented
if is_equal == NotImplemented:
# using == smoothes out weird variations exposed by raw __eq__
return x == y
return is_equal
def test_handler(event, context):
# this runs after the transaction has started, which means we
# can make assertions about traces_sampler
try:
traces_sampler.assert_any_call(
DictionaryContaining(
{
"aws_event": DictionaryContaining({
"httpMethod": "GET",
"path": "/sit/stay/rollover",
"headers": {"Host": "x.io", "X-Forwarded-Proto": "http"},
}),
"aws_context": ObjectDescribedBy(
type=get_lambda_bootstrap().LambdaContext,
attrs={
'function_name': StringContaining("test_"),
'function_version': '$LATEST',
}
)
}
)
)
except AssertionError:
# catch the error and return it because the error itself will
# get swallowed by the SDK as an "internal exception"
return {"AssertionError raised": True,}
return {"AssertionError raised": False,}
traces_sampler = mock.Mock(return_value=True)
init_sdk(
traces_sampler=traces_sampler,
)
"""
),
b'{"httpMethod": "GET", "path": "/sit/stay/rollover", "headers": {"Host": "x.io", "X-Forwarded-Proto": "http"}}',
)
assert response["Payload"]["AssertionError raised"] is False
def test_serverless_no_code_instrumentation(run_lambda_function):
"""
    Test that ensures that, just by adding a Lambda layer containing the
    Python SDK and without any code changes, Sentry is able to capture errors
"""
for initial_handler in [
None,
"test_dir/test_lambda.test_handler",
"test_dir.test_lambda.test_handler",
]:
print("Testing Initial Handler ", initial_handler)
_, _, response = run_lambda_function(
dedent(
"""
import sentry_sdk
def test_handler(event, context):
current_client = sentry_sdk.Hub.current.client
assert current_client is not None
assert len(current_client.options['integrations']) == 1
assert isinstance(current_client.options['integrations'][0],
sentry_sdk.integrations.aws_lambda.AwsLambdaIntegration)
raise Exception("Oh!")
"""
),
b'{"foo": "bar"}',
layer=True,
initial_handler=initial_handler,
)
assert response["FunctionError"] == "Unhandled"
assert response["StatusCode"] == 200
assert response["Payload"]["errorType"] != "AssertionError"
assert response["Payload"]["errorType"] == "Exception"
assert response["Payload"]["errorMessage"] == "Oh!"
assert "sentry_handler" in response["LogResult"][3].decode("utf-8")
def test_error_has_new_trace_context_performance_enabled(run_lambda_function):
envelopes, _, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
init_sdk(traces_sample_rate=1.0)
def test_handler(event, context):
sentry_sdk.capture_message("hi")
raise Exception("Oh!")
"""
),
payload=b'{"foo": "bar"}',
)
(msg_event, error_event, transaction_event) = envelopes
assert "trace" in msg_event["contexts"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert "trace" in error_event["contexts"]
assert "trace_id" in error_event["contexts"]["trace"]
assert "trace" in transaction_event["contexts"]
assert "trace_id" in transaction_event["contexts"]["trace"]
assert (
msg_event["contexts"]["trace"]["trace_id"]
== error_event["contexts"]["trace"]["trace_id"]
== transaction_event["contexts"]["trace"]["trace_id"]
)
def test_error_has_new_trace_context_performance_disabled(run_lambda_function):
_, events, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
init_sdk(traces_sample_rate=None) # this is the default, just added for clarity
def test_handler(event, context):
sentry_sdk.capture_message("hi")
raise Exception("Oh!")
"""
),
payload=b'{"foo": "bar"}',
)
(msg_event, error_event) = events
assert "trace" in msg_event["contexts"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert "trace" in error_event["contexts"]
assert "trace_id" in error_event["contexts"]["trace"]
assert (
msg_event["contexts"]["trace"]["trace_id"]
== error_event["contexts"]["trace"]["trace_id"]
)
def test_error_has_existing_trace_context_performance_enabled(run_lambda_function):
trace_id = "471a43a4192642f0b136d5159a501701"
parent_span_id = "6e8f22c393e68f19"
parent_sampled = 1
sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
    # Here we simulate AWS API Gateway's behavior of passing HTTP headers
# as the `headers` dict in the event passed to the Lambda function.
payload = {
"headers": {
"sentry-trace": sentry_trace_header,
}
}
envelopes, _, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
init_sdk(traces_sample_rate=1.0)
def test_handler(event, context):
sentry_sdk.capture_message("hi")
raise Exception("Oh!")
"""
),
payload=json.dumps(payload).encode(),
)
(msg_event, error_event, transaction_event) = envelopes
assert "trace" in msg_event["contexts"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert "trace" in error_event["contexts"]
assert "trace_id" in error_event["contexts"]["trace"]
assert "trace" in transaction_event["contexts"]
assert "trace_id" in transaction_event["contexts"]["trace"]
assert (
msg_event["contexts"]["trace"]["trace_id"]
== error_event["contexts"]["trace"]["trace_id"]
== transaction_event["contexts"]["trace"]["trace_id"]
== "471a43a4192642f0b136d5159a501701"
)
def test_error_has_existing_trace_context_performance_disabled(run_lambda_function):
trace_id = "471a43a4192642f0b136d5159a501701"
parent_span_id = "6e8f22c393e68f19"
parent_sampled = 1
sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
    # Here we simulate AWS API Gateway's behavior of passing HTTP headers
# as the `headers` dict in the event passed to the Lambda function.
payload = {
"headers": {
"sentry-trace": sentry_trace_header,
}
}
_, events, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
init_sdk(traces_sample_rate=None) # this is the default, just added for clarity
def test_handler(event, context):
sentry_sdk.capture_message("hi")
raise Exception("Oh!")
"""
),
payload=json.dumps(payload).encode(),
)
(msg_event, error_event) = events
assert "trace" in msg_event["contexts"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert "trace" in error_event["contexts"]
assert "trace_id" in error_event["contexts"]["trace"]
assert (
msg_event["contexts"]["trace"]["trace_id"]
== error_event["contexts"]["trace"]["trace_id"]
== "471a43a4192642f0b136d5159a501701"
)
def test_basic_with_eventbridge_source(run_lambda_function):
_, events, response = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
init_sdk()
def test_handler(event, context):
raise Exception("Oh!")
"""
),
b'[{"topic":"lps-ranges","partition":1,"offset":0,"timestamp":1701268939207,"timestampType":"CREATE_TIME","key":"REDACTED","value":"REDACTED","headers":[],"eventSourceArn":"REDACTED","bootstrapServers":"REDACTED","eventSource":"aws:kafka","eventSourceKey":"lps-ranges-1"}]',
)
assert response["FunctionError"] == "Unhandled"
(event,) = events
assert event["level"] == "error"
(exception,) = event["exception"]["values"]
assert exception["type"] == "Exception"
assert exception["value"] == "Oh!"
sentry-python-1.39.2/tests/integrations/beam/ 0000775 0000000 0000000 00000000000 14547447232 0021241 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/beam/__init__.py 0000664 0000000 0000000 00000000062 14547447232 0023350 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("apache_beam")
sentry-python-1.39.2/tests/integrations/beam/test_beam.py 0000664 0000000 0000000 00000013454 14547447232 0023565 0 ustar 00root root 0000000 0000000 import pytest
import inspect
import dill
from sentry_sdk.integrations.beam import (
BeamIntegration,
_wrap_task_call,
_wrap_inspect_call,
)
from apache_beam.typehints.trivial_inference import instance_to_type
from apache_beam.typehints.decorators import getcallargs_forhints
from apache_beam.transforms.core import DoFn, ParDo, _DoFnParam, CallableWrapperDoFn
from apache_beam.runners.common import DoFnInvoker, DoFnContext
from apache_beam.utils.windowed_value import WindowedValue
try:
from apache_beam.runners.common import OutputHandler
except ImportError:
from apache_beam.runners.common import OutputProcessor as OutputHandler
def foo():
return True
def bar(x, y):
# print(x + y)
return True
def baz(x, y=2):
# print(x + y)
return True
class A:
def __init__(self, fn):
self.r = "We are in A"
self.fn = fn
self._inspect_fn = _wrap_inspect_call(self, "fn")
def process(self):
return self.fn()
class B(A, object):
def fa(self, x, element=False, another_element=False):
if x or (element and not another_element):
# print(self.r)
return True
1 / 0
return False
def __init__(self):
self.r = "We are in B"
super(B, self).__init__(self.fa)
class SimpleFunc(DoFn):
def process(self, x):
if x:
1 / 0
return [True]
class PlaceHolderFunc(DoFn):
def process(self, x, timestamp=DoFn.TimestampParam, wx=DoFn.WindowParam):
if isinstance(timestamp, _DoFnParam) or isinstance(wx, _DoFnParam):
raise Exception("Bad instance")
if x:
1 / 0
yield True
def fail(x):
if x:
1 / 0
return [True]
test_parent = A(foo)
test_child = B()
test_simple = SimpleFunc()
test_place_holder = PlaceHolderFunc()
test_callable = CallableWrapperDoFn(fail)
# Note: the simple-function and placeholder DoFns cannot be called directly in
# test_monkey_patch_call below; they only work through a DoFn invoker.
@pytest.mark.parametrize(
"obj,f,args,kwargs",
[
[test_parent, "fn", (), {}],
[test_child, "fn", (False,), {"element": True}],
[test_child, "fn", (True,), {}],
[test_simple, "process", (False,), {}],
[test_callable, "process", (False,), {}],
],
)
def test_monkey_patch_call(obj, f, args, kwargs):
func = getattr(obj, f)
assert func(*args, **kwargs)
assert _wrap_task_call(func)(*args, **kwargs)
@pytest.mark.parametrize("f", [foo, bar, baz, test_parent.fn, test_child.fn])
def test_monkey_patch_pickle(f):
f_temp = _wrap_task_call(f)
assert dill.pickles(f_temp), "{} is not pickling correctly!".format(f)
# Pickle everything
s1 = dill.dumps(f_temp)
s2 = dill.loads(s1)
dill.dumps(s2)
@pytest.mark.parametrize(
"f,args,kwargs",
[
[foo, (), {}],
[bar, (1, 5), {}],
[baz, (1,), {}],
[test_parent.fn, (), {}],
[test_child.fn, (False,), {"element": True}],
[test_child.fn, (True,), {}],
],
)
def test_monkey_patch_signature(f, args, kwargs):
arg_types = [instance_to_type(v) for v in args]
kwargs_types = {k: instance_to_type(v) for (k, v) in kwargs.items()}
f_temp = _wrap_task_call(f)
try:
getcallargs_forhints(f, *arg_types, **kwargs_types)
except Exception:
print("Failed on {} with parameters {}, {}".format(f, args, kwargs))
raise
try:
getcallargs_forhints(f_temp, *arg_types, **kwargs_types)
except Exception:
print("Failed on {} with parameters {}, {}".format(f_temp, args, kwargs))
raise
try:
expected_signature = inspect.signature(f)
test_signature = inspect.signature(f_temp)
assert (
expected_signature == test_signature
), "Failed on {}, signature {} does not match {}".format(
f, expected_signature, test_signature
)
except Exception:
# expected to pass for py2.7
pass
class _OutputHandler(OutputHandler):
def process_outputs(
self, windowed_input_element, results, watermark_estimator=None
):
self.handle_process_outputs(
windowed_input_element, results, watermark_estimator
)
def handle_process_outputs(
self, windowed_input_element, results, watermark_estimator=None
):
print(windowed_input_element)
try:
for result in results:
assert result
except StopIteration:
print("In here")
@pytest.fixture
def init_beam(sentry_init):
def inner(fn):
sentry_init(default_integrations=False, integrations=[BeamIntegration()])
# Little hack to avoid having to run the whole pipeline.
pardo = ParDo(fn)
signature = pardo._signature
output_processor = _OutputHandler()
return DoFnInvoker.create_invoker(
signature, output_processor, DoFnContext("test")
)
return inner
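# The invoker returned by init_beam exposes invoke_process(windowed_value),
# which the tests below use to push a single element through the DoFn without
# constructing and running a whole Beam pipeline.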
@pytest.mark.parametrize("fn", [test_simple, test_callable, test_place_holder])
def test_invoker_normal(init_beam, fn):
invoker = init_beam(fn)
print("Normal testing {} with {} invoker.".format(fn, invoker))
windowed_value = WindowedValue(False, 0, [None])
invoker.invoke_process(windowed_value)
@pytest.mark.parametrize("fn", [test_simple, test_callable, test_place_holder])
def test_invoker_exception(init_beam, capture_events, capture_exceptions, fn):
invoker = init_beam(fn)
events = capture_events()
print("Exception testing {} with {} invoker.".format(fn, invoker))
    # The windowed value always holds exactly one element for process() to run on.
windowed_value = WindowedValue(True, 0, [None])
try:
invoker.invoke_process(windowed_value)
except Exception:
pass
(event,) = events
(exception,) = event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
assert exception["mechanism"]["type"] == "beam"
sentry-python-1.39.2/tests/integrations/boto3/ 0000775 0000000 0000000 00000000000 14547447232 0021363 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/boto3/__init__.py 0000664 0000000 0000000 00000000346 14547447232 0023477 0 ustar 00root root 0000000 0000000 import pytest
import os
pytest.importorskip("boto3")
xml_fixture_path = os.path.dirname(os.path.abspath(__file__))
def read_fixture(name):
with open(os.path.join(xml_fixture_path, name), "rb") as f:
return f.read()
sentry-python-1.39.2/tests/integrations/boto3/aws_mock.py 0000664 0000000 0000000 00000001563 14547447232 0023545 0 ustar 00root root 0000000 0000000 from io import BytesIO
from botocore.awsrequest import AWSResponse
class Body(BytesIO):
def stream(self, **kwargs):
contents = self.read()
while contents:
yield contents
contents = self.read()
class MockResponse(object):
def __init__(self, client, status_code, headers, body):
self._client = client
self._status_code = status_code
self._headers = headers
self._body = body
def __enter__(self):
self._client.meta.events.register("before-send", self)
return self
def __exit__(self, exc_type, exc_value, traceback):
self._client.meta.events.unregister("before-send", self)
def __call__(self, request, **kwargs):
return AWSResponse(
request.url,
self._status_code,
self._headers,
Body(self._body),
)
sentry-python-1.39.2/tests/integrations/boto3/s3_list.xml 0000664 0000000 0000000 00000001545 14547447232 0023472 0 ustar 00root root 0000000 0000000
marshalls-furious-bucket 1000 url false foo.txt 2020-10-24T00:13:39.000Z "a895ba674b4abd01b5d67cfd7074b827" 206453 7bef397f7e536914d1ff1bbdb105ed90bcfd06269456bf4a06c6e2e54564daf7 STANDARD bar.txt 2020-10-02T15:15:20.000Z "a895ba674b4abd01b5d67cfd7074b827" 206453 7bef397f7e536914d1ff1bbdb105ed90bcfd06269456bf4a06c6e2e54564daf7 STANDARD
sentry-python-1.39.2/tests/integrations/boto3/test_s3.py 0000664 0000000 0000000 00000007701 14547447232 0023326 0 ustar 00root root 0000000 0000000 import pytest
import boto3
from sentry_sdk import Hub
from sentry_sdk.integrations.boto3 import Boto3Integration
from tests.integrations.boto3.aws_mock import MockResponse
from tests.integrations.boto3 import read_fixture
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
session = boto3.Session(
aws_access_key_id="-",
aws_secret_access_key="-",
)
def test_basic(sentry_init, capture_events):
sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
events = capture_events()
s3 = session.resource("s3")
with Hub.current.start_transaction() as transaction, MockResponse(
s3.meta.client, 200, {}, read_fixture("s3_list.xml")
):
bucket = s3.Bucket("bucket")
items = [obj for obj in bucket.objects.all()]
assert len(items) == 2
assert items[0].key == "foo.txt"
assert items[1].key == "bar.txt"
transaction.finish()
(event,) = events
assert event["type"] == "transaction"
assert len(event["spans"]) == 1
(span,) = event["spans"]
assert span["op"] == "http.client"
assert span["description"] == "aws.s3.ListObjects"
def test_streaming(sentry_init, capture_events):
sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
events = capture_events()
s3 = session.resource("s3")
with Hub.current.start_transaction() as transaction, MockResponse(
s3.meta.client, 200, {}, b"hello"
):
obj = s3.Bucket("bucket").Object("foo.pdf")
body = obj.get()["Body"]
assert body.read(1) == b"h"
assert body.read(2) == b"el"
assert body.read(3) == b"lo"
assert body.read(1) == b""
transaction.finish()
(event,) = events
assert event["type"] == "transaction"
assert len(event["spans"]) == 2
span1 = event["spans"][0]
assert span1["op"] == "http.client"
assert span1["description"] == "aws.s3.GetObject"
assert span1["data"] == {
"http.method": "GET",
"aws.request.url": "https://bucket.s3.amazonaws.com/foo.pdf",
"http.fragment": "",
"http.query": "",
}
span2 = event["spans"][1]
assert span2["op"] == "http.client.stream"
assert span2["description"] == "aws.s3.GetObject"
assert span2["parent_span_id"] == span1["span_id"]
def test_streaming_close(sentry_init, capture_events):
sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
events = capture_events()
s3 = session.resource("s3")
with Hub.current.start_transaction() as transaction, MockResponse(
s3.meta.client, 200, {}, b"hello"
):
obj = s3.Bucket("bucket").Object("foo.pdf")
body = obj.get()["Body"]
assert body.read(1) == b"h"
body.close() # close partially-read stream
transaction.finish()
(event,) = events
assert event["type"] == "transaction"
assert len(event["spans"]) == 2
span1 = event["spans"][0]
assert span1["op"] == "http.client"
span2 = event["spans"][1]
assert span2["op"] == "http.client.stream"
@pytest.mark.tests_internal_exceptions
def test_omit_url_data_if_parsing_fails(sentry_init, capture_events):
sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
events = capture_events()
s3 = session.resource("s3")
with mock.patch(
"sentry_sdk.integrations.boto3.parse_url",
side_effect=ValueError,
):
with Hub.current.start_transaction() as transaction, MockResponse(
s3.meta.client, 200, {}, read_fixture("s3_list.xml")
):
bucket = s3.Bucket("bucket")
items = [obj for obj in bucket.objects.all()]
assert len(items) == 2
assert items[0].key == "foo.txt"
assert items[1].key == "bar.txt"
transaction.finish()
(event,) = events
assert event["spans"][0]["data"] == {
"http.method": "GET",
# no url data
}
sentry-python-1.39.2/tests/integrations/bottle/ 0000775 0000000 0000000 00000000000 14547447232 0021626 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/bottle/__init__.py 0000664 0000000 0000000 00000000055 14547447232 0023737 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("bottle")
sentry-python-1.39.2/tests/integrations/bottle/test_bottle.py 0000664 0000000 0000000 00000027646 14547447232 0024547 0 ustar 00root root 0000000 0000000 import json
import pytest
import logging
from io import BytesIO
from bottle import Bottle, debug as set_debug, abort, redirect
from sentry_sdk import capture_message
from sentry_sdk.serializer import MAX_DATABAG_BREADTH
from sentry_sdk.integrations.logging import LoggingIntegration
from werkzeug.test import Client
import sentry_sdk.integrations.bottle as bottle_sentry
@pytest.fixture(scope="function")
def app(sentry_init):
app = Bottle()
@app.route("/message")
def hi():
capture_message("hi")
return "ok"
@app.route("/message/")
def hi_with_id(message_id):
capture_message("hi")
return "ok"
@app.route("/message-named-route", name="hi")
def named_hi():
capture_message("hi")
return "ok"
yield app
@pytest.fixture
def get_client(app):
def inner():
return Client(app)
return inner
def test_has_context(sentry_init, app, capture_events, get_client):
sentry_init(integrations=[bottle_sentry.BottleIntegration()])
events = capture_events()
client = get_client()
response = client.get("/message")
assert response[1] == "200 OK"
(event,) = events
assert event["message"] == "hi"
assert "data" not in event["request"]
assert event["request"]["url"] == "http://localhost/message"
@pytest.mark.parametrize(
"url,transaction_style,expected_transaction,expected_source",
[
("/message", "endpoint", "hi", "component"),
("/message", "url", "/message", "route"),
("/message/123456", "url", "/message/", "route"),
("/message-named-route", "endpoint", "hi", "component"),
],
)
def test_transaction_style(
sentry_init,
url,
transaction_style,
expected_transaction,
expected_source,
capture_events,
get_client,
):
sentry_init(
integrations=[
bottle_sentry.BottleIntegration(transaction_style=transaction_style)
]
)
events = capture_events()
client = get_client()
response = client.get(url)
assert response[1] == "200 OK"
(event,) = events
# We use endswith() because in Python 2.7 it is "test_bottle.hi"
    # and in later Pythons "test_bottle.app.<locals>.hi"
assert event["transaction"].endswith(expected_transaction)
assert event["transaction_info"] == {"source": expected_source}
@pytest.mark.parametrize("debug", (True, False), ids=["debug", "nodebug"])
@pytest.mark.parametrize("catchall", (True, False), ids=["catchall", "nocatchall"])
def test_errors(
sentry_init, capture_exceptions, capture_events, app, debug, catchall, get_client
):
sentry_init(integrations=[bottle_sentry.BottleIntegration()])
app.catchall = catchall
set_debug(mode=debug)
exceptions = capture_exceptions()
events = capture_events()
@app.route("/")
def index():
1 / 0
client = get_client()
try:
client.get("/")
except ZeroDivisionError:
pass
(exc,) = exceptions
assert isinstance(exc, ZeroDivisionError)
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "bottle"
assert event["exception"]["values"][0]["mechanism"]["handled"] is False
def test_large_json_request(sentry_init, capture_events, app, get_client):
sentry_init(integrations=[bottle_sentry.BottleIntegration()])
data = {"foo": {"bar": "a" * 2000}}
@app.route("/", method="POST")
def index():
import bottle
assert bottle.request.json == data
assert bottle.request.body.read() == json.dumps(data).encode("ascii")
capture_message("hi")
return "ok"
events = capture_events()
client = get_client()
response = client.get("/")
response = client.post("/", content_type="application/json", data=json.dumps(data))
assert response[1] == "200 OK"
(event,) = events
assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
"": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
}
assert len(event["request"]["data"]["foo"]["bar"]) == 1024
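# Roughly what the "_meta" annotation above records: the 2000-character value
# was truncated to 1024 characters, with the "!limit" entry marking positions
# 1021-1024 as the truncation placeholder.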
@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
def test_empty_json_request(sentry_init, capture_events, app, data, get_client):
sentry_init(integrations=[bottle_sentry.BottleIntegration()])
@app.route("/", method="POST")
def index():
import bottle
assert bottle.request.json == data
assert bottle.request.body.read() == json.dumps(data).encode("ascii")
# assert not bottle.request.forms
capture_message("hi")
return "ok"
events = capture_events()
client = get_client()
response = client.post("/", content_type="application/json", data=json.dumps(data))
assert response[1] == "200 OK"
(event,) = events
assert event["request"]["data"] == data
def test_medium_formdata_request(sentry_init, capture_events, app, get_client):
sentry_init(integrations=[bottle_sentry.BottleIntegration()])
data = {"foo": "a" * 2000}
@app.route("/", method="POST")
def index():
import bottle
assert bottle.request.forms["foo"] == data["foo"]
capture_message("hi")
return "ok"
events = capture_events()
client = get_client()
response = client.post("/", data=data)
assert response[1] == "200 OK"
(event,) = events
assert event["_meta"]["request"]["data"]["foo"] == {
"": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
}
assert len(event["request"]["data"]["foo"]) == 1024
@pytest.mark.parametrize("input_char", ["a", b"a"])
def test_too_large_raw_request(
sentry_init, input_char, capture_events, app, get_client
):
sentry_init(
integrations=[bottle_sentry.BottleIntegration()], max_request_body_size="small"
)
data = input_char * 2000
@app.route("/", method="POST")
def index():
import bottle
if isinstance(data, bytes):
assert bottle.request.body.read() == data
else:
assert bottle.request.body.read() == data.encode("ascii")
assert not bottle.request.json
capture_message("hi")
return "ok"
events = capture_events()
client = get_client()
response = client.post("/", data=data)
assert response[1] == "200 OK"
(event,) = events
assert event["_meta"]["request"]["data"] == {"": {"rem": [["!config", "x"]]}}
assert not event["request"]["data"]
def test_files_and_form(sentry_init, capture_events, app, get_client):
sentry_init(
integrations=[bottle_sentry.BottleIntegration()], max_request_body_size="always"
)
data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}
@app.route("/", method="POST")
def index():
import bottle
assert list(bottle.request.forms) == ["foo"]
assert list(bottle.request.files) == ["file"]
assert not bottle.request.json
capture_message("hi")
return "ok"
events = capture_events()
client = get_client()
response = client.post("/", data=data)
assert response[1] == "200 OK"
(event,) = events
assert event["_meta"]["request"]["data"]["foo"] == {
"": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
}
assert len(event["request"]["data"]["foo"]) == 1024
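# The uploaded file itself is never serialized into the event; the "!raw"
# annotation marks the removed raw payload.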
assert event["_meta"]["request"]["data"]["file"] == {
"": {
"rem": [["!raw", "x"]],
}
}
assert not event["request"]["data"]["file"]
def test_json_not_truncated_if_max_request_body_size_is_always(
sentry_init, capture_events, app, get_client
):
sentry_init(
integrations=[bottle_sentry.BottleIntegration()], max_request_body_size="always"
)
data = {
"key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
}
@app.route("/", method="POST")
def index():
import bottle
assert bottle.request.json == data
assert bottle.request.body.read() == json.dumps(data).encode("ascii")
capture_message("hi")
return "ok"
events = capture_events()
client = get_client()
response = client.post("/", content_type="application/json", data=json.dumps(data))
assert response[1] == "200 OK"
(event,) = events
assert event["request"]["data"] == data
@pytest.mark.parametrize(
"integrations",
[
[bottle_sentry.BottleIntegration()],
[bottle_sentry.BottleIntegration(), LoggingIntegration(event_level="ERROR")],
],
)
def test_errors_not_reported_twice(
sentry_init, integrations, capture_events, app, get_client
):
sentry_init(integrations=integrations)
app.catchall = False
logger = logging.getLogger("bottle.app")
@app.route("/")
def index():
try:
1 / 0
except Exception as e:
logger.exception(e)
raise e
events = capture_events()
client = get_client()
with pytest.raises(ZeroDivisionError):
client.get("/")
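# Both the logging integration and the bottle integration report the same
# exception object; the SDK's default dedupe logic ensures only one event is sent.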
assert len(events) == 1
def test_logging(sentry_init, capture_events, app, get_client):
# ensure that Bottle's logger magic doesn't break ours
sentry_init(
integrations=[
bottle_sentry.BottleIntegration(),
LoggingIntegration(event_level="ERROR"),
]
)
@app.route("/")
def index():
app.logger.error("hi")
return "ok"
events = capture_events()
client = get_client()
client.get("/")
(event,) = events
assert event["level"] == "error"
def test_mount(app, capture_exceptions, capture_events, sentry_init, get_client):
sentry_init(integrations=[bottle_sentry.BottleIntegration()])
app.catchall = False
def crashing_app(environ, start_response):
1 / 0
app.mount("/wsgi/", crashing_app)
client = Client(app)
exceptions = capture_exceptions()
events = capture_events()
with pytest.raises(ZeroDivisionError) as exc:
client.get("/wsgi/")
(error,) = exceptions
assert error is exc.value
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "bottle"
assert event["exception"]["values"][0]["mechanism"]["handled"] is False
def test_500(sentry_init, capture_events, app, get_client):
sentry_init(integrations=[bottle_sentry.BottleIntegration()])
set_debug(False)
app.catchall = True
@app.route("/")
def index():
1 / 0
@app.error(500)
def error_handler(err):
capture_message("error_msg")
return "My error"
events = capture_events()
client = get_client()
response = client.get("/")
assert response[1] == "500 Internal Server Error"
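# Two events are captured: first the ZeroDivisionError from the handler,
# then the "error_msg" message emitted by the 500 error handler.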
_, event = events
assert event["message"] == "error_msg"
def test_error_in_errorhandler(sentry_init, capture_events, app, get_client):
sentry_init(integrations=[bottle_sentry.BottleIntegration()])
set_debug(False)
app.catchall = True
@app.route("/")
def index():
raise ValueError()
@app.error(500)
def error_handler(err):
1 / 0
events = capture_events()
client = get_client()
with pytest.raises(ZeroDivisionError):
client.get("/")
event1, event2 = events
(exception,) = event1["exception"]["values"]
assert exception["type"] == "ValueError"
exception = event2["exception"]["values"][0]
assert exception["type"] == "ZeroDivisionError"
def test_bad_request_not_captured(sentry_init, capture_events, app, get_client):
sentry_init(integrations=[bottle_sentry.BottleIntegration()])
events = capture_events()
@app.route("/")
def index():
abort(400, "bad request in")
client = get_client()
client.get("/")
assert not events
def test_no_exception_on_redirect(sentry_init, capture_events, app, get_client):
sentry_init(integrations=[bottle_sentry.BottleIntegration()])
events = capture_events()
@app.route("/")
def index():
redirect("/here")
@app.route("/here")
def here():
return "here"
client = get_client()
client.get("/")
assert not events
sentry-python-1.39.2/tests/integrations/celery/ 0000775 0000000 0000000 00000000000 14547447232 0021620 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/celery/__init__.py 0000664 0000000 0000000 00000000055 14547447232 0023731 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("celery")
sentry-python-1.39.2/tests/integrations/celery/test_celery.py 0000664 0000000 0000000 00000044244 14547447232 0024524 0 ustar 00root root 0000000 0000000 import threading
import pytest
from sentry_sdk import Hub, configure_scope, start_transaction, get_current_span
from sentry_sdk.integrations.celery import (
CeleryIntegration,
_get_headers,
_wrap_apply_async,
)
from sentry_sdk._compat import text_type
from celery import Celery, VERSION
from celery.bin import worker
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
@pytest.fixture
def connect_signal(request):
def inner(signal, f):
signal.connect(f)
request.addfinalizer(lambda: signal.disconnect(f))
return inner
@pytest.fixture
def init_celery(sentry_init, request):
def inner(propagate_traces=True, backend="always_eager", **kwargs):
sentry_init(
integrations=[CeleryIntegration(propagate_traces=propagate_traces)],
**kwargs
)
celery = Celery(__name__)
if backend == "always_eager":
if VERSION < (4,):
celery.conf.CELERY_ALWAYS_EAGER = True
else:
celery.conf.task_always_eager = True
elif backend == "redis":
# broken on celery 3
if VERSION < (4,):
pytest.skip("Redis backend broken for some reason")
# this backend requires capture_events_forksafe
celery.conf.worker_max_tasks_per_child = 1
celery.conf.worker_concurrency = 1
celery.conf.broker_url = "redis://127.0.0.1:6379"
celery.conf.result_backend = "redis://127.0.0.1:6379"
celery.conf.task_always_eager = False
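# The worker runs in a thread that derives its hub from the main hub, so
# bind the current client to Hub.main; the finalizer unbinds it after the test.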
Hub.main.bind_client(Hub.current.client)
request.addfinalizer(lambda: Hub.main.bind_client(None))
# Once we drop celery 3 we can use the celery_worker fixture
if VERSION < (5,):
worker_fn = worker.worker(app=celery).run
else:
from celery.bin.base import CLIContext
worker_fn = lambda: worker.worker(
obj=CLIContext(app=celery, no_color=True, workdir=".", quiet=False),
args=[],
)
worker_thread = threading.Thread(target=worker_fn)
worker_thread.daemon = True
worker_thread.start()
else:
raise ValueError(backend)
return celery
return inner
@pytest.fixture
def celery(init_celery):
return init_celery()
@pytest.fixture(
params=[
lambda task, x, y: (
task.delay(x, y),
{"args": [x, y], "kwargs": {}},
),
lambda task, x, y: (
task.apply_async((x, y)),
{"args": [x, y], "kwargs": {}},
),
lambda task, x, y: (
task.apply_async(args=(x, y)),
{"args": [x, y], "kwargs": {}},
),
lambda task, x, y: (
task.apply_async(kwargs=dict(x=x, y=y)),
{"args": [], "kwargs": {"x": x, "y": y}},
),
]
)
def celery_invocation(request):
"""
Invokes a task in the multiple ways Celery allows (testing our apply_async monkeypatch).
Currently limited to a task signature of the form foo(x, y)
"""
return request.param
def test_simple_with_performance(capture_events, init_celery, celery_invocation):
celery = init_celery(traces_sample_rate=1.0)
events = capture_events()
@celery.task(name="dummy_task")
def dummy_task(x, y):
foo = 42 # noqa
return x / y
with start_transaction(op="unit test transaction") as transaction:
celery_invocation(dummy_task, 1, 2)
_, expected_context = celery_invocation(dummy_task, 1, 0)
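# With task_always_eager, each invocation emits its own transaction, so events
# arrive as [task transaction, error event, task transaction, outer transaction];
# the unpacking below relies on that capture order.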
(_, error_event, _, _) = events
assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
assert error_event["contexts"]["trace"]["span_id"] != transaction.span_id
assert error_event["transaction"] == "dummy_task"
assert "celery_task_id" in error_event["tags"]
assert error_event["extra"]["celery-job"] == dict(
task_name="dummy_task", **expected_context
)
(exception,) = error_event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
assert exception["mechanism"]["type"] == "celery"
assert exception["stacktrace"]["frames"][0]["vars"]["foo"] == "42"
def test_simple_without_performance(capture_events, init_celery, celery_invocation):
celery = init_celery(traces_sample_rate=None)
events = capture_events()
@celery.task(name="dummy_task")
def dummy_task(x, y):
foo = 42 # noqa
return x / y
with configure_scope() as scope:
celery_invocation(dummy_task, 1, 2)
_, expected_context = celery_invocation(dummy_task, 1, 0)
(error_event,) = events
assert (
error_event["contexts"]["trace"]["trace_id"]
== scope._propagation_context["trace_id"]
)
assert (
error_event["contexts"]["trace"]["span_id"]
!= scope._propagation_context["span_id"]
)
assert error_event["transaction"] == "dummy_task"
assert "celery_task_id" in error_event["tags"]
assert error_event["extra"]["celery-job"] == dict(
task_name="dummy_task", **expected_context
)
(exception,) = error_event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
assert exception["mechanism"]["type"] == "celery"
assert exception["stacktrace"]["frames"][0]["vars"]["foo"] == "42"
@pytest.mark.parametrize("task_fails", [True, False], ids=["error", "success"])
def test_transaction_events(capture_events, init_celery, celery_invocation, task_fails):
celery = init_celery(traces_sample_rate=1.0)
@celery.task(name="dummy_task")
def dummy_task(x, y):
return x / y
# XXX: For some reason the first call does not get instrumented properly.
celery_invocation(dummy_task, 1, 1)
events = capture_events()
with start_transaction(name="submission") as transaction:
celery_invocation(dummy_task, 1, 0 if task_fails else 1)
if task_fails:
error_event = events.pop(0)
assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
execution_event, submission_event = events
assert execution_event["transaction"] == "dummy_task"
assert execution_event["transaction_info"] == {"source": "task"}
assert submission_event["transaction"] == "submission"
assert submission_event["transaction_info"] == {"source": "custom"}
assert execution_event["type"] == submission_event["type"] == "transaction"
assert execution_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
assert submission_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
if task_fails:
assert execution_event["contexts"]["trace"]["status"] == "internal_error"
else:
assert execution_event["contexts"]["trace"]["status"] == "ok"
assert execution_event["spans"] == []
assert submission_event["spans"] == [
{
"description": "dummy_task",
"op": "queue.submit.celery",
"parent_span_id": submission_event["contexts"]["trace"]["span_id"],
"same_process_as_parent": True,
"span_id": submission_event["spans"][0]["span_id"],
"start_timestamp": submission_event["spans"][0]["start_timestamp"],
"timestamp": submission_event["spans"][0]["timestamp"],
"trace_id": text_type(transaction.trace_id),
}
]
def test_no_stackoverflows(celery):
"""We used to have a bug in the Celery integration where its monkeypatching
was repeated for every task invocation, leading to stackoverflows.
See https://github.com/getsentry/sentry-python/issues/265
"""
results = []
@celery.task(name="dummy_task")
def dummy_task():
with configure_scope() as scope:
scope.set_tag("foo", "bar")
results.append(42)
for _ in range(10000):
dummy_task.delay()
assert results == [42] * 10000
with configure_scope() as scope:
assert not scope._tags
def test_simple_no_propagation(capture_events, init_celery):
celery = init_celery(propagate_traces=False)
events = capture_events()
@celery.task(name="dummy_task")
def dummy_task():
1 / 0
with start_transaction() as transaction:
dummy_task.delay()
(event,) = events
assert event["contexts"]["trace"]["trace_id"] != transaction.trace_id
assert event["transaction"] == "dummy_task"
(exception,) = event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
def test_ignore_expected(capture_events, celery):
events = capture_events()
@celery.task(name="dummy_task", throws=(ZeroDivisionError,))
def dummy_task(x, y):
return x / y
dummy_task.delay(1, 2)
dummy_task.delay(1, 0)
assert not events
def test_broken_prerun(init_celery, connect_signal):
from celery.signals import task_prerun
stack_lengths = []
def crash(*args, **kwargs):
# scope should exist in prerun
stack_lengths.append(len(Hub.current._stack))
1 / 0
# Order here is important to reproduce the bug: In Celery 3, a crashing
# prerun would prevent other preruns from running.
connect_signal(task_prerun, crash)
celery = init_celery()
assert len(Hub.current._stack) == 1
@celery.task(name="dummy_task")
def dummy_task(x, y):
stack_lengths.append(len(Hub.current._stack))
return x / y
if VERSION >= (4,):
dummy_task.delay(2, 2)
else:
with pytest.raises(ZeroDivisionError):
dummy_task.delay(2, 2)
assert len(Hub.current._stack) == 1
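# Expected depths: the prerun handler and the task body each run with one
# extra scope layer pushed (depth 2). On Celery 3 the crashing prerun aborts
# the task, so only the prerun's measurement lands in stack_lengths.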
if VERSION < (4,):
assert stack_lengths == [2]
else:
assert stack_lengths == [2, 2]
@pytest.mark.xfail(
(4, 2, 0) <= VERSION < (4, 4, 3),
strict=True,
reason="https://github.com/celery/celery/issues/4661",
)
def test_retry(celery, capture_events):
events = capture_events()
failures = [True, True, False]
runs = []
@celery.task(name="dummy_task", bind=True)
def dummy_task(self):
runs.append(1)
try:
if failures.pop(0):
1 / 0
except Exception as exc:
self.retry(max_retries=2, exc=exc)
dummy_task.delay()
assert len(runs) == 3
assert not events
failures = [True, True, True]
runs = []
dummy_task.delay()
assert len(runs) == 3
(event,) = events
exceptions = event["exception"]["values"]
for e in exceptions:
assert e["type"] == "ZeroDivisionError"
# TODO: This test hangs when running tests with `tox --parallel auto`. Find out why and fix it!
@pytest.mark.skip
@pytest.mark.forked
def test_redis_backend_trace_propagation(init_celery, capture_events_forksafe):
celery = init_celery(traces_sample_rate=1.0, backend="redis", debug=True)
events = capture_events_forksafe()
runs = []
@celery.task(name="dummy_task", bind=True)
def dummy_task(self):
runs.append(1)
1 / 0
with start_transaction(name="submit_celery"):
# Curious: Cannot use delay() here or py2.7-celery-4.2 crashes
res = dummy_task.apply_async()
with pytest.raises(Exception): # noqa: B017
# Celery 4.1 raises a gibberish exception
res.wait()
# if this is nonempty, the worker never really forked
assert not runs
submit_transaction = events.read_event()
assert submit_transaction["type"] == "transaction"
assert submit_transaction["transaction"] == "submit_celery"
assert len(submit_transaction["spans"]) == 4  # Because redis integration was auto enabled
span = submit_transaction["spans"][0]
assert span["op"] == "queue.submit.celery"
assert span["description"] == "dummy_task"
event = events.read_event()
(exception,) = event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
transaction = events.read_event()
assert (
transaction["contexts"]["trace"]["trace_id"]
== event["contexts"]["trace"]["trace_id"]
== submit_transaction["contexts"]["trace"]["trace_id"]
)
events.read_flush()
# if this is nonempty, the worker never really forked
assert not runs
@pytest.mark.forked
@pytest.mark.parametrize("newrelic_order", ["sentry_first", "sentry_last"])
def test_newrelic_interference(init_celery, newrelic_order, celery_invocation):
def instrument_newrelic():
import celery.app.trace as celery_mod
from newrelic.hooks.application_celery import instrument_celery_execute_trace
assert hasattr(celery_mod, "build_tracer")
instrument_celery_execute_trace(celery_mod)
if newrelic_order == "sentry_first":
celery = init_celery()
instrument_newrelic()
elif newrelic_order == "sentry_last":
instrument_newrelic()
celery = init_celery()
else:
raise ValueError(newrelic_order)
@celery.task(name="dummy_task", bind=True)
def dummy_task(self, x, y):
return x / y
assert dummy_task.apply(kwargs={"x": 1, "y": 1}).wait() == 1
assert celery_invocation(dummy_task, 1, 1)[0].wait() == 1
def test_traces_sampler_gets_task_info_in_sampling_context(
init_celery, celery_invocation, DictionaryContaining # noqa:N803
):
traces_sampler = mock.Mock()
celery = init_celery(traces_sampler=traces_sampler)
@celery.task(name="dog_walk")
def walk_dogs(x, y):
dogs, route = x
num_loops = y
return dogs, route, num_loops
_, args_kwargs = celery_invocation(
walk_dogs, [["Maisey", "Charlie", "Bodhi", "Cory"], "Dog park round trip"], 1
)
traces_sampler.assert_any_call(
# depending on the iteration of celery_invocation, the data might be
# passed as args or as kwargs, so make this generic
DictionaryContaining({"celery_job": dict(task="dog_walk", **args_kwargs)})
)
def test_abstract_task(capture_events, celery, celery_invocation):
events = capture_events()
class AbstractTask(celery.Task):
abstract = True
def __call__(self, *args, **kwargs):
try:
return self.run(*args, **kwargs)
except ZeroDivisionError:
return None
@celery.task(name="dummy_task", base=AbstractTask)
def dummy_task(x, y):
return x / y
with start_transaction():
celery_invocation(dummy_task, 1, 0)
assert not events
def test_task_headers(celery):
"""
Test that the headers set in the Celery Beat auto-instrumentation are passed to the celery signal handlers
"""
sentry_crons_setup = {
"sentry-monitor-slug": "some-slug",
"sentry-monitor-config": {"some": "config"},
"sentry-monitor-check-in-id": "123abc",
}
@celery.task(name="dummy_task", bind=True)
def dummy_task(self, x, y):
return _get_headers(self)
# This is how the Celery Beat auto-instrumentation starts a task
# in the monkey patched version of `apply_async`
# in `sentry_sdk/integrations/celery.py::_wrap_apply_async()`
result = dummy_task.apply_async(args=(1, 0), headers=sentry_crons_setup)
assert result.get() == sentry_crons_setup
def test_baggage_propagation(init_celery):
celery = init_celery(traces_sample_rate=1.0, release="abcdef")
@celery.task(name="dummy_task", bind=True)
def dummy_task(self, x, y):
return _get_headers(self)
with start_transaction() as transaction:
result = dummy_task.apply_async(
args=(1, 0),
headers={"baggage": "custom=value"},
).get()
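# Sentry merges its dynamic sampling context into the outgoing baggage
# header, so the caller's custom entry survives next to the sentry-* items.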
assert sorted(result["baggage"].split(",")) == sorted(
[
"sentry-release=abcdef",
"sentry-trace_id={}".format(transaction.trace_id),
"sentry-environment=production",
"sentry-sample_rate=1.0",
"sentry-sampled=true",
"custom=value",
]
)
def test_sentry_propagate_traces_override(init_celery):
"""
Test if the `sentry-propagate-traces` header given to `apply_async`
overrides the `propagate_traces` parameter in the integration constructor.
"""
celery = init_celery(
propagate_traces=True, traces_sample_rate=1.0, release="abcdef"
)
@celery.task(name="dummy_task", bind=True)
def dummy_task(self, message):
trace_id = get_current_span().trace_id
return trace_id
with start_transaction() as transaction:
transaction_trace_id = transaction.trace_id
# should propagate trace
task_transaction_id = dummy_task.apply_async(
args=("some message",),
).get()
assert transaction_trace_id == task_transaction_id
# should NOT propagate trace (overrides `propagate_traces` parameter in integration constructor)
task_transaction_id = dummy_task.apply_async(
args=("another message",),
headers={"sentry-propagate-traces": False},
).get()
assert transaction_trace_id != task_transaction_id
def test_apply_async_manually_span(sentry_init):
sentry_init(
integrations=[CeleryIntegration()],
)
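# _wrap_apply_async returns a wrapper that injects tracing headers into the
# task's `headers` kwarg; a MagicMock stands in for the task instance here.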
def dummy_function(*args, **kwargs):
headers = kwargs.get("headers")
assert "sentry-trace" in headers
assert "baggage" in headers
wrapped = _wrap_apply_async(dummy_function)
wrapped(mock.MagicMock(), (), headers={})
def test_apply_async_from_beat_no_span(sentry_init):
sentry_init(
integrations=[CeleryIntegration()],
)
def dummy_function(*args, **kwargs):
headers = kwargs.get("headers")
assert "sentry-trace" not in headers
assert "baggage" not in headers
wrapped = _wrap_apply_async(dummy_function)
wrapped(
mock.MagicMock(),
[
"BEAT",
],
headers={},
)
def test_apply_async_no_args(init_celery):
celery = init_celery()
@celery.task
def example_task():
return "success"
try:
result = example_task.apply_async(None, {})
except TypeError:
pytest.fail("Calling `apply_async` without arguments raised a TypeError")
assert result.get() == "success"
sentry-python-1.39.2/tests/integrations/celery/test_celery_beat_crons.py 0000664 0000000 0000000 00000033176 14547447232 0026725 0 ustar 00root root 0000000 0000000 import datetime
import sys
import pytest
from sentry_sdk.integrations.celery import (
_get_headers,
_get_humanized_interval,
_get_monitor_config,
_patch_beat_apply_entry,
crons_task_success,
crons_task_failure,
crons_task_retry,
)
from sentry_sdk.crons import MonitorStatus
from celery.schedules import crontab, schedule
try:
from unittest import mock # python 3.3 and above
from unittest.mock import MagicMock
except ImportError:
import mock # python < 3.3
from mock import MagicMock
def test_get_headers():
fake_task = MagicMock()
fake_task.request = {
"bla": "blub",
"foo": "bar",
}
assert _get_headers(fake_task) == {}
fake_task.request.update(
{
"headers": {
"bla": "blub",
},
}
)
assert _get_headers(fake_task) == {"bla": "blub"}
fake_task.request.update(
{
"headers": {
"headers": {
"tri": "blub",
"bar": "baz",
},
"bla": "blub",
},
}
)
assert _get_headers(fake_task) == {"bla": "blub", "tri": "blub", "bar": "baz"}
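# _get_humanized_interval converts seconds into the largest whole
# (value, unit) pair, rounding down, e.g. 1000 seconds becomes (16, "minute").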
@pytest.mark.parametrize(
"seconds, expected_tuple",
[
(0, (0, "second")),
(1, (1, "second")),
(0.00001, (0, "second")),
(59, (59, "second")),
(60, (1, "minute")),
(100, (1, "minute")),
(1000, (16, "minute")),
(10000, (2, "hour")),
(100000, (1, "day")),
(100000000, (1157, "day")),
],
)
def test_get_humanized_interval(seconds, expected_tuple):
assert _get_humanized_interval(seconds) == expected_tuple
def test_crons_task_success():
fake_task = MagicMock()
fake_task.request = {
"headers": {
"sentry-monitor-slug": "test123",
"sentry-monitor-check-in-id": "1234567890",
"sentry-monitor-start-timestamp-s": 200.1,
"sentry-monitor-config": {
"schedule": {
"type": "interval",
"value": 3,
"unit": "day",
},
"timezone": "Europe/Vienna",
},
"sentry-monitor-some-future-key": "some-future-value",
},
}
with mock.patch(
"sentry_sdk.integrations.celery.capture_checkin"
) as mock_capture_checkin:
with mock.patch(
"sentry_sdk.integrations.celery._now_seconds_since_epoch",
return_value=500.5,
):
crons_task_success(fake_task)
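# duration is derived from the mocked clock: 500.5 - 200.1 = 300.4 seconds.
# As the assert below shows, unknown "sentry-monitor-*" headers are ignored.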
mock_capture_checkin.assert_called_once_with(
monitor_slug="test123",
monitor_config={
"schedule": {
"type": "interval",
"value": 3,
"unit": "day",
},
"timezone": "Europe/Vienna",
},
duration=300.4,
check_in_id="1234567890",
status=MonitorStatus.OK,
)
def test_crons_task_failure():
fake_task = MagicMock()
fake_task.request = {
"headers": {
"sentry-monitor-slug": "test123",
"sentry-monitor-check-in-id": "1234567890",
"sentry-monitor-start-timestamp-s": 200.1,
"sentry-monitor-config": {
"schedule": {
"type": "interval",
"value": 3,
"unit": "day",
},
"timezone": "Europe/Vienna",
},
"sentry-monitor-some-future-key": "some-future-value",
},
}
with mock.patch(
"sentry_sdk.integrations.celery.capture_checkin"
) as mock_capture_checkin:
with mock.patch(
"sentry_sdk.integrations.celery._now_seconds_since_epoch",
return_value=500.5,
):
crons_task_failure(fake_task)
mock_capture_checkin.assert_called_once_with(
monitor_slug="test123",
monitor_config={
"schedule": {
"type": "interval",
"value": 3,
"unit": "day",
},
"timezone": "Europe/Vienna",
},
duration=300.4,
check_in_id="1234567890",
status=MonitorStatus.ERROR,
)
def test_crons_task_retry():
fake_task = MagicMock()
fake_task.request = {
"headers": {
"sentry-monitor-slug": "test123",
"sentry-monitor-check-in-id": "1234567890",
"sentry-monitor-start-timestamp-s": 200.1,
"sentry-monitor-config": {
"schedule": {
"type": "interval",
"value": 3,
"unit": "day",
},
"timezone": "Europe/Vienna",
},
"sentry-monitor-some-future-key": "some-future-value",
},
}
with mock.patch(
"sentry_sdk.integrations.celery.capture_checkin"
) as mock_capture_checkin:
with mock.patch(
"sentry_sdk.integrations.celery._now_seconds_since_epoch",
return_value=500.5,
):
crons_task_retry(fake_task)
mock_capture_checkin.assert_called_once_with(
monitor_slug="test123",
monitor_config={
"schedule": {
"type": "interval",
"value": 3,
"unit": "day",
},
"timezone": "Europe/Vienna",
},
duration=300.4,
check_in_id="1234567890",
status=MonitorStatus.ERROR,
)
def test_get_monitor_config_crontab():
app = MagicMock()
app.timezone = "Europe/Vienna"
# schedule with the default timezone
celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
monitor_config = _get_monitor_config(celery_schedule, app, "foo")
assert monitor_config == {
"schedule": {
"type": "crontab",
"value": "*/10 12 3 * *",
},
"timezone": "UTC", # the default because `crontab` does not know about the app
}
assert "unit" not in monitor_config["schedule"]
# schedule with the timezone from the app
celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10", app=app)
monitor_config = _get_monitor_config(celery_schedule, app, "foo")
assert monitor_config == {
"schedule": {
"type": "crontab",
"value": "*/10 12 3 * *",
},
"timezone": "Europe/Vienna", # the timezone from the app
}
# schedule without a timezone, the celery integration will read the config from the app
celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
celery_schedule.tz = None
monitor_config = _get_monitor_config(celery_schedule, app, "foo")
assert monitor_config == {
"schedule": {
"type": "crontab",
"value": "*/10 12 3 * *",
},
"timezone": "Europe/Vienna", # the timezone from the app
}
# schedule without a timezone, and an app without timezone, the celery integration will fall back to UTC
app = MagicMock()
app.timezone = None
celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
celery_schedule.tz = None
monitor_config = _get_monitor_config(celery_schedule, app, "foo")
assert monitor_config == {
"schedule": {
"type": "crontab",
"value": "*/10 12 3 * *",
},
"timezone": "UTC", # default timezone from celery integration
}
def test_get_monitor_config_seconds():
app = MagicMock()
app.timezone = "Europe/Vienna"
celery_schedule = schedule(run_every=3) # seconds
with mock.patch(
"sentry_sdk.integrations.celery.logger.warning"
) as mock_logger_warning:
monitor_config = _get_monitor_config(celery_schedule, app, "foo")
mock_logger_warning.assert_called_with(
"Intervals shorter than one minute are not supported by Sentry Crons. Monitor '%s' has an interval of %s seconds. Use the `exclude_beat_tasks` option in the celery integration to exclude it.",
"foo",
3,
)
assert monitor_config == {}
def test_get_monitor_config_minutes():
app = MagicMock()
app.timezone = "Europe/Vienna"
# schedule with the default timezone
celery_schedule = schedule(run_every=60) # seconds
monitor_config = _get_monitor_config(celery_schedule, app, "foo")
assert monitor_config == {
"schedule": {
"type": "interval",
"value": 1,
"unit": "minute",
},
"timezone": "UTC",
}
# schedule with the timezone from the app
celery_schedule = schedule(run_every=60, app=app) # seconds
monitor_config = _get_monitor_config(celery_schedule, app, "foo")
assert monitor_config == {
"schedule": {
"type": "interval",
"value": 1,
"unit": "minute",
},
"timezone": "Europe/Vienna", # the timezone from the app
}
# schedule without a timezone, the celery integration will read the config from the app
celery_schedule = schedule(run_every=60) # seconds
celery_schedule.tz = None
monitor_config = _get_monitor_config(celery_schedule, app, "foo")
assert monitor_config == {
"schedule": {
"type": "interval",
"value": 1,
"unit": "minute",
},
"timezone": "Europe/Vienna", # the timezone from the app
}
# schedule without a timezone, and an app without timezone, the celery integration will fall back to UTC
app = MagicMock()
app.timezone = None
celery_schedule = schedule(run_every=60) # seconds
celery_schedule.tz = None
monitor_config = _get_monitor_config(celery_schedule, app, "foo")
assert monitor_config == {
"schedule": {
"type": "interval",
"value": 1,
"unit": "minute",
},
"timezone": "UTC", # default timezone from celery integration
}
def test_get_monitor_config_unknown():
app = MagicMock()
app.timezone = "Europe/Vienna"
unknown_celery_schedule = MagicMock()
monitor_config = _get_monitor_config(unknown_celery_schedule, app, "foo")
assert monitor_config == {}
def test_get_monitor_config_default_timezone():
app = MagicMock()
app.timezone = None
celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
monitor_config = _get_monitor_config(celery_schedule, app, "dummy_monitor_name")
assert monitor_config["timezone"] == "UTC"
def test_get_monitor_config_timezone_in_app_conf():
app = MagicMock()
app.timezone = "Asia/Karachi"
celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
celery_schedule.tz = None
monitor_config = _get_monitor_config(celery_schedule, app, "dummy_monitor_name")
assert monitor_config["timezone"] == "Asia/Karachi"
@pytest.mark.skipif(
sys.version_info < (3, 0),
reason="no datetime.timezone for Python 2, so skipping this test.",
)
def test_get_monitor_config_timezone_in_celery_schedule():
app = MagicMock()
app.timezone = "Asia/Karachi"
panama_tz = datetime.timezone(datetime.timedelta(hours=-5), name="America/Panama")
celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
celery_schedule.tz = panama_tz
monitor_config = _get_monitor_config(celery_schedule, app, "dummy_monitor_name")
assert monitor_config["timezone"] == str(panama_tz)
@pytest.mark.parametrize(
"task_name,exclude_beat_tasks,task_in_excluded_beat_tasks",
[
["some_task_name", ["xxx", "some_task.*"], True],
["some_task_name", ["xxx", "some_other_task.*"], False],
],
)
def test_exclude_beat_tasks_option(
task_name, exclude_beat_tasks, task_in_excluded_beat_tasks
):
"""
Test excluding Celery Beat tasks from automatic instrumentation.
"""
fake_apply_entry = MagicMock()
fake_scheduler = MagicMock()
fake_scheduler.apply_entry = fake_apply_entry
fake_integration = MagicMock()
fake_integration.exclude_beat_tasks = exclude_beat_tasks
fake_schedule_entry = MagicMock()
fake_schedule_entry.name = task_name
fake_get_monitor_config = MagicMock()
with mock.patch(
"sentry_sdk.integrations.celery.Scheduler", fake_scheduler
) as Scheduler: # noqa: N806
with mock.patch(
"sentry_sdk.integrations.celery.Hub.current.get_integration",
return_value=fake_integration,
):
with mock.patch(
"sentry_sdk.integrations.celery._get_monitor_config",
fake_get_monitor_config,
) as _get_monitor_config:
# Mimic CeleryIntegration patching of Scheduler.apply_entry()
_patch_beat_apply_entry()
# Mimic Celery Beat calling a task from the Beat schedule
Scheduler.apply_entry(fake_scheduler, fake_schedule_entry)
if task_in_excluded_beat_tasks:
# Only the original Scheduler.apply_entry() is called, _get_monitor_config is NOT called.
assert fake_apply_entry.call_count == 1
_get_monitor_config.assert_not_called()
else:
# The original Scheduler.apply_entry() is called, AND _get_monitor_config is called.
assert fake_apply_entry.call_count == 1
assert _get_monitor_config.call_count == 1
sentry-python-1.39.2/tests/integrations/chalice/ 0000775 0000000 0000000 00000000000 14547447232 0021725 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/chalice/__init__.py 0000664 0000000 0000000 00000000056 14547447232 0024037 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("chalice")
sentry-python-1.39.2/tests/integrations/chalice/test_chalice.py 0000664 0000000 0000000 00000010445 14547447232 0024732 0 ustar 00root root 0000000 0000000 import pytest
import time
from chalice import Chalice, BadRequestError
from chalice.local import LambdaContext, LocalGateway
from sentry_sdk import capture_message
from sentry_sdk.integrations.chalice import CHALICE_VERSION, ChaliceIntegration
from sentry_sdk.utils import parse_version
from pytest_chalice.handlers import RequestHandler
def _generate_lambda_context(self):
# Monkeypatch of LocalGateway._generate_lambda_context
# to mock the lambda timeout
# type: () -> LambdaContext
if self._config.lambda_timeout is None:
timeout = 10 * 1000
else:
timeout = self._config.lambda_timeout * 1000
return LambdaContext(
function_name=self._config.function_name,
memory_size=self._config.lambda_memory_size,
max_runtime_ms=timeout,
)
@pytest.fixture
def app(sentry_init):
sentry_init(integrations=[ChaliceIntegration()])
app = Chalice(app_name="sentry_chalice")
@app.route("/boom")
def boom():
raise Exception("boom goes the dynamite!")
@app.route("/context")
def has_request():
raise Exception("boom goes the dynamite!")
@app.route("/badrequest")
def badrequest():
raise BadRequestError("bad-request")
@app.route("/message")
def hi():
capture_message("hi")
return {"status": "ok"}
@app.route("/message/{message_id}")
def hi_with_id(message_id):
capture_message("hi again")
return {"status": "ok"}
LocalGateway._generate_lambda_context = _generate_lambda_context
return app
@pytest.fixture
def lambda_context_args():
return ["lambda_name", 256]
def test_exception_boom(app, client: RequestHandler) -> None:
response = client.get("/boom")
assert response.status_code == 500
assert response.json == {
"Code": "InternalServerError",
"Message": "An internal server error occurred.",
}
def test_has_request(app, capture_events, client: RequestHandler):
events = capture_events()
response = client.get("/context")
assert response.status_code == 500
(event,) = events
assert event["level"] == "error"
(exception,) = event["exception"]["values"]
assert exception["type"] == "Exception"
def test_scheduled_event(app, lambda_context_args):
@app.schedule("rate(1 minutes)")
def every_hour(event):
raise Exception("schedule event!")
context = LambdaContext(
*lambda_context_args, max_runtime_ms=10000, time_source=time
)
lambda_event = {
"version": "0",
"account": "120987654312",
"region": "us-west-1",
"detail": {},
"detail-type": "Scheduled Event",
"source": "aws.events",
"time": "1970-01-01T00:00:00Z",
"id": "event-id",
"resources": ["arn:aws:events:us-west-1:120987654312:rule/my-schedule"],
}
with pytest.raises(Exception) as exc_info:
every_hour(lambda_event, context=context)
assert str(exc_info.value) == "schedule event!"
@pytest.mark.skipif(
parse_version(CHALICE_VERSION) >= (1, 28),
reason="different behavior based on chalice version",
)
def test_bad_request_old(client: RequestHandler) -> None:
response = client.get("/badrequest")
assert response.status_code == 400
assert response.json == {
"Code": "BadRequestError",
"Message": "BadRequestError: bad-request",
}
@pytest.mark.skipif(
parse_version(CHALICE_VERSION) < (1, 28),
reason="different behavior based on chalice version",
)
def test_bad_request(client: RequestHandler) -> None:
response = client.get("/badrequest")
assert response.status_code == 400
assert response.json == {
"Code": "BadRequestError",
"Message": "bad-request",
}
@pytest.mark.parametrize(
"url,expected_transaction,expected_source",
[
("/message", "api_handler", "component"),
("/message/123456", "api_handler", "component"),
],
)
def test_transaction(
app,
client: RequestHandler,
capture_events,
url,
expected_transaction,
expected_source,
):
events = capture_events()
response = client.get(url)
assert response.status_code == 200
(event,) = events
assert event["transaction"] == expected_transaction
assert event["transaction_info"] == {"source": expected_source}
sentry-python-1.39.2/tests/integrations/clickhouse_driver/ 0000775 0000000 0000000 00000000000 14547447232 0024041 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/clickhouse_driver/__init__.py 0000664 0000000 0000000 00000000070 14547447232 0026147 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("clickhouse_driver")
sentry-python-1.39.2/tests/integrations/clickhouse_driver/test_clickhouse_driver.py 0000664 0000000 0000000 00000067015 14547447232 0031167 0 ustar 00root root 0000000 0000000 """
These tests need a local ClickHouse instance running; the easiest way to start one is:
```sh
docker run -d -p 18123:8123 -p 9000:9000 --name clickhouse-test --ulimit nofile=262144:262144 --rm clickhouse/clickhouse-server
```
"""
import clickhouse_driver
from clickhouse_driver import Client, connect
from sentry_sdk import start_transaction, capture_message
from sentry_sdk.integrations.clickhouse_driver import ClickhouseDriverIntegration
EXPECT_PARAMS_IN_SELECT = True
if clickhouse_driver.VERSION < (0, 2, 6):
EXPECT_PARAMS_IN_SELECT = False
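# clickhouse-driver versions before 0.2.6 do not pass SELECT query parameters
# through to the point the integration instruments, so the expected db.params
# entries are dropped from the last expectation in each test below.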
def test_clickhouse_client_breadcrumbs(sentry_init, capture_events) -> None:
sentry_init(
integrations=[ClickhouseDriverIntegration()],
_experiments={"record_sql_params": True},
)
events = capture_events()
client = Client("localhost")
client.execute("DROP TABLE IF EXISTS test")
client.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
client.execute("INSERT INTO test (x) VALUES", [{"x": 100}])
client.execute("INSERT INTO test (x) VALUES", [[170], [200]])
res = client.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
assert res[0][0] == 370
capture_message("hi")
(event,) = events
expected_breadcrumbs = [
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"message": "DROP TABLE IF EXISTS test",
"type": "default",
},
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"message": "CREATE TABLE test (x Int32) ENGINE = Memory",
"type": "default",
},
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"message": "INSERT INTO test (x) VALUES",
"type": "default",
},
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"message": "INSERT INTO test (x) VALUES",
"type": "default",
},
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"message": "SELECT sum(x) FROM test WHERE x > 150",
"type": "default",
},
]
if not EXPECT_PARAMS_IN_SELECT:
expected_breadcrumbs[-1]["data"].pop("db.params", None)
for crumb in event["breadcrumbs"]["values"]:
crumb.pop("timestamp", None)
assert event["breadcrumbs"]["values"] == expected_breadcrumbs
def test_clickhouse_client_breadcrumbs_with_pii(sentry_init, capture_events) -> None:
sentry_init(
integrations=[ClickhouseDriverIntegration()],
send_default_pii=True,
_experiments={"record_sql_params": True},
)
events = capture_events()
client = Client("localhost")
client.execute("DROP TABLE IF EXISTS test")
client.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
client.execute("INSERT INTO test (x) VALUES", [{"x": 100}])
client.execute("INSERT INTO test (x) VALUES", [[170], [200]])
res = client.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
assert res[0][0] == 370
capture_message("hi")
(event,) = events
expected_breadcrumbs = [
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.result": [],
},
"message": "DROP TABLE IF EXISTS test",
"type": "default",
},
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.result": [],
},
"message": "CREATE TABLE test (x Int32) ENGINE = Memory",
"type": "default",
},
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.params": [{"x": 100}],
},
"message": "INSERT INTO test (x) VALUES",
"type": "default",
},
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.params": [[170], [200]],
},
"message": "INSERT INTO test (x) VALUES",
"type": "default",
},
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.result": [[370]],
"db.params": {"minv": 150},
},
"message": "SELECT sum(x) FROM test WHERE x > 150",
"type": "default",
},
]
if not EXPECT_PARAMS_IN_SELECT:
expected_breadcrumbs[-1]["data"].pop("db.params", None)
for crumb in event["breadcrumbs"]["values"]:
crumb.pop("timestamp", None)
assert event["breadcrumbs"]["values"] == expected_breadcrumbs
def test_clickhouse_client_spans(
sentry_init, capture_events, capture_envelopes
) -> None:
sentry_init(
integrations=[ClickhouseDriverIntegration()],
_experiments={"record_sql_params": True},
traces_sample_rate=1.0,
)
events = capture_events()
transaction_trace_id = None
transaction_span_id = None
with start_transaction(name="test_clickhouse_transaction") as transaction:
transaction_trace_id = transaction.trace_id
transaction_span_id = transaction.span_id
client = Client("localhost")
client.execute("DROP TABLE IF EXISTS test")
client.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
client.execute("INSERT INTO test (x) VALUES", [{"x": 100}])
client.execute("INSERT INTO test (x) VALUES", [[170], [200]])
res = client.execute(
"SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150}
)
assert res[0][0] == 370
(event,) = events
expected_spans = [
{
"op": "db",
"description": "DROP TABLE IF EXISTS test",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
{
"op": "db",
"description": "CREATE TABLE test (x Int32) ENGINE = Memory",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
{
"op": "db",
"description": "INSERT INTO test (x) VALUES",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
{
"op": "db",
"description": "INSERT INTO test (x) VALUES",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
{
"op": "db",
"description": "SELECT sum(x) FROM test WHERE x > 150",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
]
if not EXPECT_PARAMS_IN_SELECT:
expected_spans[-1]["data"].pop("db.params", None)
for span in event["spans"]:
span.pop("span_id", None)
span.pop("start_timestamp", None)
span.pop("timestamp", None)
assert event["spans"] == expected_spans
def test_clickhouse_client_spans_with_pii(
sentry_init, capture_events, capture_envelopes
) -> None:
sentry_init(
integrations=[ClickhouseDriverIntegration()],
_experiments={"record_sql_params": True},
traces_sample_rate=1.0,
send_default_pii=True,
)
events = capture_events()
transaction_trace_id = None
transaction_span_id = None
with start_transaction(name="test_clickhouse_transaction") as transaction:
transaction_trace_id = transaction.trace_id
transaction_span_id = transaction.span_id
client = Client("localhost")
client.execute("DROP TABLE IF EXISTS test")
client.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
client.execute("INSERT INTO test (x) VALUES", [{"x": 100}])
client.execute("INSERT INTO test (x) VALUES", [[170], [200]])
res = client.execute(
"SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150}
)
assert res[0][0] == 370
(event,) = events
expected_spans = [
{
"op": "db",
"description": "DROP TABLE IF EXISTS test",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.result": [],
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
{
"op": "db",
"description": "CREATE TABLE test (x Int32) ENGINE = Memory",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.result": [],
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
{
"op": "db",
"description": "INSERT INTO test (x) VALUES",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.params": [{"x": 100}],
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
{
"op": "db",
"description": "INSERT INTO test (x) VALUES",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.params": [[170], [200]],
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
{
"op": "db",
"description": "SELECT sum(x) FROM test WHERE x > 150",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.params": {"minv": 150},
"db.result": [[370]],
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
]
if not EXPECT_PARAMS_IN_SELECT:
expected_spans[-1]["data"].pop("db.params", None)
for span in event["spans"]:
span.pop("span_id", None)
span.pop("start_timestamp", None)
span.pop("timestamp", None)
assert event["spans"] == expected_spans
def test_clickhouse_dbapi_breadcrumbs(sentry_init, capture_events) -> None:
sentry_init(
integrations=[ClickhouseDriverIntegration()],
)
events = capture_events()
conn = connect("clickhouse://localhost")
cursor = conn.cursor()
cursor.execute("DROP TABLE IF EXISTS test")
cursor.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
cursor.executemany("INSERT INTO test (x) VALUES", [{"x": 100}])
cursor.executemany("INSERT INTO test (x) VALUES", [[170], [200]])
cursor.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
res = cursor.fetchall()
assert res[0][0] == 370
capture_message("hi")
(event,) = events
expected_breadcrumbs = [
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"message": "DROP TABLE IF EXISTS test",
"type": "default",
},
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"message": "CREATE TABLE test (x Int32) ENGINE = Memory",
"type": "default",
},
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"message": "INSERT INTO test (x) VALUES",
"type": "default",
},
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"message": "INSERT INTO test (x) VALUES",
"type": "default",
},
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"message": "SELECT sum(x) FROM test WHERE x > 150",
"type": "default",
},
]
if not EXPECT_PARAMS_IN_SELECT:
expected_breadcrumbs[-1]["data"].pop("db.params", None)
for crumb in event["breadcrumbs"]["values"]:
crumb.pop("timestamp", None)
assert event["breadcrumbs"]["values"] == expected_breadcrumbs
def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> None:
sentry_init(
integrations=[ClickhouseDriverIntegration()],
send_default_pii=True,
)
events = capture_events()
conn = connect("clickhouse://localhost")
cursor = conn.cursor()
cursor.execute("DROP TABLE IF EXISTS test")
cursor.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
cursor.executemany("INSERT INTO test (x) VALUES", [{"x": 100}])
cursor.executemany("INSERT INTO test (x) VALUES", [[170], [200]])
cursor.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
res = cursor.fetchall()
assert res[0][0] == 370
capture_message("hi")
(event,) = events
expected_breadcrumbs = [
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.result": [[], []],
},
"message": "DROP TABLE IF EXISTS test",
"type": "default",
},
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.result": [[], []],
},
"message": "CREATE TABLE test (x Int32) ENGINE = Memory",
"type": "default",
},
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.params": [{"x": 100}],
},
"message": "INSERT INTO test (x) VALUES",
"type": "default",
},
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.params": [[170], [200]],
},
"message": "INSERT INTO test (x) VALUES",
"type": "default",
},
{
"category": "query",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.params": {"minv": 150},
"db.result": [[["370"]], [["'sum(x)'", "'Int64'"]]],
},
"message": "SELECT sum(x) FROM test WHERE x > 150",
"type": "default",
},
]
if not EXPECT_PARAMS_IN_SELECT:
expected_breadcrumbs[-1]["data"].pop("db.params", None)
for crumb in event["breadcrumbs"]["values"]:
crumb.pop("timestamp", None)
assert event["breadcrumbs"]["values"] == expected_breadcrumbs
def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) -> None:
sentry_init(
integrations=[ClickhouseDriverIntegration()],
_experiments={"record_sql_params": True},
traces_sample_rate=1.0,
)
events = capture_events()
transaction_trace_id = None
transaction_span_id = None
with start_transaction(name="test_clickhouse_transaction") as transaction:
transaction_trace_id = transaction.trace_id
transaction_span_id = transaction.span_id
conn = connect("clickhouse://localhost")
cursor = conn.cursor()
cursor.execute("DROP TABLE IF EXISTS test")
cursor.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
cursor.executemany("INSERT INTO test (x) VALUES", [{"x": 100}])
cursor.executemany("INSERT INTO test (x) VALUES", [[170], [200]])
cursor.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
res = cursor.fetchall()
assert res[0][0] == 370
(event,) = events
expected_spans = [
{
"op": "db",
"description": "DROP TABLE IF EXISTS test",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
{
"op": "db",
"description": "CREATE TABLE test (x Int32) ENGINE = Memory",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
{
"op": "db",
"description": "INSERT INTO test (x) VALUES",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
{
"op": "db",
"description": "INSERT INTO test (x) VALUES",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
{
"op": "db",
"description": "SELECT sum(x) FROM test WHERE x > 150",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
]
if not EXPECT_PARAMS_IN_SELECT:
expected_spans[-1]["data"].pop("db.params", None)
for span in event["spans"]:
span.pop("span_id", None)
span.pop("start_timestamp", None)
span.pop("timestamp", None)
assert event["spans"] == expected_spans
def test_clickhouse_dbapi_spans_with_pii(
sentry_init, capture_events, capture_envelopes
) -> None:
sentry_init(
integrations=[ClickhouseDriverIntegration()],
_experiments={"record_sql_params": True},
traces_sample_rate=1.0,
send_default_pii=True,
)
events = capture_events()
transaction_trace_id = None
transaction_span_id = None
with start_transaction(name="test_clickhouse_transaction") as transaction:
transaction_trace_id = transaction.trace_id
transaction_span_id = transaction.span_id
conn = connect("clickhouse://localhost")
cursor = conn.cursor()
cursor.execute("DROP TABLE IF EXISTS test")
cursor.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
cursor.executemany("INSERT INTO test (x) VALUES", [{"x": 100}])
cursor.executemany("INSERT INTO test (x) VALUES", [[170], [200]])
cursor.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
res = cursor.fetchall()
assert res[0][0] == 370
(event,) = events
expected_spans = [
{
"op": "db",
"description": "DROP TABLE IF EXISTS test",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.result": [[], []],
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
{
"op": "db",
"description": "CREATE TABLE test (x Int32) ENGINE = Memory",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.result": [[], []],
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
{
"op": "db",
"description": "INSERT INTO test (x) VALUES",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.params": [{"x": 100}],
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
{
"op": "db",
"description": "INSERT INTO test (x) VALUES",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.params": [[170], [200]],
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
{
"op": "db",
"description": "SELECT sum(x) FROM test WHERE x > 150",
"data": {
"db.system": "clickhouse",
"db.name": "",
"db.user": "default",
"server.address": "localhost",
"server.port": 9000,
"db.params": {"minv": 150},
"db.result": [[[370]], [["sum(x)", "Int64"]]],
},
"same_process_as_parent": True,
"trace_id": transaction_trace_id,
"parent_span_id": transaction_span_id,
},
]
if not EXPECT_PARAMS_IN_SELECT:
expected_spans[-1]["data"].pop("db.params", None)
for span in event["spans"]:
span.pop("span_id", None)
span.pop("start_timestamp", None)
span.pop("timestamp", None)
assert event["spans"] == expected_spans
sentry-python-1.39.2/tests/integrations/cloud_resource_context/ 0000775 0000000 0000000 00000000000 14547447232 0025116 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/cloud_resource_context/__init__.py 0000664 0000000 0000000 00000000000 14547447232 0027215 0 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/cloud_resource_context/test_cloud_resource_context.py 0000664 0000000 0000000 00000031044 14547447232 0033312 0 ustar 00root root 0000000 0000000 import json
import pytest
try:
from unittest import mock # python 3.3 and above
from unittest.mock import MagicMock
except ImportError:
import mock # python < 3.3
from mock import MagicMock
from sentry_sdk.integrations.cloud_resource_context import (
CLOUD_PLATFORM,
CLOUD_PROVIDER,
)
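# Example instance identity document as served by the EC2 instance metadata
# service (IMDSv2).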
AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD = {
"accountId": "298817902971",
"architecture": "x86_64",
"availabilityZone": "us-east-1b",
"billingProducts": None,
"devpayProductCodes": None,
"marketplaceProductCodes": None,
"imageId": "ami-00874d747dde344fa",
"instanceId": "i-07d3301297fe0a55a",
"instanceType": "t2.small",
"kernelId": None,
"pendingTime": "2023-02-08T07:54:05Z",
"privateIp": "171.131.65.115",
"ramdiskId": None,
"region": "us-east-1",
"version": "2017-09-30",
}
try:
# Python 3
AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD_BYTES = bytes(
json.dumps(AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD), "utf-8"
)
except TypeError:
# Python 2
AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD_BYTES = bytes(
json.dumps(AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD)
).encode("utf-8")
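# Example metadata document as served by the GCE metadata server.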
GCP_GCE_EXAMPLE_METADATA_PAYLOAD = {
"instance": {
"attributes": {},
"cpuPlatform": "Intel Broadwell",
"description": "",
"disks": [
{
"deviceName": "tests-cloud-contexts-in-python-sdk",
"index": 0,
"interface": "SCSI",
"mode": "READ_WRITE",
"type": "PERSISTENT-BALANCED",
}
],
"guestAttributes": {},
"hostname": "tests-cloud-contexts-in-python-sdk.c.client-infra-internal.internal",
"id": 1535324527892303790,
"image": "projects/debian-cloud/global/images/debian-11-bullseye-v20221206",
"licenses": [{"id": "2853224013536823851"}],
"machineType": "projects/542054129475/machineTypes/e2-medium",
"maintenanceEvent": "NONE",
"name": "tests-cloud-contexts-in-python-sdk",
"networkInterfaces": [
{
"accessConfigs": [
{"externalIp": "134.30.53.15", "type": "ONE_TO_ONE_NAT"}
],
"dnsServers": ["169.254.169.254"],
"forwardedIps": [],
"gateway": "10.188.0.1",
"ip": "10.188.0.3",
"ipAliases": [],
"mac": "42:01:0c:7c:00:13",
"mtu": 1460,
"network": "projects/544954029479/networks/default",
"subnetmask": "255.255.240.0",
"targetInstanceIps": [],
}
],
"preempted": "FALSE",
"remainingCpuTime": -1,
"scheduling": {
"automaticRestart": "TRUE",
"onHostMaintenance": "MIGRATE",
"preemptible": "FALSE",
},
"serviceAccounts": {},
"tags": ["http-server", "https-server"],
"virtualClock": {"driftToken": "0"},
"zone": "projects/142954069479/zones/northamerica-northeast2-b",
},
"oslogin": {"authenticate": {"sessions": {}}},
"project": {
"attributes": {},
"numericProjectId": 204954049439,
"projectId": "my-project-internal",
},
}
try:
# Python 3
    GCP_GCE_EXAMPLE_METADATA_PAYLOAD_BYTES = bytes(
        json.dumps(GCP_GCE_EXAMPLE_METADATA_PAYLOAD), "utf-8"
    )
except TypeError:
# Python 2
    GCP_GCE_EXAMPLE_METADATA_PAYLOAD_BYTES = bytes(
        json.dumps(GCP_GCE_EXAMPLE_METADATA_PAYLOAD)
    ).encode("utf-8")
def test_is_aws_http_error():
from sentry_sdk.integrations.cloud_resource_context import (
CloudResourceContextIntegration,
)
response = MagicMock()
response.status = 405
CloudResourceContextIntegration.http = MagicMock()
CloudResourceContextIntegration.http.request = MagicMock(return_value=response)
assert CloudResourceContextIntegration._is_aws() is False
assert CloudResourceContextIntegration.aws_token == ""
def test_is_aws_ok():
from sentry_sdk.integrations.cloud_resource_context import (
CloudResourceContextIntegration,
)
response = MagicMock()
response.status = 200
response.data = b"something"
CloudResourceContextIntegration.http = MagicMock()
CloudResourceContextIntegration.http.request = MagicMock(return_value=response)
assert CloudResourceContextIntegration._is_aws() is True
assert CloudResourceContextIntegration.aws_token == "something"
CloudResourceContextIntegration.http.request = MagicMock(
side_effect=Exception("Test")
)
assert CloudResourceContextIntegration._is_aws() is False
def test_is_aws_exception():
from sentry_sdk.integrations.cloud_resource_context import (
CloudResourceContextIntegration,
)
CloudResourceContextIntegration.http = MagicMock()
CloudResourceContextIntegration.http.request = MagicMock(
side_effect=Exception("Test")
)
assert CloudResourceContextIntegration._is_aws() is False
@pytest.mark.parametrize(
"http_status, response_data, expected_context",
[
[
405,
b"",
{
"cloud.provider": CLOUD_PROVIDER.AWS,
"cloud.platform": CLOUD_PLATFORM.AWS_EC2,
},
],
[
200,
b"something-but-not-json",
{
"cloud.provider": CLOUD_PROVIDER.AWS,
"cloud.platform": CLOUD_PLATFORM.AWS_EC2,
},
],
[
200,
AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD_BYTES,
{
"cloud.provider": "aws",
"cloud.platform": "aws_ec2",
"cloud.account.id": "298817902971",
"cloud.availability_zone": "us-east-1b",
"cloud.region": "us-east-1",
"host.id": "i-07d3301297fe0a55a",
"host.type": "t2.small",
},
],
],
)
def test_get_aws_context(http_status, response_data, expected_context):
from sentry_sdk.integrations.cloud_resource_context import (
CloudResourceContextIntegration,
)
response = MagicMock()
response.status = http_status
response.data = response_data
CloudResourceContextIntegration.http = MagicMock()
CloudResourceContextIntegration.http.request = MagicMock(return_value=response)
assert CloudResourceContextIntegration._get_aws_context() == expected_context
def test_is_gcp_http_error():
from sentry_sdk.integrations.cloud_resource_context import (
CloudResourceContextIntegration,
)
response = MagicMock()
response.status = 405
response.data = b'{"some": "json"}'
CloudResourceContextIntegration.http = MagicMock()
CloudResourceContextIntegration.http.request = MagicMock(return_value=response)
assert CloudResourceContextIntegration._is_gcp() is False
assert CloudResourceContextIntegration.gcp_metadata is None
def test_is_gcp_ok():
from sentry_sdk.integrations.cloud_resource_context import (
CloudResourceContextIntegration,
)
response = MagicMock()
response.status = 200
response.data = b'{"some": "json"}'
CloudResourceContextIntegration.http = MagicMock()
CloudResourceContextIntegration.http.request = MagicMock(return_value=response)
assert CloudResourceContextIntegration._is_gcp() is True
assert CloudResourceContextIntegration.gcp_metadata == {"some": "json"}
def test_is_gcp_exception():
from sentry_sdk.integrations.cloud_resource_context import (
CloudResourceContextIntegration,
)
CloudResourceContextIntegration.http = MagicMock()
CloudResourceContextIntegration.http.request = MagicMock(
side_effect=Exception("Test")
)
assert CloudResourceContextIntegration._is_gcp() is False
@pytest.mark.parametrize(
"http_status, response_data, expected_context",
[
[
405,
None,
{
"cloud.provider": CLOUD_PROVIDER.GCP,
"cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE,
},
],
[
200,
b"something-but-not-json",
{
"cloud.provider": CLOUD_PROVIDER.GCP,
"cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE,
},
],
[
200,
            GCP_GCE_EXAMPLE_METADATA_PAYLOAD_BYTES,
{
"cloud.provider": "gcp",
"cloud.platform": "gcp_compute_engine",
"cloud.account.id": "my-project-internal",
"cloud.availability_zone": "northamerica-northeast2-b",
"host.id": 1535324527892303790,
},
],
],
)
def test_get_gcp_context(http_status, response_data, expected_context):
from sentry_sdk.integrations.cloud_resource_context import (
CloudResourceContextIntegration,
)
CloudResourceContextIntegration.gcp_metadata = None
response = MagicMock()
response.status = http_status
response.data = response_data
CloudResourceContextIntegration.http = MagicMock()
CloudResourceContextIntegration.http.request = MagicMock(return_value=response)
assert CloudResourceContextIntegration._get_gcp_context() == expected_context
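# Note: when both AWS and GCP detection succeed, the provider is reported as AWS.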
@pytest.mark.parametrize(
"is_aws, is_gcp, expected_provider",
[
[False, False, ""],
[False, True, CLOUD_PROVIDER.GCP],
[True, False, CLOUD_PROVIDER.AWS],
[True, True, CLOUD_PROVIDER.AWS],
],
)
def test_get_cloud_provider(is_aws, is_gcp, expected_provider):
from sentry_sdk.integrations.cloud_resource_context import (
CloudResourceContextIntegration,
)
CloudResourceContextIntegration._is_aws = MagicMock(return_value=is_aws)
CloudResourceContextIntegration._is_gcp = MagicMock(return_value=is_gcp)
assert CloudResourceContextIntegration._get_cloud_provider() == expected_provider
@pytest.mark.parametrize(
"cloud_provider",
[
CLOUD_PROVIDER.ALIBABA,
CLOUD_PROVIDER.AZURE,
CLOUD_PROVIDER.IBM,
CLOUD_PROVIDER.TENCENT,
],
)
def test_get_cloud_resource_context_unsupported_providers(cloud_provider):
from sentry_sdk.integrations.cloud_resource_context import (
CloudResourceContextIntegration,
)
CloudResourceContextIntegration._get_cloud_provider = MagicMock(
return_value=cloud_provider
)
assert CloudResourceContextIntegration._get_cloud_resource_context() == {}
@pytest.mark.parametrize(
"cloud_provider",
[
CLOUD_PROVIDER.AWS,
CLOUD_PROVIDER.GCP,
],
)
def test_get_cloud_resource_context_supported_providers(cloud_provider):
from sentry_sdk.integrations.cloud_resource_context import (
CloudResourceContextIntegration,
)
CloudResourceContextIntegration._get_cloud_provider = MagicMock(
return_value=cloud_provider
)
assert CloudResourceContextIntegration._get_cloud_resource_context() != {}
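# Columns: detected cloud provider, resource context returned by the
# integration, whether a warning is logged, whether set_context is called.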
@pytest.mark.parametrize(
"cloud_provider, cloud_resource_context, warning_called, set_context_called",
[
["", {}, False, False],
[CLOUD_PROVIDER.AWS, {}, False, False],
[CLOUD_PROVIDER.GCP, {}, False, False],
[CLOUD_PROVIDER.AZURE, {}, True, False],
[CLOUD_PROVIDER.ALIBABA, {}, True, False],
[CLOUD_PROVIDER.IBM, {}, True, False],
[CLOUD_PROVIDER.TENCENT, {}, True, False],
["", {"some": "context"}, False, True],
[CLOUD_PROVIDER.AWS, {"some": "context"}, False, True],
[CLOUD_PROVIDER.GCP, {"some": "context"}, False, True],
],
)
def test_setup_once(
cloud_provider, cloud_resource_context, warning_called, set_context_called
):
from sentry_sdk.integrations.cloud_resource_context import (
CloudResourceContextIntegration,
)
CloudResourceContextIntegration.cloud_provider = cloud_provider
CloudResourceContextIntegration._get_cloud_resource_context = MagicMock(
return_value=cloud_resource_context
)
with mock.patch(
"sentry_sdk.integrations.cloud_resource_context.set_context"
) as fake_set_context:
with mock.patch(
"sentry_sdk.integrations.cloud_resource_context.logger.warning"
) as fake_warning:
CloudResourceContextIntegration.setup_once()
if set_context_called:
fake_set_context.assert_called_once_with(
"cloud_resource", cloud_resource_context
)
else:
fake_set_context.assert_not_called()
if warning_called:
assert fake_warning.call_count == 1
else:
fake_warning.assert_not_called()
sentry-python-1.39.2/tests/integrations/conftest.py 0000664 0000000 0000000 00000001060 14547447232 0022531 0 ustar 00root root 0000000 0000000 import pytest
import sentry_sdk
@pytest.fixture
def capture_exceptions(monkeypatch):
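    # Wrap Hub.capture_event so that the original exception objects attached
    # via the "exc_info" hint are collected for direct inspection by tests.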
def inner():
errors = set()
old_capture_event = sentry_sdk.Hub.capture_event
def capture_event(self, event, hint=None):
if hint:
if "exc_info" in hint:
error = hint["exc_info"][1]
errors.add(error)
return old_capture_event(self, event, hint=hint)
monkeypatch.setattr(sentry_sdk.Hub, "capture_event", capture_event)
return errors
return inner
sentry-python-1.39.2/tests/integrations/django/ 0000775 0000000 0000000 00000000000 14547447232 0021577 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/django/__init__.py 0000664 0000000 0000000 00000000055 14547447232 0023710 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("django")
sentry-python-1.39.2/tests/integrations/django/asgi/ 0000775 0000000 0000000 00000000000 14547447232 0022522 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/django/asgi/__init__.py 0000664 0000000 0000000 00000000057 14547447232 0024635 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("channels")
sentry-python-1.39.2/tests/integrations/django/asgi/image.png 0000664 0000000 0000000 00000000464 14547447232 0024316 0 ustar 00root root 0000000 0000000 PNG
[binary PNG image data omitted]
sentry-python-1.39.2/tests/integrations/django/asgi/test_asgi.py 0000664 0000000 0000000 00000037267 14547447232 0025065 0 ustar 00root root 0000000 0000000 import base64
import json
import os
import django
import pytest
from channels.testing import HttpCommunicator
from sentry_sdk import capture_message
from sentry_sdk.integrations.django import DjangoIntegration
from tests.integrations.django.myapp.asgi import channels_application
try:
from django.urls import reverse
except ImportError:
from django.core.urlresolvers import reverse
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
APPS = [channels_application]
if django.VERSION >= (3, 0):
from tests.integrations.django.myapp.asgi import asgi_application
APPS += [asgi_application]
@pytest.mark.parametrize("application", APPS)
@pytest.mark.asyncio
@pytest.mark.forked
async def test_basic(sentry_init, capture_events, application):
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
events = capture_events()
comm = HttpCommunicator(application, "GET", "/view-exc?test=query")
response = await comm.get_response()
assert response["status"] == 500
(event,) = events
(exception,) = event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
# Test that the ASGI middleware got set up correctly. Right now this needs
# to be installed manually (see myapp/asgi.py)
assert event["transaction"] == "/view-exc"
assert event["request"] == {
"cookies": {},
"headers": {},
"method": "GET",
"query_string": "test=query",
"url": "/view-exc",
}
capture_message("hi")
event = events[-1]
assert "request" not in event
@pytest.mark.parametrize("application", APPS)
@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_async_views(sentry_init, capture_events, application):
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
events = capture_events()
comm = HttpCommunicator(application, "GET", "/async_message")
response = await comm.get_response()
assert response["status"] == 200
(event,) = events
assert event["transaction"] == "/async_message"
assert event["request"] == {
"cookies": {},
"headers": {},
"method": "GET",
"query_string": None,
"url": "/async_message",
}
@pytest.mark.parametrize("application", APPS)
@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_active_thread_id(sentry_init, capture_envelopes, endpoint, application):
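    # PROFILE_MINIMUM_SAMPLES is patched to 0 so that the short-lived test
    # request still produces a profile instead of being discarded.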
with mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0):
sentry_init(
integrations=[DjangoIntegration()],
traces_sample_rate=1.0,
_experiments={"profiles_sample_rate": 1.0},
)
envelopes = capture_envelopes()
comm = HttpCommunicator(application, "GET", endpoint)
response = await comm.get_response()
assert response["status"] == 200, response["body"]
await comm.wait()
data = json.loads(response["body"])
envelopes = [envelope for envelope in envelopes]
assert len(envelopes) == 1
profiles = [item for item in envelopes[0].items if item.type == "profile"]
assert len(profiles) == 1
for profile in profiles:
transactions = profile.payload.json["transactions"]
assert len(transactions) == 1
assert str(data["active"]) == transactions[0]["active_thread_id"]
@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_async_views_concurrent_execution(sentry_init, settings):
import asyncio
import time
settings.MIDDLEWARE = []
asgi_application.load_middleware(is_async=True)
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
comm = HttpCommunicator(asgi_application, "GET", "/my_async_view")
comm2 = HttpCommunicator(asgi_application, "GET", "/my_async_view")
loop = asyncio.get_event_loop()
start = time.time()
r1 = loop.create_task(comm.get_response(timeout=5))
r2 = loop.create_task(comm2.get_response(timeout=5))
(resp1, resp2), _ = await asyncio.wait({r1, r2})
end = time.time()
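    # Each view sleeps for one second; if the two requests were handled
    # concurrently, total wall-clock time stays well below two seconds.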
assert resp1.result()["status"] == 200
assert resp2.result()["status"] == 200
assert end - start < 1.5
@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_async_middleware_that_is_function_concurrent_execution(
sentry_init, settings
):
import asyncio
import time
settings.MIDDLEWARE = [
"tests.integrations.django.myapp.middleware.simple_middleware"
]
asgi_application.load_middleware(is_async=True)
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
comm = HttpCommunicator(asgi_application, "GET", "/my_async_view")
comm2 = HttpCommunicator(asgi_application, "GET", "/my_async_view")
loop = asyncio.get_event_loop()
start = time.time()
r1 = loop.create_task(comm.get_response(timeout=5))
r2 = loop.create_task(comm2.get_response(timeout=5))
(resp1, resp2), _ = await asyncio.wait({r1, r2})
end = time.time()
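    # As above: concurrent handling keeps the total well below two
    # sequential one-second sleeps.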
assert resp1.result()["status"] == 200
assert resp2.result()["status"] == 200
assert end - start < 1.5
@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_async_middleware_spans(
sentry_init, render_span_tree, capture_events, settings
):
settings.MIDDLEWARE = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"tests.integrations.django.myapp.settings.TestMiddleware",
]
asgi_application.load_middleware(is_async=True)
sentry_init(
integrations=[DjangoIntegration(middleware_spans=True)],
traces_sample_rate=1.0,
_experiments={"record_sql_params": True},
)
events = capture_events()
comm = HttpCommunicator(asgi_application, "GET", "/async_message")
response = await comm.get_response()
assert response["status"] == 200
await comm.wait()
message, transaction = events
assert (
render_span_tree(transaction)
== """\
- op="http.server": description=null
- op="event.django": description="django.db.reset_queries"
- op="event.django": description="django.db.close_old_connections"
- op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.__acall__"
- op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.__acall__"
- op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.__acall__"
- op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.__acall__"
- op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
- op="view.render": description="async_message"
- op="event.django": description="django.db.close_old_connections"
- op="event.django": description="django.core.cache.close_caches"
- op="event.django": description="django.core.handlers.base.reset_urlconf\""""
)
@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_has_trace_if_performance_enabled(sentry_init, capture_events):
sentry_init(integrations=[DjangoIntegration()], traces_sample_rate=1.0)
events = capture_events()
comm = HttpCommunicator(asgi_application, "GET", "/view-exc-with-msg")
response = await comm.get_response()
assert response["status"] == 500
    # ASGI Django does not create transactions by default,
    # so we do not have a transaction_event here.
(msg_event, error_event) = events
assert msg_event["contexts"]["trace"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert error_event["contexts"]["trace"]
assert "trace_id" in error_event["contexts"]["trace"]
assert (
msg_event["contexts"]["trace"]["trace_id"]
== error_event["contexts"]["trace"]["trace_id"]
)
@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_has_trace_if_performance_disabled(sentry_init, capture_events):
sentry_init(integrations=[DjangoIntegration()])
events = capture_events()
comm = HttpCommunicator(asgi_application, "GET", "/view-exc-with-msg")
response = await comm.get_response()
assert response["status"] == 500
(msg_event, error_event) = events
assert msg_event["contexts"]["trace"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert error_event["contexts"]["trace"]
assert "trace_id" in error_event["contexts"]["trace"]
assert (
msg_event["contexts"]["trace"]["trace_id"]
== error_event["contexts"]["trace"]["trace_id"]
)
@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_trace_from_headers_if_performance_enabled(sentry_init, capture_events):
sentry_init(integrations=[DjangoIntegration()], traces_sample_rate=1.0)
events = capture_events()
trace_id = "582b43a4192642f0b136d5159a501701"
sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
comm = HttpCommunicator(
asgi_application,
"GET",
"/view-exc-with-msg",
headers=[(b"sentry-trace", sentry_trace_header.encode())],
)
response = await comm.get_response()
assert response["status"] == 500
    # ASGI Django does not create transactions by default,
    # so we do not have a transaction_event here.
(msg_event, error_event) = events
assert msg_event["contexts"]["trace"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert error_event["contexts"]["trace"]
assert "trace_id" in error_event["contexts"]["trace"]
assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
assert error_event["contexts"]["trace"]["trace_id"] == trace_id
@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_trace_from_headers_if_performance_disabled(sentry_init, capture_events):
sentry_init(integrations=[DjangoIntegration()])
events = capture_events()
trace_id = "582b43a4192642f0b136d5159a501701"
sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
comm = HttpCommunicator(
asgi_application,
"GET",
"/view-exc-with-msg",
headers=[(b"sentry-trace", sentry_trace_header.encode())],
)
response = await comm.get_response()
assert response["status"] == 500
(msg_event, error_event) = events
assert msg_event["contexts"]["trace"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert error_event["contexts"]["trace"]
assert "trace_id" in error_event["contexts"]["trace"]
assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
assert error_event["contexts"]["trace"]["trace_id"] == trace_id
PICTURE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "image.png")
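# Multipart form body with two text fields and a base64-encoded file upload,
# used to verify that request bodies are captured and PII-scrubbed correctly.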
BODY_FORM = """--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="username"\r\n\r\nJane\r\n--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="password"\r\n\r\nhello123\r\n--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="photo"; filename="image.png"\r\nContent-Type: image/png\r\nContent-Transfer-Encoding: base64\r\n\r\n{{image_data}}\r\n--fd721ef49ea403a6--\r\n""".replace(
"{{image_data}}", base64.b64encode(open(PICTURE, "rb").read()).decode("utf-8")
).encode(
"utf-8"
)
BODY_FORM_CONTENT_LENGTH = str(len(BODY_FORM)).encode("utf-8")
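# Columns: send_default_pii, HTTP method, headers, url name, raw request body,
# and the request data expected on the captured event (None = no data at all).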
@pytest.mark.parametrize("application", APPS)
@pytest.mark.parametrize(
"send_default_pii,method,headers,url_name,body,expected_data",
[
(
True,
"POST",
[(b"content-type", b"text/plain")],
"post_echo_async",
b"",
None,
),
(
True,
"POST",
[(b"content-type", b"text/plain")],
"post_echo_async",
b"some raw text body",
"",
),
(
True,
"POST",
[(b"content-type", b"application/json")],
"post_echo_async",
b'{"username":"xyz","password":"xyz"}',
{"username": "xyz", "password": "xyz"},
),
(
True,
"POST",
[(b"content-type", b"application/xml")],
"post_echo_async",
            b'<?xml version="1.0" encoding="UTF-8"?> <root></root>',
"",
),
(
True,
"POST",
[
(b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"),
(b"content-length", BODY_FORM_CONTENT_LENGTH),
],
"post_echo_async",
BODY_FORM,
{"password": "hello123", "photo": "", "username": "Jane"},
),
(
False,
"POST",
[(b"content-type", b"text/plain")],
"post_echo_async",
b"",
None,
),
(
False,
"POST",
[(b"content-type", b"text/plain")],
"post_echo_async",
b"some raw text body",
"",
),
(
False,
"POST",
[(b"content-type", b"application/json")],
"post_echo_async",
b'{"username":"xyz","password":"xyz"}',
{"username": "xyz", "password": "[Filtered]"},
),
(
False,
"POST",
[(b"content-type", b"application/xml")],
"post_echo_async",
            b'<?xml version="1.0" encoding="UTF-8"?> <root></root>',
"",
),
(
False,
"POST",
[
(b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"),
(b"content-length", BODY_FORM_CONTENT_LENGTH),
],
"post_echo_async",
BODY_FORM,
{"password": "[Filtered]", "photo": "", "username": "Jane"},
),
],
)
@pytest.mark.asyncio
@pytest.mark.forked
@pytest.mark.skipif(
django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
)
async def test_asgi_request_body(
sentry_init,
capture_envelopes,
application,
send_default_pii,
method,
headers,
url_name,
body,
expected_data,
):
sentry_init(
send_default_pii=send_default_pii,
integrations=[
DjangoIntegration(),
],
)
envelopes = capture_envelopes()
comm = HttpCommunicator(
application,
method=method,
headers=headers,
path=reverse(url_name),
body=body,
)
response = await comm.get_response()
assert response["status"] == 200
await comm.wait()
assert response["body"] == body
(envelope,) = envelopes
event = envelope.get_event()
if expected_data is not None:
assert event["request"]["data"] == expected_data
else:
assert "data" not in event["request"]
sentry-python-1.39.2/tests/integrations/django/myapp/ 0000775 0000000 0000000 00000000000 14547447232 0022725 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/django/myapp/__init__.py 0000664 0000000 0000000 00000000000 14547447232 0025024 0 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/django/myapp/asgi.py 0000664 0000000 0000000 00000000747 14547447232 0024232 0 ustar 00root root 0000000 0000000 """
ASGI entrypoint. Configures Django and then runs the application
defined in the ASGI_APPLICATION setting.
"""
import os
import django
from channels.routing import get_default_application
os.environ.setdefault(
"DJANGO_SETTINGS_MODULE", "tests.integrations.django.myapp.settings"
)
django.setup()
channels_application = get_default_application()
if django.VERSION >= (3, 0):
from django.core.asgi import get_asgi_application
asgi_application = get_asgi_application()
sentry-python-1.39.2/tests/integrations/django/myapp/custom_urls.py 0000664 0000000 0000000 00000001773 14547447232 0025666 0 ustar 00root root 0000000 0000000 """myapp URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from __future__ import absolute_import
try:
from django.urls import path
except ImportError:
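    # Django < 2.0 has no django.urls.path(); emulate it with regex-based url().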
from django.conf.urls import url
def path(path, *args, **kwargs):
return url("^{}$".format(path), *args, **kwargs)
from . import views
urlpatterns = [
path("custom/ok", views.custom_ok, name="custom_ok"),
path("custom/exc", views.custom_exc, name="custom_exc"),
]
sentry-python-1.39.2/tests/integrations/django/myapp/manage.py 0000664 0000000 0000000 00000000434 14547447232 0024530 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault(
"DJANGO_SETTINGS_MODULE", "tests.integrations.django.myapp.settings"
)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
sentry-python-1.39.2/tests/integrations/django/myapp/management/ 0000775 0000000 0000000 00000000000 14547447232 0025041 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/django/myapp/management/__init__.py 0000664 0000000 0000000 00000000000 14547447232 0027140 0 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/django/myapp/management/commands/ 0000775 0000000 0000000 00000000000 14547447232 0026642 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/django/myapp/management/commands/__init__.py 0000664 0000000 0000000 00000000000 14547447232 0030741 0 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/django/myapp/management/commands/mycrash.py 0000664 0000000 0000000 00000000273 14547447232 0030664 0 ustar 00root root 0000000 0000000 from django.core.management.base import BaseCommand
class Command(BaseCommand):
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
1 / 0
sentry-python-1.39.2/tests/integrations/django/myapp/middleware.py 0000664 0000000 0000000 00000001420 14547447232 0025411 0 ustar 00root root 0000000 0000000 import django
if django.VERSION >= (3, 1):
import asyncio
from django.utils.decorators import sync_and_async_middleware
@sync_and_async_middleware
def simple_middleware(get_response):
if asyncio.iscoroutinefunction(get_response):
async def middleware(request):
response = await get_response(request)
return response
else:
def middleware(request):
response = get_response(request)
return response
return middleware
def custom_urlconf_middleware(get_response):
def middleware(request):
request.urlconf = "tests.integrations.django.myapp.custom_urls"
response = get_response(request)
return response
return middleware
sentry-python-1.39.2/tests/integrations/django/myapp/routing.py 0000664 0000000 0000000 00000000756 14547447232 0024776 0 ustar 00root root 0000000 0000000 import channels
from channels.routing import ProtocolTypeRouter
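# channels < 3.0 routes HTTP through the AsgiHandler class itself, channels 3.x
# expects an AsgiHandler instance, and channels >= 4.0 removed AsgiHandler in
# favor of Django's own ASGI application.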
try:
from channels.http import AsgiHandler
if channels.__version__ < "3.0.0":
django_asgi_app = AsgiHandler
else:
django_asgi_app = AsgiHandler()
except ModuleNotFoundError:
# Since channels 4.0 ASGI handling is done by Django itself
from django.core.asgi import get_asgi_application
django_asgi_app = get_asgi_application()
application = ProtocolTypeRouter({"http": django_asgi_app})
sentry-python-1.39.2/tests/integrations/django/myapp/settings.py 0000664 0000000 0000000 00000012030 14547447232 0025133 0 ustar 00root root 0000000 0000000 """
Django settings for myapp project.
Generated by 'django-admin startproject' using Django 2.0.7.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
# We shouldn't access settings while setting up integrations. Initialize SDK
# here to provoke any errors that might occur.
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
sentry_sdk.init(integrations=[DjangoIntegration()])
import os
try:
# Django >= 1.10
from django.utils.deprecation import MiddlewareMixin
except ImportError:
# Not required for Django <= 1.9, see:
# https://docs.djangoproject.com/en/1.10/topics/http/middleware/#upgrading-pre-django-1-10-style-middleware
MiddlewareMixin = object
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "u95e#xr$t3!vdux)fj11!*q*^w^^r#kiyrvt3kjui-t_k%m3op"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ["localhost"]
# Application definition
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"tests.integrations.django.myapp",
]
class TestMiddleware(MiddlewareMixin):
def process_request(self, request):
# https://github.com/getsentry/sentry-python/issues/837 -- We should
# not touch the resolver_match because apparently people rely on it.
if request.resolver_match:
assert not getattr(request.resolver_match.callback, "__wrapped__", None)
if "middleware-exc" in request.path:
1 / 0
def process_response(self, request, response):
return response
def TestFunctionMiddleware(get_response): # noqa: N802
def middleware(request):
return get_response(request)
return middleware
MIDDLEWARE_CLASSES = [
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"tests.integrations.django.myapp.settings.TestMiddleware",
]
if MiddlewareMixin is not object:
MIDDLEWARE = MIDDLEWARE_CLASSES + [
"tests.integrations.django.myapp.settings.TestFunctionMiddleware"
]
ROOT_URLCONF = "tests.integrations.django.myapp.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"debug": True,
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
]
WSGI_APPLICATION = "tests.integrations.django.myapp.wsgi.application"
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
try:
import psycopg2 # noqa
db_engine = "django.db.backends.postgresql"
try:
from django.db.backends import postgresql # noqa: F401
except ImportError:
db_engine = "django.db.backends.postgresql_psycopg2"
DATABASES["postgres"] = {
"ENGINE": db_engine,
"NAME": os.environ["SENTRY_PYTHON_TEST_POSTGRES_NAME"],
"USER": os.environ["SENTRY_PYTHON_TEST_POSTGRES_USER"],
"PASSWORD": os.environ["SENTRY_PYTHON_TEST_POSTGRES_PASSWORD"],
"HOST": os.environ.get("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost"),
"PORT": 5432,
}
except (ImportError, KeyError):
from sentry_sdk.utils import logger
    logger.warning("No psycopg2 found, testing with SQLite.")
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
},
{"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
{"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = False
TEMPLATE_DEBUG = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = "/static/"
# django-channels specific
ASGI_APPLICATION = "tests.integrations.django.myapp.routing.application"
sentry-python-1.39.2/tests/integrations/django/myapp/templates/ 0000775 0000000 0000000 00000000000 14547447232 0024723 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/django/myapp/templates/error.html 0000664 0000000 0000000 00000000113 14547447232 0026735 0 ustar 00root root 0000000 0000000 1
2
3
4
5
6
7
8
9
{% invalid template tag %}
11
12
13
14
15
16
17
18
19
20
sentry-python-1.39.2/tests/integrations/django/myapp/templates/trace_meta.html 0000664 0000000 0000000 00000000030 14547447232 0027706 0 ustar 00root root 0000000 0000000 {{ sentry_trace_meta }}
sentry-python-1.39.2/tests/integrations/django/myapp/templates/user_name.html 0000664 0000000 0000000 00000000043 14547447232 0027564 0 ustar 00root root 0000000 0000000 {{ request.user }}: {{ user_age }}
sentry-python-1.39.2/tests/integrations/django/myapp/urls.py 0000664 0000000 0000000 00000010053 14547447232 0024263 0 ustar 00root root 0000000 0000000 """myapp URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from __future__ import absolute_import
try:
from django.urls import path
except ImportError:
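    # Same shim as in custom_urls.py: emulate path() on Django < 2.0.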
from django.conf.urls import url
def path(path, *args, **kwargs):
return url("^{}$".format(path), *args, **kwargs)
from . import views
urlpatterns = [
path("view-exc", views.view_exc, name="view_exc"),
path("view-exc-with-msg", views.view_exc_with_msg, name="view_exc_with_msg"),
path("cached-view", views.cached_view, name="cached_view"),
path("not-cached-view", views.not_cached_view, name="not_cached_view"),
path(
"view-with-cached-template-fragment",
views.view_with_cached_template_fragment,
name="view_with_cached_template_fragment",
),
path(
"read-body-and-view-exc",
views.read_body_and_view_exc,
name="read_body_and_view_exc",
),
path("middleware-exc", views.message, name="middleware_exc"),
path("message", views.message, name="message"),
path("mylogin", views.mylogin, name="mylogin"),
path("classbased", views.ClassBasedView.as_view(), name="classbased"),
path("sentryclass", views.SentryClassBasedView(), name="sentryclass"),
path(
"sentryclass-csrf",
views.SentryClassBasedViewWithCsrf(),
name="sentryclass_csrf",
),
path("post-echo", views.post_echo, name="post_echo"),
path("template-exc", views.template_exc, name="template_exc"),
path("template-test", views.template_test, name="template_test"),
path("template-test2", views.template_test2, name="template_test2"),
path("template-test3", views.template_test3, name="template_test3"),
path("postgres-select", views.postgres_select, name="postgres_select"),
path("postgres-select-slow", views.postgres_select_orm, name="postgres_select_orm"),
path(
"permission-denied-exc",
views.permission_denied_exc,
name="permission_denied_exc",
),
path(
"csrf-hello-not-exempt",
views.csrf_hello_not_exempt,
name="csrf_hello_not_exempt",
),
path("sync/thread_ids", views.thread_ids_sync, name="thread_ids_sync"),
]
# async views
if views.async_message is not None:
urlpatterns.append(path("async_message", views.async_message, name="async_message"))
if views.my_async_view is not None:
urlpatterns.append(path("my_async_view", views.my_async_view, name="my_async_view"))
if views.thread_ids_async is not None:
urlpatterns.append(
path("async/thread_ids", views.thread_ids_async, name="thread_ids_async")
)
if views.post_echo_async is not None:
urlpatterns.append(
path("post_echo_async", views.post_echo_async, name="post_echo_async")
)
# rest framework
try:
urlpatterns.append(
path("rest-framework-exc", views.rest_framework_exc, name="rest_framework_exc")
)
urlpatterns.append(
path(
"rest-framework-read-body-and-exc",
views.rest_framework_read_body_and_exc,
name="rest_framework_read_body_and_exc",
)
)
urlpatterns.append(path("rest-hello", views.rest_hello, name="rest_hello"))
urlpatterns.append(
path("rest-json-response", views.rest_json_response, name="rest_json_response")
)
urlpatterns.append(
path(
"rest-permission-denied-exc",
views.rest_permission_denied_exc,
name="rest_permission_denied_exc",
)
)
except AttributeError:
pass
handler500 = views.handler500
handler404 = views.handler404
sentry-python-1.39.2/tests/integrations/django/myapp/views.py 0000664 0000000 0000000 00000013627 14547447232 0024445 0 ustar 00root root 0000000 0000000 import json
import threading
from django import VERSION
from django.contrib.auth import login
from django.contrib.auth.models import User
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse, HttpResponseNotFound, HttpResponseServerError
from django.shortcuts import render
from django.template import Context, Template
from django.template.response import TemplateResponse
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_page
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import ListView
try:
from rest_framework.decorators import api_view
from rest_framework.response import Response
@api_view(["POST"])
def rest_framework_exc(request):
1 / 0
@api_view(["POST"])
def rest_framework_read_body_and_exc(request):
request.data
1 / 0
@api_view(["GET"])
def rest_hello(request):
return HttpResponse("ok")
@api_view(["GET"])
def rest_permission_denied_exc(request):
raise PermissionDenied("bye")
@api_view(["GET"])
def rest_json_response(request):
return Response(dict(ok=True))
except ImportError:
pass
import sentry_sdk
from sentry_sdk import capture_message
@csrf_exempt
def view_exc(request):
1 / 0
@csrf_exempt
def view_exc_with_msg(request):
capture_message("oops")
1 / 0
@cache_page(60)
def cached_view(request):
return HttpResponse("ok")
def not_cached_view(request):
return HttpResponse("ok")
def view_with_cached_template_fragment(request):
template = Template(
"""{% load cache %}
Not cached content goes here.
{% cache 500 some_identifier %}
And here some cached content.
{% endcache %}
"""
)
rendered = template.render(Context({}))
return HttpResponse(rendered)
# This is a "class based view" as previously found in the sentry codebase. The
# interesting property of this one is that csrf_exempt, as a class attribute,
# is not in __dict__, so regular use of functools.wraps will not forward the
# attribute.
class SentryClassBasedView(object):
csrf_exempt = True
def __call__(self, request):
return HttpResponse("ok")
class SentryClassBasedViewWithCsrf(object):
def __call__(self, request):
return HttpResponse("ok")
@csrf_exempt
def read_body_and_view_exc(request):
request.read()
1 / 0
@csrf_exempt
def message(request):
sentry_sdk.capture_message("hi")
return HttpResponse("ok")
@csrf_exempt
def mylogin(request):
user = User.objects.create_user("john", "lennon@thebeatles.com", "johnpassword")
user.backend = "django.contrib.auth.backends.ModelBackend"
login(request, user)
return HttpResponse("ok")
@csrf_exempt
def handler500(request):
return HttpResponseServerError("Sentry error: %s" % sentry_sdk.last_event_id())
class ClassBasedView(ListView):
model = None
@method_decorator(csrf_exempt)
def dispatch(self, request, *args, **kwargs):
return super(ClassBasedView, self).dispatch(request, *args, **kwargs)
def head(self, *args, **kwargs):
sentry_sdk.capture_message("hi")
return HttpResponse("")
def post(self, *args, **kwargs):
return HttpResponse("ok")
@csrf_exempt
def post_echo(request):
sentry_sdk.capture_message("hi")
return HttpResponse(request.body)
@csrf_exempt
def handler404(*args, **kwargs):
sentry_sdk.capture_message("not found", level="error")
return HttpResponseNotFound("404")
@csrf_exempt
def template_exc(request, *args, **kwargs):
return render(request, "error.html")
@csrf_exempt
def template_test(request, *args, **kwargs):
return render(request, "user_name.html", {"user_age": 20})
@csrf_exempt
def custom_ok(request, *args, **kwargs):
return HttpResponse("custom ok")
@csrf_exempt
def custom_exc(request, *args, **kwargs):
1 / 0
@csrf_exempt
def template_test2(request, *args, **kwargs):
return TemplateResponse(
request, ("user_name.html", "another_template.html"), {"user_age": 25}
)
@csrf_exempt
def template_test3(request, *args, **kwargs):
from sentry_sdk import Hub
hub = Hub.current
capture_message(hub.get_traceparent() + "\n" + hub.get_baggage())
return render(request, "trace_meta.html", {})
@csrf_exempt
def postgres_select(request, *args, **kwargs):
from django.db import connections
cursor = connections["postgres"].cursor()
cursor.execute("SELECT 1;")
return HttpResponse("ok")
@csrf_exempt
def postgres_select_orm(request, *args, **kwargs):
user = User.objects.using("postgres").all().first()
return HttpResponse("ok {}".format(user))
@csrf_exempt
def permission_denied_exc(*args, **kwargs):
raise PermissionDenied("bye")
def csrf_hello_not_exempt(*args, **kwargs):
return HttpResponse("ok")
def thread_ids_sync(*args, **kwargs):
response = json.dumps(
{
"main": threading.main_thread().ident,
"active": threading.current_thread().ident,
}
)
return HttpResponse(response)
if VERSION >= (3, 1):
# Use exec to produce valid Python 2
exec(
"""async def async_message(request):
sentry_sdk.capture_message("hi")
return HttpResponse("ok")"""
)
exec(
"""async def my_async_view(request):
import asyncio
await asyncio.sleep(1)
return HttpResponse('Hello World')"""
)
exec(
"""async def thread_ids_async(request):
response = json.dumps({
"main": threading.main_thread().ident,
"active": threading.current_thread().ident,
})
return HttpResponse(response)"""
)
exec(
"""async def post_echo_async(request):
sentry_sdk.capture_message("hi")
return HttpResponse(request.body)
post_echo_async.csrf_exempt = True"""
)
else:
async_message = None
my_async_view = None
thread_ids_async = None
post_echo_async = None
sentry-python-1.39.2/tests/integrations/django/myapp/wsgi.py 0000664 0000000 0000000 00000000643 14547447232 0024253 0 ustar 00root root 0000000 0000000 """
WSGI config for myapp project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault(
"DJANGO_SETTINGS_MODULE", "tests.integrations.django.myapp.settings"
)
application = get_wsgi_application()
sentry-python-1.39.2/tests/integrations/django/test_basic.py 0000664 0000000 0000000 00000124211 14547447232 0024272 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import json
import os
import random
import re
import pytest
from functools import partial
from werkzeug.test import Client
from django import VERSION as DJANGO_VERSION
from django.contrib.auth.models import User
from django.core.management import execute_from_command_line
from django.db.utils import OperationalError, ProgrammingError, DataError
try:
from django.urls import reverse
except ImportError:
from django.core.urlresolvers import reverse
from sentry_sdk._compat import PY2, PY310
from sentry_sdk import capture_message, capture_exception, configure_scope
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.django import DjangoIntegration, _set_db_data
from sentry_sdk.integrations.django.signals_handlers import _get_receiver_name
from sentry_sdk.integrations.django.caching import _get_span_description
from sentry_sdk.integrations.executing import ExecutingIntegration
from sentry_sdk.tracing import Span
from tests.conftest import unpack_werkzeug_response
from tests.integrations.django.myapp.wsgi import application
from tests.integrations.django.utils import pytest_mark_django_db_decorator
DJANGO_VERSION = DJANGO_VERSION[:2]
@pytest.fixture
def client():
return Client(application)
@pytest.fixture
def use_django_caching(settings):
settings.CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
"LOCATION": "unique-snowflake-%s" % random.randint(1, 1000000),
}
}
@pytest.fixture
def use_django_caching_with_middlewares(settings):
settings.CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
"LOCATION": "unique-snowflake-%s" % random.randint(1, 1000000),
}
}
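    # Django's cache middleware only works when UpdateCacheMiddleware is the
    # first (outermost) middleware and FetchFromCacheMiddleware the last
    # (innermost) one, hence insert(0, ...) and append(...) below.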
if hasattr(settings, "MIDDLEWARE"):
middleware = settings.MIDDLEWARE
elif hasattr(settings, "MIDDLEWARE_CLASSES"):
middleware = settings.MIDDLEWARE_CLASSES
else:
middleware = None
if middleware is not None:
middleware.insert(0, "django.middleware.cache.UpdateCacheMiddleware")
middleware.append("django.middleware.cache.FetchFromCacheMiddleware")
def test_view_exceptions(sentry_init, client, capture_exceptions, capture_events):
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
exceptions = capture_exceptions()
events = capture_events()
client.get(reverse("view_exc"))
(error,) = exceptions
assert isinstance(error, ZeroDivisionError)
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "django"
def test_ensures_x_forwarded_header_is_honored_in_sdk_when_enabled_in_django(
sentry_init, client, capture_exceptions, capture_events, settings
):
"""
Test that ensures if django settings.USE_X_FORWARDED_HOST is set to True
then the SDK sets the request url to the `HTTP_X_FORWARDED_FOR`
"""
settings.USE_X_FORWARDED_HOST = True
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
exceptions = capture_exceptions()
events = capture_events()
client.get(reverse("view_exc"), headers={"X_FORWARDED_HOST": "example.com"})
(error,) = exceptions
assert isinstance(error, ZeroDivisionError)
(event,) = events
assert event["request"]["url"] == "http://example.com/view-exc"
def test_ensures_x_forwarded_header_is_not_honored_when_unenabled_in_django(
sentry_init, client, capture_exceptions, capture_events
):
"""
Test that ensures if django settings.USE_X_FORWARDED_HOST is set to False
then the SDK sets the request url to the `HTTP_POST`
"""
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
exceptions = capture_exceptions()
events = capture_events()
client.get(reverse("view_exc"), headers={"X_FORWARDED_HOST": "example.com"})
(error,) = exceptions
assert isinstance(error, ZeroDivisionError)
(event,) = events
assert event["request"]["url"] == "http://localhost/view-exc"
def test_middleware_exceptions(sentry_init, client, capture_exceptions):
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
exceptions = capture_exceptions()
client.get(reverse("middleware_exc"))
(error,) = exceptions
assert isinstance(error, ZeroDivisionError)
def test_request_captured(sentry_init, client, capture_events):
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
events = capture_events()
content, status, headers = unpack_werkzeug_response(client.get(reverse("message")))
assert content == b"ok"
(event,) = events
assert event["transaction"] == "/message"
assert event["request"] == {
"cookies": {},
"env": {"SERVER_NAME": "localhost", "SERVER_PORT": "80"},
"headers": {"Host": "localhost"},
"method": "GET",
"query_string": "",
"url": "http://localhost/message",
}
def test_transaction_with_class_view(sentry_init, client, capture_events):
sentry_init(
integrations=[DjangoIntegration(transaction_style="function_name")],
send_default_pii=True,
)
events = capture_events()
content, status, headers = unpack_werkzeug_response(
client.head(reverse("classbased"))
)
assert status.lower() == "200 ok"
(event,) = events
assert (
event["transaction"] == "tests.integrations.django.myapp.views.ClassBasedView"
)
assert event["message"] == "hi"
def test_has_trace_if_performance_enabled(sentry_init, client, capture_events):
sentry_init(
integrations=[DjangoIntegration()],
traces_sample_rate=1.0,
)
events = capture_events()
client.head(reverse("view_exc_with_msg"))
(msg_event, error_event, transaction_event) = events
assert msg_event["contexts"]["trace"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert error_event["contexts"]["trace"]
assert "trace_id" in error_event["contexts"]["trace"]
assert transaction_event["contexts"]["trace"]
assert "trace_id" in transaction_event["contexts"]["trace"]
assert (
msg_event["contexts"]["trace"]["trace_id"]
== error_event["contexts"]["trace"]["trace_id"]
== transaction_event["contexts"]["trace"]["trace_id"]
)
def test_has_trace_if_performance_disabled(sentry_init, client, capture_events):
sentry_init(
integrations=[DjangoIntegration()],
)
events = capture_events()
client.head(reverse("view_exc_with_msg"))
(msg_event, error_event) = events
assert msg_event["contexts"]["trace"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert error_event["contexts"]["trace"]
assert "trace_id" in error_event["contexts"]["trace"]
assert (
msg_event["contexts"]["trace"]["trace_id"]
== error_event["contexts"]["trace"]["trace_id"]
)
def test_trace_from_headers_if_performance_enabled(sentry_init, client, capture_events):
sentry_init(
integrations=[DjangoIntegration()],
traces_sample_rate=1.0,
)
events = capture_events()
trace_id = "582b43a4192642f0b136d5159a501701"
sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
client.head(
reverse("view_exc_with_msg"), headers={"sentry-trace": sentry_trace_header}
)
(msg_event, error_event, transaction_event) = events
assert msg_event["contexts"]["trace"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert error_event["contexts"]["trace"]
assert "trace_id" in error_event["contexts"]["trace"]
assert transaction_event["contexts"]["trace"]
assert "trace_id" in transaction_event["contexts"]["trace"]
assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
assert error_event["contexts"]["trace"]["trace_id"] == trace_id
assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id
def test_trace_from_headers_if_performance_disabled(
sentry_init, client, capture_events
):
sentry_init(
integrations=[DjangoIntegration()],
)
events = capture_events()
trace_id = "582b43a4192642f0b136d5159a501701"
sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
client.head(
reverse("view_exc_with_msg"), headers={"sentry-trace": sentry_trace_header}
)
(msg_event, error_event) = events
assert msg_event["contexts"]["trace"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert error_event["contexts"]["trace"]
assert "trace_id" in error_event["contexts"]["trace"]
assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
assert error_event["contexts"]["trace"]["trace_id"] == trace_id
@pytest.mark.forked
@pytest.mark.django_db
def test_user_captured(sentry_init, client, capture_events):
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
events = capture_events()
content, status, headers = unpack_werkzeug_response(client.get(reverse("mylogin")))
assert content == b"ok"
assert not events
content, status, headers = unpack_werkzeug_response(client.get(reverse("message")))
assert content == b"ok"
(event,) = events
assert event["user"] == {
"email": "lennon@thebeatles.com",
"username": "john",
"id": "1",
}
@pytest.mark.forked
@pytest.mark.django_db
def test_queryset_repr(sentry_init, capture_events):
sentry_init(integrations=[DjangoIntegration()])
events = capture_events()
User.objects.create_user("john", "lennon@thebeatles.com", "johnpassword")
try:
my_queryset = User.objects.all() # noqa
1 / 0
except Exception:
capture_exception()
(event,) = events
(exception,) = event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
(frame,) = exception["stacktrace"]["frames"]
    assert frame["vars"]["my_queryset"].startswith(
        "<QuerySet from django.db.models.query at 0x"
    )


def test_template_tracing_meta(sentry_init, client, capture_events):
    sentry_init(integrations=[DjangoIntegration()])
    events = capture_events()

    content, _, _ = unpack_werkzeug_response(client.get(reverse("template_test3")))
    rendered_meta = content.decode("utf-8")

    traceparent, baggage = events[0]["message"].split("\n")
    assert traceparent != ""
    assert baggage != ""

    match = re.match(
        r'^<meta name="sentry-trace" content="([^"]*)">\n<meta name="baggage" content="([^"]*)">',
        rendered_meta,
    )
assert match is not None
assert match.group(1) == traceparent
# Python 2 does not preserve sort order
rendered_baggage = match.group(2)
assert sorted(rendered_baggage.split(",")) == sorted(baggage.split(","))
@pytest.mark.parametrize("with_executing_integration", [[], [ExecutingIntegration()]])
def test_template_exception(
sentry_init, client, capture_events, with_executing_integration
):
sentry_init(integrations=[DjangoIntegration()] + with_executing_integration)
events = capture_events()
content, status, headers = unpack_werkzeug_response(
client.get(reverse("template_exc"))
)
assert status.lower() == "500 internal server error"
(event,) = events
exception = event["exception"]["values"][-1]
assert exception["type"] == "TemplateSyntaxError"
frames = [
f
for f in exception["stacktrace"]["frames"]
if not f["filename"].startswith("django/")
]
view_frame, template_frame = frames[-2:]
assert template_frame["context_line"] == "{% invalid template tag %}\n"
assert template_frame["pre_context"] == ["5\n", "6\n", "7\n", "8\n", "9\n"]
assert template_frame["post_context"] == ["11\n", "12\n", "13\n", "14\n", "15\n"]
assert template_frame["lineno"] == 10
assert template_frame["filename"].endswith("error.html")
filenames = [
(f.get("function"), f.get("module")) for f in exception["stacktrace"]["frames"]
]
if with_executing_integration:
assert filenames[-3:] == [
("Parser.parse", "django.template.base"),
(None, None),
("Parser.invalid_block_tag", "django.template.base"),
]
else:
assert filenames[-3:] == [
("parse", "django.template.base"),
(None, None),
("invalid_block_tag", "django.template.base"),
]
@pytest.mark.parametrize(
"route", ["rest_framework_exc", "rest_framework_read_body_and_exc"]
)
@pytest.mark.parametrize(
"ct,body",
[
["application/json", {"foo": "bar"}],
["application/json", 1],
["application/json", "foo"],
["application/x-www-form-urlencoded", {"foo": "bar"}],
],
)
def test_rest_framework_basic(
sentry_init, client, capture_events, capture_exceptions, ct, body, route
):
pytest.importorskip("rest_framework")
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
exceptions = capture_exceptions()
events = capture_events()
if ct == "application/json":
client.post(
reverse(route), data=json.dumps(body), content_type="application/json"
)
elif ct == "application/x-www-form-urlencoded":
client.post(reverse(route), data=body)
else:
raise AssertionError("unreachable")
(error,) = exceptions
assert isinstance(error, ZeroDivisionError)
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "django"
assert event["request"]["data"] == body
assert event["request"]["headers"]["Content-Type"] == ct
@pytest.mark.parametrize(
"endpoint", ["rest_permission_denied_exc", "permission_denied_exc"]
)
def test_does_not_capture_403(sentry_init, client, capture_events, endpoint):
if endpoint == "rest_permission_denied_exc":
pytest.importorskip("rest_framework")
sentry_init(integrations=[DjangoIntegration()])
events = capture_events()
_, status, _ = unpack_werkzeug_response(client.get(reverse(endpoint)))
assert status.lower() == "403 forbidden"
assert not events
def test_render_spans(sentry_init, client, capture_events, render_span_tree):
sentry_init(
integrations=[DjangoIntegration()],
traces_sample_rate=1.0,
)
views_tests = [
(
reverse("template_test2"),
'- op="template.render": description="[user_name.html, ...]"',
),
]
if DJANGO_VERSION >= (1, 7):
views_tests.append(
(
reverse("template_test"),
'- op="template.render": description="user_name.html"',
),
)
for url, expected_line in views_tests:
events = capture_events()
client.get(url)
transaction = events[0]
assert expected_line in render_span_tree(transaction)
if DJANGO_VERSION >= (1, 10):
EXPECTED_MIDDLEWARE_SPANS = """\
- op="http.server": description=null
- op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.__call__"
- op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.__call__"
- op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.__call__"
- op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.__call__"
- op="middleware.django": description="tests.integrations.django.myapp.settings.TestFunctionMiddleware.__call__"
- op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
- op="view.render": description="message"\
"""
else:
EXPECTED_MIDDLEWARE_SPANS = """\
- op="http.server": description=null
- op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.process_request"
- op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.process_request"
- op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.process_request"
- op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
- op="view.render": description="message"
- op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.process_response"
- op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_response"
- op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.process_response"\
"""
def test_middleware_spans(sentry_init, client, capture_events, render_span_tree):
sentry_init(
integrations=[
DjangoIntegration(signals_spans=False),
],
traces_sample_rate=1.0,
)
events = capture_events()
client.get(reverse("message"))
message, transaction = events
assert message["message"] == "hi"
assert render_span_tree(transaction) == EXPECTED_MIDDLEWARE_SPANS
def test_middleware_spans_disabled(sentry_init, client, capture_events):
sentry_init(
integrations=[
DjangoIntegration(middleware_spans=False, signals_spans=False),
],
traces_sample_rate=1.0,
)
events = capture_events()
client.get(reverse("message"))
message, transaction = events
assert message["message"] == "hi"
assert not len(transaction["spans"])
EXPECTED_SIGNALS_SPANS = """\
- op="http.server": description=null
- op="event.django": description="django.db.reset_queries"
- op="event.django": description="django.db.close_old_connections"\
"""
def test_signals_spans(sentry_init, client, capture_events, render_span_tree):
sentry_init(
integrations=[
DjangoIntegration(middleware_spans=False),
],
traces_sample_rate=1.0,
)
events = capture_events()
client.get(reverse("message"))
message, transaction = events
assert message["message"] == "hi"
assert render_span_tree(transaction) == EXPECTED_SIGNALS_SPANS
assert transaction["spans"][0]["op"] == "event.django"
assert transaction["spans"][0]["description"] == "django.db.reset_queries"
assert transaction["spans"][1]["op"] == "event.django"
assert transaction["spans"][1]["description"] == "django.db.close_old_connections"
def test_signals_spans_disabled(sentry_init, client, capture_events):
sentry_init(
integrations=[
DjangoIntegration(middleware_spans=False, signals_spans=False),
],
traces_sample_rate=1.0,
)
events = capture_events()
client.get(reverse("message"))
message, transaction = events
assert message["message"] == "hi"
assert not transaction["spans"]
def test_csrf(sentry_init, client):
"""
Assert that the CSRF view decorator works even with the view wrapped in our own
callable.
"""
sentry_init(integrations=[DjangoIntegration()])
content, status, _headers = unpack_werkzeug_response(
client.post(reverse("csrf_hello_not_exempt"))
)
assert status.lower() == "403 forbidden"
content, status, _headers = unpack_werkzeug_response(
client.post(reverse("sentryclass_csrf"))
)
assert status.lower() == "403 forbidden"
content, status, _headers = unpack_werkzeug_response(
client.post(reverse("sentryclass"))
)
assert status.lower() == "200 ok"
assert content == b"ok"
content, status, _headers = unpack_werkzeug_response(
client.post(reverse("classbased"))
)
assert status.lower() == "200 ok"
assert content == b"ok"
content, status, _headers = unpack_werkzeug_response(
client.post(reverse("message"))
)
assert status.lower() == "200 ok"
assert content == b"ok"
@pytest.mark.skipif(DJANGO_VERSION < (2, 0), reason="Requires Django > 2.0")
def test_custom_urlconf_middleware(
settings, sentry_init, client, capture_events, render_span_tree
):
"""
Some middlewares (for instance in django-tenants) overwrite request.urlconf.
Test that the resolver picks up the correct urlconf for transaction naming.
"""
urlconf = "tests.integrations.django.myapp.middleware.custom_urlconf_middleware"
settings.ROOT_URLCONF = ""
settings.MIDDLEWARE.insert(0, urlconf)
client.application.load_middleware()
sentry_init(integrations=[DjangoIntegration()], traces_sample_rate=1.0)
events = capture_events()
content, status, _headers = unpack_werkzeug_response(client.get("/custom/ok"))
assert status.lower() == "200 ok"
assert content == b"custom ok"
event = events.pop(0)
assert event["transaction"] == "/custom/ok"
assert "custom_urlconf_middleware" in render_span_tree(event)
_content, status, _headers = unpack_werkzeug_response(client.get("/custom/exc"))
assert status.lower() == "500 internal server error"
error_event, transaction_event = events
assert error_event["transaction"] == "/custom/exc"
assert error_event["exception"]["values"][-1]["mechanism"]["type"] == "django"
assert transaction_event["transaction"] == "/custom/exc"
assert "custom_urlconf_middleware" in render_span_tree(transaction_event)
settings.MIDDLEWARE.pop(0)
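# A minimal sketch (not part of the original suite) of the kind of middleware
# the docstring above refers to: django-tenants-style middleware overwrites
# request.urlconf per request, and the resolver must pick that urlconf up for
# transaction naming. The urlconf module path below is hypothetical.
def _example_custom_urlconf_middleware(get_response):
    def middleware(request):
        # Point URL resolution at a tenant-specific urlconf (hypothetical module).
        request.urlconf = "tests.integrations.django.myapp.tenant_urls"
        return get_response(request)
    return middleware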
def test_get_receiver_name():
def dummy(a, b):
return a + b
name = _get_receiver_name(dummy)
if PY2:
assert name == "tests.integrations.django.test_basic.dummy"
else:
assert (
name
== "tests.integrations.django.test_basic.test_get_receiver_name.<locals>.dummy"
)
a_partial = partial(dummy)
name = _get_receiver_name(a_partial)
if PY310:
assert name == "functools.partial(<function " + a_partial.func.__name__ + ">)"
else:
assert name == "partial(<function " + a_partial.func.__name__ + ">)"
@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
def test_cache_spans_disabled_middleware(
sentry_init, client, capture_events, use_django_caching_with_middlewares
):
sentry_init(
integrations=[
DjangoIntegration(
cache_spans=False,
middleware_spans=False,
signals_spans=False,
)
],
traces_sample_rate=1.0,
)
events = capture_events()
client.get(reverse("not_cached_view"))
client.get(reverse("not_cached_view"))
(first_event, second_event) = events
assert len(first_event["spans"]) == 0
assert len(second_event["spans"]) == 0
@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
def test_cache_spans_disabled_decorator(
sentry_init, client, capture_events, use_django_caching
):
sentry_init(
integrations=[
DjangoIntegration(
cache_spans=False,
middleware_spans=False,
signals_spans=False,
)
],
traces_sample_rate=1.0,
)
events = capture_events()
client.get(reverse("cached_view"))
client.get(reverse("cached_view"))
(first_event, second_event) = events
assert len(first_event["spans"]) == 0
assert len(second_event["spans"]) == 0
@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
def test_cache_spans_disabled_templatetag(
sentry_init, client, capture_events, use_django_caching
):
sentry_init(
integrations=[
DjangoIntegration(
cache_spans=False,
middleware_spans=False,
signals_spans=False,
)
],
traces_sample_rate=1.0,
)
events = capture_events()
client.get(reverse("view_with_cached_template_fragment"))
client.get(reverse("view_with_cached_template_fragment"))
(first_event, second_event) = events
assert len(first_event["spans"]) == 0
assert len(second_event["spans"]) == 0
@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
def test_cache_spans_middleware(
sentry_init, client, capture_events, use_django_caching_with_middlewares
):
sentry_init(
integrations=[
DjangoIntegration(
cache_spans=True,
middleware_spans=False,
signals_spans=False,
)
],
traces_sample_rate=1.0,
)
client.application.load_middleware()
events = capture_events()
client.get(reverse("not_cached_view"))
client.get(reverse("not_cached_view"))
(first_event, second_event) = events
assert len(first_event["spans"]) == 1
assert first_event["spans"][0]["op"] == "cache.get_item"
assert first_event["spans"][0]["description"].startswith(
"get views.decorators.cache.cache_header."
)
assert first_event["spans"][0]["data"] == {"cache.hit": False}
assert len(second_event["spans"]) == 2
assert second_event["spans"][0]["op"] == "cache.get_item"
assert second_event["spans"][0]["description"].startswith(
"get views.decorators.cache.cache_header."
)
assert second_event["spans"][0]["data"] == {"cache.hit": False}
assert second_event["spans"][1]["op"] == "cache.get_item"
assert second_event["spans"][1]["description"].startswith(
"get views.decorators.cache.cache_page."
)
assert second_event["spans"][1]["data"]["cache.hit"]
assert "cache.item_size" in second_event["spans"][1]["data"]
@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
def test_cache_spans_decorator(sentry_init, client, capture_events, use_django_caching):
sentry_init(
integrations=[
DjangoIntegration(
cache_spans=True,
middleware_spans=False,
signals_spans=False,
)
],
traces_sample_rate=1.0,
)
events = capture_events()
client.get(reverse("cached_view"))
client.get(reverse("cached_view"))
(first_event, second_event) = events
assert len(first_event["spans"]) == 1
assert first_event["spans"][0]["op"] == "cache.get_item"
assert first_event["spans"][0]["description"].startswith(
"get views.decorators.cache.cache_header."
)
assert first_event["spans"][0]["data"] == {"cache.hit": False}
assert len(second_event["spans"]) == 2
assert second_event["spans"][0]["op"] == "cache.get_item"
assert second_event["spans"][0]["description"].startswith(
"get views.decorators.cache.cache_header."
)
assert second_event["spans"][0]["data"] == {"cache.hit": False}
assert second_event["spans"][1]["op"] == "cache.get_item"
assert second_event["spans"][1]["description"].startswith(
"get views.decorators.cache.cache_page."
)
assert second_event["spans"][1]["data"]["cache.hit"]
assert "cache.item_size" in second_event["spans"][1]["data"]
@pytest.mark.forked
@pytest_mark_django_db_decorator()
@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
def test_cache_spans_templatetag(
sentry_init, client, capture_events, use_django_caching
):
sentry_init(
integrations=[
DjangoIntegration(
cache_spans=True,
middleware_spans=False,
signals_spans=False,
)
],
traces_sample_rate=1.0,
)
events = capture_events()
client.get(reverse("view_with_cached_template_fragment"))
client.get(reverse("view_with_cached_template_fragment"))
(first_event, second_event) = events
assert len(first_event["spans"]) == 1
assert first_event["spans"][0]["op"] == "cache.get_item"
assert first_event["spans"][0]["description"].startswith(
"get template.cache.some_identifier."
)
assert first_event["spans"][0]["data"] == {"cache.hit": False}
assert len(second_event["spans"]) == 1
assert second_event["spans"][0]["op"] == "cache.get_item"
assert second_event["spans"][0]["description"].startswith(
"get template.cache.some_identifier."
)
assert second_event["spans"][0]["data"]["cache.hit"]
assert "cache.item_size" in second_event["spans"][0]["data"]
@pytest.mark.parametrize(
"method_name, args, kwargs, expected_description",
[
("get", None, None, "get "),
("get", [], {}, "get "),
("get", ["bla", "blub", "foo"], {}, "get bla"),
(
"get_many",
[["bla 1", "bla 2", "bla 3"], "blub", "foo"],
{},
"get_many ['bla 1', 'bla 2', 'bla 3']",
),
(
"get_many",
[["bla 1", "bla 2", "bla 3"], "blub", "foo"],
{"key": "bar"},
"get_many ['bla 1', 'bla 2', 'bla 3']",
),
("get", [], {"key": "bar"}, "get bar"),
(
"get",
"something",
{},
"get s",
), # this should never happen, just making sure that we are not raising an exception in that case.
],
)
def test_cache_spans_get_span_description(
method_name, args, kwargs, expected_description
):
assert _get_span_description(method_name, args, kwargs) == expected_description
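# A hedged sketch of the behaviour the parametrization above pins down (the
# real helper lives in sentry_sdk's Django caching instrumentation): the first
# positional argument wins, then the `key` keyword, otherwise the description
# is just the method name plus a trailing space.
def _example_get_span_description(method_name, args, kwargs):
    key = ""
    if args is not None and len(args) >= 1:
        key = args[0]
    elif kwargs is not None and "key" in kwargs:
        key = kwargs["key"]
    return "{} {}".format(method_name, key)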
sentry-python-1.39.2/tests/integrations/django/test_data_scrubbing.py 0000664 0000000 0000000 00000004610 14547447232 0026160 0 ustar 00root root 0000000 0000000 import pytest
from werkzeug.test import Client
from sentry_sdk.integrations.django import DjangoIntegration
from tests.conftest import werkzeug_set_cookie
from tests.integrations.django.myapp.wsgi import application
from tests.integrations.django.utils import pytest_mark_django_db_decorator
try:
from django.urls import reverse
except ImportError:
from django.core.urlresolvers import reverse
@pytest.fixture
def client():
return Client(application)
@pytest.mark.forked
@pytest_mark_django_db_decorator()
def test_scrub_django_session_cookies_removed(
sentry_init,
client,
capture_events,
):
sentry_init(integrations=[DjangoIntegration()], send_default_pii=False)
events = capture_events()
werkzeug_set_cookie(client, "localhost", "sessionid", "123")
werkzeug_set_cookie(client, "localhost", "csrftoken", "456")
werkzeug_set_cookie(client, "localhost", "foo", "bar")
client.get(reverse("view_exc"))
(event,) = events
assert "cookies" not in event["request"]
@pytest.mark.forked
@pytest_mark_django_db_decorator()
def test_scrub_django_session_cookies_filtered(
sentry_init,
client,
capture_events,
):
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
events = capture_events()
werkzeug_set_cookie(client, "localhost", "sessionid", "123")
werkzeug_set_cookie(client, "localhost", "csrftoken", "456")
werkzeug_set_cookie(client, "localhost", "foo", "bar")
client.get(reverse("view_exc"))
(event,) = events
assert event["request"]["cookies"] == {
"sessionid": "[Filtered]",
"csrftoken": "[Filtered]",
"foo": "bar",
}
@pytest.mark.forked
@pytest_mark_django_db_decorator()
def test_scrub_django_custom_session_cookies_filtered(
sentry_init,
client,
capture_events,
settings,
):
settings.SESSION_COOKIE_NAME = "my_sess"
settings.CSRF_COOKIE_NAME = "csrf_secret"
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
events = capture_events()
werkzeug_set_cookie(client, "localhost", "my_sess", "123")
werkzeug_set_cookie(client, "localhost", "csrf_secret", "456")
werkzeug_set_cookie(client, "localhost", "foo", "bar")
client.get(reverse("view_exc"))
(event,) = events
assert event["request"]["cookies"] == {
"my_sess": "[Filtered]",
"csrf_secret": "[Filtered]",
"foo": "bar",
}
sentry-python-1.39.2/tests/integrations/django/test_db_query_data.py 0000664 0000000 0000000 00000017045 14547447232 0026022 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import os
import pytest
from django import VERSION as DJANGO_VERSION
from django.db import connections
try:
from django.urls import reverse
except ImportError:
from django.core.urlresolvers import reverse
from werkzeug.test import Client
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.django import DjangoIntegration
from tests.conftest import unpack_werkzeug_response
from tests.integrations.django.utils import pytest_mark_django_db_decorator
from tests.integrations.django.myapp.wsgi import application
@pytest.fixture
def client():
return Client(application)
@pytest.mark.forked
@pytest_mark_django_db_decorator(transaction=True)
@pytest.mark.parametrize("enable_db_query_source", [None, False])
def test_query_source_disabled(
sentry_init, client, capture_events, enable_db_query_source
):
sentry_options = {
"integrations": [DjangoIntegration()],
"send_default_pii": True,
"traces_sample_rate": 1.0,
}
if enable_db_query_source is not None:
sentry_options["enable_db_query_source"] = enable_db_query_source
sentry_options["db_query_source_threshold_ms"] = 0
sentry_init(**sentry_options)
if "postgres" not in connections:
pytest.skip("postgres tests disabled")
# trigger Django to open a new connection by marking the existing one as None.
connections["postgres"].connection = None
events = capture_events()
_, status, _ = unpack_werkzeug_response(client.get(reverse("postgres_select_orm")))
assert status == "200 OK"
(event,) = events
for span in event["spans"]:
if span.get("op") == "db" and "auth_user" in span.get("description"):
data = span.get("data", {})
assert SPANDATA.CODE_LINENO not in data
assert SPANDATA.CODE_NAMESPACE not in data
assert SPANDATA.CODE_FILEPATH not in data
assert SPANDATA.CODE_FUNCTION not in data
break
else:
raise AssertionError("No db span found")
@pytest.mark.forked
@pytest_mark_django_db_decorator(transaction=True)
def test_query_source(sentry_init, client, capture_events):
sentry_init(
integrations=[DjangoIntegration()],
send_default_pii=True,
traces_sample_rate=1.0,
enable_db_query_source=True,
db_query_source_threshold_ms=0,
)
if "postgres" not in connections:
pytest.skip("postgres tests disabled")
# trigger Django to open a new connection by marking the existing one as None.
connections["postgres"].connection = None
events = capture_events()
_, status, _ = unpack_werkzeug_response(client.get(reverse("postgres_select_orm")))
assert status == "200 OK"
(event,) = events
for span in event["spans"]:
if span.get("op") == "db" and "auth_user" in span.get("description"):
data = span.get("data", {})
assert SPANDATA.CODE_LINENO in data
assert SPANDATA.CODE_NAMESPACE in data
assert SPANDATA.CODE_FILEPATH in data
assert SPANDATA.CODE_FUNCTION in data
assert type(data.get(SPANDATA.CODE_LINENO)) == int
assert data.get(SPANDATA.CODE_LINENO) > 0
assert (
data.get(SPANDATA.CODE_NAMESPACE)
== "tests.integrations.django.myapp.views"
)
assert data.get(SPANDATA.CODE_FILEPATH).endswith(
"tests/integrations/django/myapp/views.py"
)
is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
assert is_relative_path
assert data.get(SPANDATA.CODE_FUNCTION) == "postgres_select_orm"
break
else:
raise AssertionError("No db span found")
@pytest.mark.forked
@pytest_mark_django_db_decorator(transaction=True)
def test_query_source_with_in_app_exclude(sentry_init, client, capture_events):
sentry_init(
integrations=[DjangoIntegration()],
send_default_pii=True,
traces_sample_rate=1.0,
enable_db_query_source=True,
db_query_source_threshold_ms=0,
in_app_exclude=["tests.integrations.django.myapp.views"],
)
if "postgres" not in connections:
pytest.skip("postgres tests disabled")
# trigger Django to open a new connection by marking the existing one as None.
connections["postgres"].connection = None
events = capture_events()
_, status, _ = unpack_werkzeug_response(client.get(reverse("postgres_select_orm")))
assert status == "200 OK"
(event,) = events
for span in event["spans"]:
if span.get("op") == "db" and "auth_user" in span.get("description"):
data = span.get("data", {})
assert SPANDATA.CODE_LINENO in data
assert SPANDATA.CODE_NAMESPACE in data
assert SPANDATA.CODE_FILEPATH in data
assert SPANDATA.CODE_FUNCTION in data
assert type(data.get(SPANDATA.CODE_LINENO)) == int
assert data.get(SPANDATA.CODE_LINENO) > 0
if DJANGO_VERSION >= (1, 11):
assert (
data.get(SPANDATA.CODE_NAMESPACE)
== "tests.integrations.django.myapp.settings"
)
assert data.get(SPANDATA.CODE_FILEPATH).endswith(
"tests/integrations/django/myapp/settings.py"
)
assert data.get(SPANDATA.CODE_FUNCTION) == "middleware"
else:
assert (
data.get(SPANDATA.CODE_NAMESPACE)
== "tests.integrations.django.test_db_query_data"
)
assert data.get(SPANDATA.CODE_FILEPATH).endswith(
"tests/integrations/django/test_db_query_data.py"
)
assert (
data.get(SPANDATA.CODE_FUNCTION)
== "test_query_source_with_in_app_exclude"
)
break
else:
raise AssertionError("No db span found")
@pytest.mark.forked
@pytest_mark_django_db_decorator(transaction=True)
def test_query_source_with_in_app_include(sentry_init, client, capture_events):
sentry_init(
integrations=[DjangoIntegration()],
send_default_pii=True,
traces_sample_rate=1.0,
enable_db_query_source=True,
db_query_source_threshold_ms=0,
in_app_include=["django"],
)
if "postgres" not in connections:
pytest.skip("postgres tests disabled")
# trigger Django to open a new connection by marking the existing one as None.
connections["postgres"].connection = None
events = capture_events()
_, status, _ = unpack_werkzeug_response(client.get(reverse("postgres_select_orm")))
assert status == "200 OK"
(event,) = events
for span in event["spans"]:
if span.get("op") == "db" and "auth_user" in span.get("description"):
data = span.get("data", {})
assert SPANDATA.CODE_LINENO in data
assert SPANDATA.CODE_NAMESPACE in data
assert SPANDATA.CODE_FILEPATH in data
assert SPANDATA.CODE_FUNCTION in data
assert type(data.get(SPANDATA.CODE_LINENO)) == int
assert data.get(SPANDATA.CODE_LINENO) > 0
assert data.get(SPANDATA.CODE_NAMESPACE) == "django.db.models.sql.compiler"
assert data.get(SPANDATA.CODE_FILEPATH).endswith(
"django/db/models/sql/compiler.py"
)
assert data.get(SPANDATA.CODE_FUNCTION) == "execute_sql"
break
else:
raise AssertionError("No db span found")
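# For reference, a sketch of the options the three query-source tests above
# exercise: the SDK walks the stack to the first in-app frame, and
# in_app_exclude / in_app_include shift which frames count as in-app.
#
#     sentry_init(
#         enable_db_query_source=True,
#         db_query_source_threshold_ms=0,
#         in_app_exclude=["tests.integrations.django.myapp.views"],  # skip app views
#         in_app_include=["django"],  # treat django frames as in-app
#     )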
sentry-python-1.39.2/tests/integrations/django/test_transactions.py 0000664 0000000 0000000 00000007755 14547447232 0025736 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import pytest
import django
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
# django<2.0 has only `url` with regex based patterns.
# django>=2.0 renames `url` to `re_path`, and additionally introduces `path`
# for new style URL patterns, e.g. <int:minor_version>.
if django.VERSION >= (2, 0):
from django.urls import path, re_path
from django.urls.converters import PathConverter
from django.conf.urls import include
else:
from django.conf.urls import url as re_path, include
if django.VERSION < (1, 9):
included_url_conf = (re_path(r"^foo/bar/(?P<param>[\w]+)", lambda x: ""),), "", ""
else:
included_url_conf = ((re_path(r"^foo/bar/(?P<param>[\w]+)", lambda x: ""),), "")
from sentry_sdk.integrations.django.transactions import RavenResolver
example_url_conf = (
re_path(r"^api/(?P<project_id>[\w_-]+)/store/$", lambda x: ""),
re_path(r"^api/(?P<version>(v1|v2))/author/$", lambda x: ""),
re_path(
r"^api/(?P<project_id>[^\/]+)/product/(?P<pid>(?:\d+|[A-Fa-f0-9-]{32,36}))/$",
lambda x: "",
),
re_path(r"^report/", lambda x: ""),
re_path(r"^example/", include(included_url_conf)),
)
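# Illustration (assumed equivalence, not used by the tests below): under
# Django>=2.0 a converter-based `path` pattern and its regex `re_path`
# counterpart describe the same route, and RavenResolver renders both as
# /api/{project_id}/store/.
if django.VERSION >= (2, 0):
    _equivalent_url_conf = (
        path("api/<int:project_id>/store/", lambda x: ""),
        re_path(r"^api/(?P<project_id>[0-9]+)/store/$", lambda x: ""),
    )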
def test_resolver_no_match():
resolver = RavenResolver()
result = resolver.resolve("/foo/bar", example_url_conf)
assert result is None
def test_resolver_re_path_complex_match():
resolver = RavenResolver()
result = resolver.resolve("/api/1234/store/", example_url_conf)
assert result == "/api/{project_id}/store/"
def test_resolver_re_path_complex_either_match():
resolver = RavenResolver()
result = resolver.resolve("/api/v1/author/", example_url_conf)
assert result == "/api/{version}/author/"
result = resolver.resolve("/api/v2/author/", example_url_conf)
assert result == "/api/{version}/author/"
def test_resolver_re_path_included_match():
resolver = RavenResolver()
result = resolver.resolve("/example/foo/bar/baz", example_url_conf)
assert result == "/example/foo/bar/{param}"
def test_resolver_re_path_multiple_groups():
resolver = RavenResolver()
result = resolver.resolve(
"/api/myproject/product/cb4ef1caf3554c34ae134f3c1b3d605f/", example_url_conf
)
assert result == "/api/{project_id}/product/{pid}/"
@pytest.mark.skipif(
django.VERSION < (2, 0),
reason="Django>=2.0 required for <converter:parameter> patterns",
)
def test_resolver_path_group():
url_conf = (path("api/v2/<int:project_id>/store/", lambda x: ""),)
resolver = RavenResolver()
result = resolver.resolve("/api/v2/1234/store/", url_conf)
assert result == "/api/v2/{project_id}/store/"
@pytest.mark.skipif(
django.VERSION < (2, 0),
reason="Django>=2.0 required for <converter:parameter> patterns",
)
def test_resolver_path_multiple_groups():
url_conf = (path("api/v2/<project_id>/product/<int:pid>", lambda x: ""),)
resolver = RavenResolver()
result = resolver.resolve("/api/v2/myproject/product/5689", url_conf)
assert result == "/api/v2/{project_id}/product/{pid}"
@pytest.mark.skipif(
django.VERSION < (2, 0),
reason="Django>=2.0 required for <converter:parameter> patterns",
)
def test_resolver_path_complex_path():
class CustomPathConverter(PathConverter):
regex = r"[^/]+(/[^/]+){0,2}"
with mock.patch(
"django.urls.resolvers.get_converter", return_value=CustomPathConverter
):
url_conf = (path("api/v3/<custom_path:my_path>", lambda x: ""),)
resolver = RavenResolver()
result = resolver.resolve("/api/v3/abc/def/ghi", url_conf)
assert result == "/api/v3/{my_path}"
@pytest.mark.skipif(
django.VERSION < (2, 0),
reason="Django>=2.0 required for <converter:parameter> patterns",
)
def test_resolver_path_no_converter():
url_conf = (path("api/v4/<project_id>", lambda x: ""),)
resolver = RavenResolver()
result = resolver.resolve("/api/v4/myproject", url_conf)
assert result == "/api/v4/{project_id}"
sentry-python-1.39.2/tests/integrations/django/utils.py 0000664 0000000 0000000 00000001331 14547447232 0023307 0 ustar 00root root 0000000 0000000 from functools import partial
import pytest
import pytest_django
# Hack to keep the experimental feature introduced in pytest-django `4.3.0`,
# which requires explicitly allowing database access, from failing the tests.
pytest_mark_django_db_decorator = partial(pytest.mark.django_db)
try:
pytest_version = tuple(map(int, pytest_django.__version__.split(".")))
if pytest_version > (4, 2, 0):
pytest_mark_django_db_decorator = partial(
pytest.mark.django_db, databases="__all__"
)
except ValueError:
if "dev" in pytest_django.__version__:
pytest_mark_django_db_decorator = partial(
pytest.mark.django_db, databases="__all__"
)
except AttributeError:
pass
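# Usage sketch: the factory above is applied exactly like the plain marker,
# e.g.
#
#     @pytest_mark_django_db_decorator(transaction=True)
#     def test_something(sentry_init, client):
#         ...
#
# so that `databases="__all__"` is only injected on pytest-django versions
# that understand it.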
sentry-python-1.39.2/tests/integrations/excepthook/ 0000775 0000000 0000000 00000000000 14547447232 0022506 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/excepthook/test_excepthook.py 0000664 0000000 0000000 00000003305 14547447232 0026271 0 ustar 00root root 0000000 0000000 import pytest
import sys
import subprocess
from textwrap import dedent
def test_excepthook(tmpdir):
app = tmpdir.join("app.py")
app.write(
dedent(
"""
from sentry_sdk import init, transport
def send_event(self, event):
print("capture event was called")
print(event)
transport.HttpTransport._send_event = send_event
init("http://foobar@localhost/123")
frame_value = "LOL"
1/0
"""
)
)
with pytest.raises(subprocess.CalledProcessError) as excinfo:
subprocess.check_output([sys.executable, str(app)], stderr=subprocess.STDOUT)
output = excinfo.value.output
print(output)
assert b"ZeroDivisionError" in output
assert b"LOL" in output
assert b"capture event was called" in output
def test_always_value_excepthook(tmpdir):
app = tmpdir.join("app.py")
app.write(
dedent(
"""
import sys
from sentry_sdk import init, transport
from sentry_sdk.integrations.excepthook import ExcepthookIntegration
def send_event(self, event):
print("capture event was called")
print(event)
transport.HttpTransport._send_event = send_event
sys.ps1 = "always_value_test"
init("http://foobar@localhost/123",
integrations=[ExcepthookIntegration(always_run=True)]
)
frame_value = "LOL"
1/0
"""
)
)
with pytest.raises(subprocess.CalledProcessError) as excinfo:
subprocess.check_output([sys.executable, str(app)], stderr=subprocess.STDOUT)
output = excinfo.value.output
print(output)
assert b"ZeroDivisionError" in output
assert b"LOL" in output
assert b"capture event was called" in output
sentry-python-1.39.2/tests/integrations/falcon/ 0000775 0000000 0000000 00000000000 14547447232 0021577 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/falcon/__init__.py 0000664 0000000 0000000 00000000055 14547447232 0023710 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("falcon")
sentry-python-1.39.2/tests/integrations/falcon/test_falcon.py 0000664 0000000 0000000 00000030223 14547447232 0024452 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import logging
import pytest
import falcon
import falcon.testing
import sentry_sdk
from sentry_sdk.integrations.falcon import FalconIntegration
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.utils import parse_version
try:
import falcon.asgi
except ImportError:
pass
else:
import falcon.inspect # We only need this module for the ASGI test
FALCON_VERSION = parse_version(falcon.__version__)
@pytest.fixture
def make_app(sentry_init):
def inner():
class MessageResource:
def on_get(self, req, resp):
sentry_sdk.capture_message("hi")
resp.media = "hi"
class MessageByIdResource:
def on_get(self, req, resp, message_id):
sentry_sdk.capture_message("hi")
resp.media = "hi"
class CustomError(Exception):
pass
class CustomErrorResource:
def on_get(self, req, resp):
raise CustomError()
def custom_error_handler(*args, **kwargs):
raise falcon.HTTPError(status=falcon.HTTP_400)
app = falcon.API()
app.add_route("/message", MessageResource())
app.add_route("/message/{message_id:int}", MessageByIdResource())
app.add_route("/custom-error", CustomErrorResource())
app.add_error_handler(CustomError, custom_error_handler)
return app
return inner
@pytest.fixture
def make_client(make_app):
def inner():
app = make_app()
return falcon.testing.TestClient(app)
return inner
def test_has_context(sentry_init, capture_events, make_client):
sentry_init(integrations=[FalconIntegration()])
events = capture_events()
client = make_client()
response = client.simulate_get("/message")
assert response.status == falcon.HTTP_200
(event,) = events
assert event["transaction"] == "/message" # Falcon URI template
assert "data" not in event["request"]
assert event["request"]["url"] == "http://falconframework.org/message"
@pytest.mark.parametrize(
"url,transaction_style,expected_transaction,expected_source",
[
("/message", "uri_template", "/message", "route"),
("/message", "path", "/message", "url"),
("/message/123456", "uri_template", "/message/{message_id:int}", "route"),
("/message/123456", "path", "/message/123456", "url"),
],
)
def test_transaction_style(
sentry_init,
make_client,
capture_events,
url,
transaction_style,
expected_transaction,
expected_source,
):
integration = FalconIntegration(transaction_style=transaction_style)
sentry_init(integrations=[integration])
events = capture_events()
client = make_client()
response = client.simulate_get(url)
assert response.status == falcon.HTTP_200
(event,) = events
assert event["transaction"] == expected_transaction
assert event["transaction_info"] == {"source": expected_source}
def test_unhandled_errors(sentry_init, capture_exceptions, capture_events):
sentry_init(integrations=[FalconIntegration()], debug=True)
class Resource:
def on_get(self, req, resp):
1 / 0
app = falcon.API()
app.add_route("/", Resource())
exceptions = capture_exceptions()
events = capture_events()
client = falcon.testing.TestClient(app)
try:
client.simulate_get("/")
except ZeroDivisionError:
pass
(exc,) = exceptions
assert isinstance(exc, ZeroDivisionError)
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "falcon"
assert " by zero" in event["exception"]["values"][0]["value"]
def test_raised_5xx_errors(sentry_init, capture_exceptions, capture_events):
sentry_init(integrations=[FalconIntegration()], debug=True)
class Resource:
def on_get(self, req, resp):
raise falcon.HTTPError(falcon.HTTP_502)
app = falcon.API()
app.add_route("/", Resource())
exceptions = capture_exceptions()
events = capture_events()
client = falcon.testing.TestClient(app)
client.simulate_get("/")
(exc,) = exceptions
assert isinstance(exc, falcon.HTTPError)
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "falcon"
assert event["exception"]["values"][0]["type"] == "HTTPError"
def test_raised_4xx_errors(sentry_init, capture_exceptions, capture_events):
sentry_init(integrations=[FalconIntegration()], debug=True)
class Resource:
def on_get(self, req, resp):
raise falcon.HTTPError(falcon.HTTP_400)
app = falcon.API()
app.add_route("/", Resource())
exceptions = capture_exceptions()
events = capture_events()
client = falcon.testing.TestClient(app)
client.simulate_get("/")
assert len(exceptions) == 0
assert len(events) == 0
def test_http_status(sentry_init, capture_exceptions, capture_events):
"""
This just demonstrates that if Falcon raises an HTTPStatus with code 500
(instead of an HTTPError with code 500), Sentry will not capture it.
"""
sentry_init(integrations=[FalconIntegration()], debug=True)
class Resource:
def on_get(self, req, resp):
raise falcon.http_status.HTTPStatus(falcon.HTTP_508)
app = falcon.API()
app.add_route("/", Resource())
exceptions = capture_exceptions()
events = capture_events()
client = falcon.testing.TestClient(app)
client.simulate_get("/")
assert len(exceptions) == 0
assert len(events) == 0
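# For contrast with the test above, a sketch of the two spellings: a 5xx
# HTTPError is captured (see test_raised_5xx_errors), while an HTTPStatus
# carrying a 5xx code is not.
#
#     raise falcon.HTTPError(falcon.HTTP_502)                # captured
#     raise falcon.http_status.HTTPStatus(falcon.HTTP_508)   # not captured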
def test_falcon_large_json_request(sentry_init, capture_events):
sentry_init(integrations=[FalconIntegration()])
data = {"foo": {"bar": "a" * 2000}}
class Resource:
def on_post(self, req, resp):
assert req.media == data
sentry_sdk.capture_message("hi")
resp.media = "ok"
app = falcon.API()
app.add_route("/", Resource())
events = capture_events()
client = falcon.testing.TestClient(app)
response = client.simulate_post("/", json=data)
assert response.status == falcon.HTTP_200
(event,) = events
assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
"": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
}
assert len(event["request"]["data"]["foo"]["bar"]) == 1024
@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
def test_falcon_empty_json_request(sentry_init, capture_events, data):
sentry_init(integrations=[FalconIntegration()])
class Resource:
def on_post(self, req, resp):
assert req.media == data
sentry_sdk.capture_message("hi")
resp.media = "ok"
app = falcon.API()
app.add_route("/", Resource())
events = capture_events()
client = falcon.testing.TestClient(app)
response = client.simulate_post("/", json=data)
assert response.status == falcon.HTTP_200
(event,) = events
assert event["request"]["data"] == data
def test_falcon_raw_data_request(sentry_init, capture_events):
sentry_init(integrations=[FalconIntegration()])
class Resource:
def on_post(self, req, resp):
sentry_sdk.capture_message("hi")
resp.media = "ok"
app = falcon.API()
app.add_route("/", Resource())
events = capture_events()
client = falcon.testing.TestClient(app)
response = client.simulate_post("/", body="hi")
assert response.status == falcon.HTTP_200
(event,) = events
assert event["request"]["headers"]["Content-Length"] == "2"
assert event["request"]["data"] == ""
def test_logging(sentry_init, capture_events):
sentry_init(
integrations=[FalconIntegration(), LoggingIntegration(event_level="ERROR")]
)
logger = logging.getLogger()
app = falcon.API()
class Resource:
def on_get(self, req, resp):
logger.error("hi")
resp.media = "ok"
app.add_route("/", Resource())
events = capture_events()
client = falcon.testing.TestClient(app)
client.simulate_get("/")
(event,) = events
assert event["level"] == "error"
def test_500(sentry_init, capture_events):
sentry_init(integrations=[FalconIntegration()])
app = falcon.API()
class Resource:
def on_get(self, req, resp):
1 / 0
app.add_route("/", Resource())
def http500_handler(ex, req, resp, params):
sentry_sdk.capture_exception(ex)
resp.media = {"message": "Sentry error: %s" % sentry_sdk.last_event_id()}
app.add_error_handler(Exception, http500_handler)
events = capture_events()
client = falcon.testing.TestClient(app)
response = client.simulate_get("/")
(event,) = events
assert response.json == {"message": "Sentry error: %s" % event["event_id"]}
def test_error_in_errorhandler(sentry_init, capture_events):
sentry_init(integrations=[FalconIntegration()])
app = falcon.API()
class Resource:
def on_get(self, req, resp):
raise ValueError()
app.add_route("/", Resource())
def http500_handler(ex, req, resp, params):
1 / 0
app.add_error_handler(Exception, http500_handler)
events = capture_events()
client = falcon.testing.TestClient(app)
with pytest.raises(ZeroDivisionError):
client.simulate_get("/")
(event,) = events
last_ex_values = event["exception"]["values"][-1]
assert last_ex_values["type"] == "ZeroDivisionError"
assert last_ex_values["stacktrace"]["frames"][-1]["vars"]["ex"] == "ValueError()"
def test_bad_request_not_captured(sentry_init, capture_events):
sentry_init(integrations=[FalconIntegration()])
events = capture_events()
app = falcon.API()
class Resource:
def on_get(self, req, resp):
raise falcon.HTTPBadRequest()
app.add_route("/", Resource())
client = falcon.testing.TestClient(app)
client.simulate_get("/")
assert not events
def test_does_not_leak_scope(sentry_init, capture_events):
sentry_init(integrations=[FalconIntegration()])
events = capture_events()
with sentry_sdk.configure_scope() as scope:
scope.set_tag("request_data", False)
app = falcon.API()
class Resource:
def on_get(self, req, resp):
with sentry_sdk.configure_scope() as scope:
scope.set_tag("request_data", True)
def generator():
for row in range(1000):
with sentry_sdk.configure_scope() as scope:
assert scope._tags["request_data"]
yield (str(row) + "\n").encode()
resp.stream = generator()
app.add_route("/", Resource())
client = falcon.testing.TestClient(app)
response = client.simulate_get("/")
expected_response = "".join(str(row) + "\n" for row in range(1000))
assert response.text == expected_response
assert not events
with sentry_sdk.configure_scope() as scope:
assert not scope._tags["request_data"]
@pytest.mark.skipif(
not hasattr(falcon, "asgi"), reason="This Falcon version lacks ASGI support."
)
def test_falcon_not_breaking_asgi(sentry_init):
"""
This test simply verifies that the Falcon integration does not break ASGI
Falcon apps.
The test does not verify ASGI Falcon support, since our Falcon integration
currently lacks support for ASGI Falcon apps.
"""
sentry_init(integrations=[FalconIntegration()])
asgi_app = falcon.asgi.App()
try:
falcon.inspect.inspect_app(asgi_app)
except TypeError:
pytest.fail("Falcon integration causing errors in ASGI apps.")
@pytest.mark.skipif(
(FALCON_VERSION or ()) < (3,),
reason="The Sentry Falcon integration only supports custom error handlers on Falcon 3+",
)
def test_falcon_custom_error_handler(sentry_init, make_app, capture_events):
"""
When a custom error handler handles what otherwise would have resulted in a 5xx error,
changing the HTTP status to a non-5xx status, no error event should be sent to Sentry.
"""
sentry_init(integrations=[FalconIntegration()])
events = capture_events()
app = make_app()
client = falcon.testing.TestClient(app)
client.simulate_get("/custom-error")
assert len(events) == 0
sentry-python-1.39.2/tests/integrations/fastapi/ 0000775 0000000 0000000 00000000000 14547447232 0021764 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/fastapi/__init__.py 0000664 0000000 0000000 00000000056 14547447232 0024076 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("fastapi")
sentry-python-1.39.2/tests/integrations/fastapi/test_fastapi.py 0000664 0000000 0000000 00000034001 14547447232 0025022 0 ustar 00root root 0000000 0000000 import json
import logging
import threading
import pytest
from sentry_sdk.integrations.fastapi import FastApiIntegration
from fastapi import FastAPI, Request
from fastapi.testclient import TestClient
from fastapi.middleware.trustedhost import TrustedHostMiddleware
from sentry_sdk import capture_message
from sentry_sdk.integrations.starlette import StarletteIntegration
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
def fastapi_app_factory():
app = FastAPI()
@app.get("/error")
async def _error():
capture_message("Hi")
1 / 0
return {"message": "Hi"}
@app.get("/message")
async def _message():
capture_message("Hi")
return {"message": "Hi"}
@app.get("/message/{message_id}")
async def _message_with_id(message_id):
capture_message("Hi")
return {"message": "Hi"}
@app.get("/sync/thread_ids")
def _thread_ids_sync():
return {
"main": str(threading.main_thread().ident),
"active": str(threading.current_thread().ident),
}
@app.get("/async/thread_ids")
async def _thread_ids_async():
return {
"main": str(threading.main_thread().ident),
"active": str(threading.current_thread().ident),
}
return app
@pytest.mark.asyncio
async def test_response(sentry_init, capture_events):
# FastAPI is heavily based on Starlette so we also need
# to enable StarletteIntegration.
# In the future this will be auto enabled.
sentry_init(
integrations=[StarletteIntegration(), FastApiIntegration()],
traces_sample_rate=1.0,
send_default_pii=True,
debug=True,
)
app = fastapi_app_factory()
events = capture_events()
client = TestClient(app)
response = client.get("/message")
assert response.json() == {"message": "Hi"}
assert len(events) == 2
(message_event, transaction_event) = events
assert message_event["message"] == "Hi"
assert transaction_event["transaction"] == "/message"
@pytest.mark.parametrize(
"url,transaction_style,expected_transaction,expected_source",
[
(
"/message",
"url",
"/message",
"route",
),
(
"/message",
"endpoint",
"tests.integrations.fastapi.test_fastapi.fastapi_app_factory.<locals>._message",
"component",
),
(
"/message/123456",
"url",
"/message/{message_id}",
"route",
),
(
"/message/123456",
"endpoint",
"tests.integrations.fastapi.test_fastapi.fastapi_app_factory.<locals>._message_with_id",
"component",
),
],
)
def test_transaction_style(
sentry_init,
capture_events,
url,
transaction_style,
expected_transaction,
expected_source,
):
sentry_init(
integrations=[
StarletteIntegration(transaction_style=transaction_style),
FastApiIntegration(transaction_style=transaction_style),
],
)
app = fastapi_app_factory()
events = capture_events()
client = TestClient(app)
client.get(url)
(event,) = events
assert event["transaction"] == expected_transaction
assert event["transaction_info"] == {"source": expected_source}
# Assert that state is not leaked
events.clear()
capture_message("foo")
(event,) = events
assert "request" not in event
assert "transaction" not in event
def test_legacy_setup(
sentry_init,
capture_events,
):
# Check that behaviour does not change
# if the user just adds the new Integrations
# and forgets to remove SentryAsgiMiddleware
sentry_init()
app = fastapi_app_factory()
asgi_app = SentryAsgiMiddleware(app)
events = capture_events()
client = TestClient(asgi_app)
client.get("/message/123456")
(event,) = events
assert event["transaction"] == "/message/{message_id}"
@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
@mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0)
def test_active_thread_id(sentry_init, capture_envelopes, teardown_profiling, endpoint):
sentry_init(
traces_sample_rate=1.0,
_experiments={"profiles_sample_rate": 1.0},
)
app = fastapi_app_factory()
asgi_app = SentryAsgiMiddleware(app)
envelopes = capture_envelopes()
client = TestClient(asgi_app)
response = client.get(endpoint)
assert response.status_code == 200
data = json.loads(response.content)
envelopes = [envelope for envelope in envelopes]
assert len(envelopes) == 1
profiles = [item for item in envelopes[0].items if item.type == "profile"]
assert len(profiles) == 1
for profile in profiles:
transactions = profile.payload.json["transactions"]
assert len(transactions) == 1
assert str(data["active"]) == transactions[0]["active_thread_id"]
@pytest.mark.asyncio
async def test_original_request_not_scrubbed(sentry_init, capture_events):
sentry_init(
integrations=[StarletteIntegration(), FastApiIntegration()],
traces_sample_rate=1.0,
debug=True,
)
app = FastAPI()
@app.post("/error")
async def _error(request: Request):
logging.critical("Oh no!")
assert request.headers["Authorization"] == "Bearer ohno"
assert await request.json() == {"password": "secret"}
return {"error": "Oh no!"}
events = capture_events()
client = TestClient(app)
client.post(
"/error", json={"password": "secret"}, headers={"Authorization": "Bearer ohno"}
)
event = events[0]
assert event["request"]["data"] == {"password": "[Filtered]"}
assert event["request"]["headers"]["authorization"] == "[Filtered]"
@pytest.mark.asyncio
def test_response_status_code_ok_in_transaction_context(sentry_init, capture_envelopes):
"""
Tests that the response status code is added to the transaction "response" context.
"""
sentry_init(
integrations=[StarletteIntegration(), FastApiIntegration()],
traces_sample_rate=1.0,
release="demo-release",
)
envelopes = capture_envelopes()
app = fastapi_app_factory()
client = TestClient(app)
client.get("/message")
(_, transaction_envelope) = envelopes
transaction = transaction_envelope.get_transaction_event()
assert transaction["type"] == "transaction"
assert len(transaction["contexts"]) > 0
assert (
"response" in transaction["contexts"].keys()
), "Response context not found in transaction"
assert transaction["contexts"]["response"]["status_code"] == 200
@pytest.mark.asyncio
def test_response_status_code_error_in_transaction_context(
sentry_init,
capture_envelopes,
):
"""
Tests that the response status code is added to the transaction "response" context.
"""
sentry_init(
integrations=[StarletteIntegration(), FastApiIntegration()],
traces_sample_rate=1.0,
release="demo-release",
)
envelopes = capture_envelopes()
app = fastapi_app_factory()
client = TestClient(app)
with pytest.raises(ZeroDivisionError):
client.get("/error")
(
_,
_,
transaction_envelope,
) = envelopes
transaction = transaction_envelope.get_transaction_event()
assert transaction["type"] == "transaction"
assert len(transaction["contexts"]) > 0
assert (
"response" in transaction["contexts"].keys()
), "Response context not found in transaction"
assert transaction["contexts"]["response"]["status_code"] == 500
@pytest.mark.asyncio
def test_response_status_code_not_found_in_transaction_context(
sentry_init,
capture_envelopes,
):
"""
Tests that the response status code is added to the transaction "response" context.
"""
sentry_init(
integrations=[StarletteIntegration(), FastApiIntegration()],
traces_sample_rate=1.0,
release="demo-release",
)
envelopes = capture_envelopes()
app = fastapi_app_factory()
client = TestClient(app)
client.get("/non-existing-route-123")
(transaction_envelope,) = envelopes
transaction = transaction_envelope.get_transaction_event()
assert transaction["type"] == "transaction"
assert len(transaction["contexts"]) > 0
assert (
"response" in transaction["contexts"].keys()
), "Response context not found in transaction"
assert transaction["contexts"]["response"]["status_code"] == 404
@pytest.mark.parametrize(
"request_url,transaction_style,expected_transaction_name,expected_transaction_source",
[
(
"/message/123456",
"endpoint",
"tests.integrations.fastapi.test_fastapi.fastapi_app_factory.<locals>._message_with_id",
"component",
),
(
"/message/123456",
"url",
"/message/{message_id}",
"route",
),
],
)
def test_transaction_name(
sentry_init,
request_url,
transaction_style,
expected_transaction_name,
expected_transaction_source,
capture_envelopes,
):
"""
Tests that the transaction name is something meaningful.
"""
sentry_init(
auto_enabling_integrations=False, # Make sure that the httpx integration is not added, because it adds tracing information to the starlette test client's request.
integrations=[
StarletteIntegration(transaction_style=transaction_style),
FastApiIntegration(transaction_style=transaction_style),
],
traces_sample_rate=1.0,
debug=True,
)
envelopes = capture_envelopes()
app = fastapi_app_factory()
client = TestClient(app)
client.get(request_url)
(_, transaction_envelope) = envelopes
transaction_event = transaction_envelope.get_transaction_event()
assert transaction_event["transaction"] == expected_transaction_name
assert (
transaction_event["transaction_info"]["source"] == expected_transaction_source
)
def test_route_endpoint_equal_dependant_call(sentry_init):
"""
Tests that the route endpoint name is equal to the wrapped dependant call name.
"""
sentry_init(
auto_enabling_integrations=False, # Make sure that the httpx integration is not added, because it adds tracing information to the starlette test client's request.
integrations=[
StarletteIntegration(),
FastApiIntegration(),
],
traces_sample_rate=1.0,
debug=True,
)
app = fastapi_app_factory()
for route in app.router.routes:
if not hasattr(route, "dependant"):
continue
assert route.endpoint.__qualname__ == route.dependant.call.__qualname__
@pytest.mark.parametrize(
"request_url,transaction_style,expected_transaction_name,expected_transaction_source",
[
(
"/message/123456",
"endpoint",
"http://testserver/message/123456",
"url",
),
(
"/message/123456",
"url",
"http://testserver/message/123456",
"url",
),
],
)
def test_transaction_name_in_traces_sampler(
sentry_init,
request_url,
transaction_style,
expected_transaction_name,
expected_transaction_source,
):
"""
Tests that a custom traces_sampler retrieves a meaningful transaction name.
In this case the URL or endpoint, because we do not have the route yet.
"""
def dummy_traces_sampler(sampling_context):
assert (
sampling_context["transaction_context"]["name"] == expected_transaction_name
)
assert (
sampling_context["transaction_context"]["source"]
== expected_transaction_source
)
sentry_init(
auto_enabling_integrations=False, # Make sure that the httpx integration is not added, because it adds tracing information to the starlette test client's request.
integrations=[StarletteIntegration(transaction_style=transaction_style)],
traces_sampler=dummy_traces_sampler,
traces_sample_rate=1.0,
debug=True,
)
app = fastapi_app_factory()
client = TestClient(app)
client.get(request_url)
@pytest.mark.parametrize(
"request_url,transaction_style,expected_transaction_name,expected_transaction_source",
[
(
"/message/123456",
"endpoint",
"starlette.middleware.trustedhost.TrustedHostMiddleware",
"component",
),
(
"/message/123456",
"url",
"http://testserver/message/123456",
"url",
),
],
)
def test_transaction_name_in_middleware(
sentry_init,
request_url,
transaction_style,
expected_transaction_name,
expected_transaction_source,
capture_envelopes,
):
"""
Tests that the transaction name is something meaningful.
"""
sentry_init(
auto_enabling_integrations=False, # Make sure that the httpx integration is not added, because it adds tracing information to the starlette test client's request.
integrations=[
StarletteIntegration(transaction_style=transaction_style),
FastApiIntegration(transaction_style=transaction_style),
],
traces_sample_rate=1.0,
debug=True,
)
envelopes = capture_envelopes()
app = fastapi_app_factory()
app.add_middleware(
TrustedHostMiddleware,
allowed_hosts=[
"example.com",
],
)
client = TestClient(app)
client.get(request_url)
(transaction_envelope,) = envelopes
transaction_event = transaction_envelope.get_transaction_event()
assert transaction_event["contexts"]["response"]["status_code"] == 400
assert transaction_event["transaction"] == expected_transaction_name
assert (
transaction_event["transaction_info"]["source"] == expected_transaction_source
)
sentry-python-1.39.2/tests/integrations/flask/ 0000775 0000000 0000000 00000000000 14547447232 0021435 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/flask/__init__.py 0000664 0000000 0000000 00000000054 14547447232 0023545 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("flask")
sentry-python-1.39.2/tests/integrations/flask/test_flask.py 0000664 0000000 0000000 00000065412 14547447232 0024156 0 ustar 00root root 0000000 0000000 import json
import re
import logging
from io import BytesIO
import pytest
from flask import (
Flask,
Response,
request,
abort,
stream_with_context,
render_template_string,
)
from flask.views import View
from flask_login import LoginManager, login_user
try:
from werkzeug.wrappers.request import UnsupportedMediaType
except ImportError:
UnsupportedMediaType = None
import sentry_sdk.integrations.flask as flask_sentry
from sentry_sdk import (
set_tag,
configure_scope,
capture_message,
capture_exception,
last_event_id,
Hub,
)
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.serializer import MAX_DATABAG_BREADTH
login_manager = LoginManager()
@pytest.fixture
def app():
app = Flask(__name__)
app.config["TESTING"] = True
app.secret_key = "haha"
login_manager.init_app(app)
@app.route("/message")
def hi():
capture_message("hi")
return "ok"
@app.route("/message/<int:message_id>")
def hi_with_id(message_id):
capture_message("hi again")
return "ok"
return app
@pytest.fixture(params=("auto", "manual"))
def integration_enabled_params(request):
if request.param == "auto":
return {"auto_enabling_integrations": True}
elif request.param == "manual":
return {"integrations": [flask_sentry.FlaskIntegration()]}
else:
raise ValueError(request.param)
def test_has_context(sentry_init, app, capture_events):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
events = capture_events()
client = app.test_client()
response = client.get("/message")
assert response.status_code == 200
(event,) = events
assert event["transaction"] == "hi"
assert "data" not in event["request"]
assert event["request"]["url"] == "http://localhost/message"
@pytest.mark.parametrize(
"url,transaction_style,expected_transaction,expected_source",
[
("/message", "endpoint", "hi", "component"),
("/message", "url", "/message", "route"),
("/message/123456", "endpoint", "hi_with_id", "component"),
("/message/123456", "url", "/message/<int:message_id>", "route"),
],
)
def test_transaction_style(
sentry_init,
app,
capture_events,
url,
transaction_style,
expected_transaction,
expected_source,
):
sentry_init(
integrations=[
flask_sentry.FlaskIntegration(transaction_style=transaction_style)
]
)
events = capture_events()
client = app.test_client()
response = client.get(url)
assert response.status_code == 200
(event,) = events
assert event["transaction"] == expected_transaction
assert event["transaction_info"] == {"source": expected_source}
@pytest.mark.parametrize("debug", (True, False))
@pytest.mark.parametrize("testing", (True, False))
def test_errors(
sentry_init,
capture_exceptions,
capture_events,
app,
debug,
testing,
integration_enabled_params,
):
sentry_init(debug=True, **integration_enabled_params)
app.debug = debug
app.testing = testing
@app.route("/")
def index():
1 / 0
exceptions = capture_exceptions()
events = capture_events()
client = app.test_client()
try:
client.get("/")
except ZeroDivisionError:
pass
(exc,) = exceptions
assert isinstance(exc, ZeroDivisionError)
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "flask"
def test_flask_login_not_installed(
sentry_init, app, capture_events, monkeypatch, integration_enabled_params
):
sentry_init(**integration_enabled_params)
monkeypatch.setattr(flask_sentry, "flask_login", None)
events = capture_events()
client = app.test_client()
client.get("/message")
(event,) = events
assert event.get("user", {}).get("id") is None
def test_flask_login_not_configured(
sentry_init, app, capture_events, monkeypatch, integration_enabled_params
):
sentry_init(**integration_enabled_params)
assert flask_sentry.flask_login
events = capture_events()
client = app.test_client()
client.get("/message")
(event,) = events
assert event.get("user", {}).get("id") is None
def test_flask_login_partially_configured(
sentry_init, app, capture_events, monkeypatch, integration_enabled_params
):
sentry_init(**integration_enabled_params)
events = capture_events()
login_manager = LoginManager()
login_manager.init_app(app)
client = app.test_client()
client.get("/message")
(event,) = events
assert event.get("user", {}).get("id") is None
@pytest.mark.parametrize("send_default_pii", [True, False])
@pytest.mark.parametrize("user_id", [None, "42", 3])
def test_flask_login_configured(
send_default_pii,
sentry_init,
app,
user_id,
capture_events,
monkeypatch,
integration_enabled_params,
):
sentry_init(send_default_pii=send_default_pii, **integration_enabled_params)
class User(object):
is_authenticated = is_active = True
is_anonymous = user_id is not None
def get_id(self):
return str(user_id)
@login_manager.user_loader
def load_user(user_id):
if user_id is not None:
return User()
@app.route("/login")
def login():
if user_id is not None:
login_user(User())
return "ok"
events = capture_events()
client = app.test_client()
assert client.get("/login").status_code == 200
assert not events
assert client.get("/message").status_code == 200
(event,) = events
if user_id is None or not send_default_pii:
assert event.get("user", {}).get("id") is None
else:
assert event["user"]["id"] == str(user_id)
def test_flask_large_json_request(sentry_init, capture_events, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
data = {"foo": {"bar": "a" * 2000}}
@app.route("/", methods=["POST"])
def index():
assert request.get_json() == data
assert request.get_data() == json.dumps(data).encode("ascii")
assert not request.form
capture_message("hi")
return "ok"
events = capture_events()
client = app.test_client()
response = client.post("/", content_type="application/json", data=json.dumps(data))
assert response.status_code == 200
(event,) = events
assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
"": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
}
assert len(event["request"]["data"]["foo"]["bar"]) == 1024
def test_flask_session_tracking(sentry_init, capture_envelopes, app):
sentry_init(
integrations=[flask_sentry.FlaskIntegration()],
release="demo-release",
)
@app.route("/")
def index():
with configure_scope() as scope:
scope.set_user({"ip_address": "1.2.3.4", "id": "42"})
try:
raise ValueError("stuff")
except Exception:
logging.exception("stuff happened")
1 / 0
envelopes = capture_envelopes()
with app.test_client() as client:
try:
client.get("/", headers={"User-Agent": "blafasel/1.0"})
except ZeroDivisionError:
pass
Hub.current.client.flush()
(first_event, error_event, session) = envelopes
first_event = first_event.get_event()
error_event = error_event.get_event()
session = session.items[0].payload.json
aggregates = session["aggregates"]
assert first_event["exception"]["values"][0]["type"] == "ValueError"
assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
assert len(aggregates) == 1
assert aggregates[0]["crashed"] == 1
assert aggregates[0]["started"]
assert session["attrs"]["release"] == "demo-release"
@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
def test_flask_empty_json_request(sentry_init, capture_events, app, data):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
@app.route("/", methods=["POST"])
def index():
assert request.get_json() == data
assert request.get_data() == json.dumps(data).encode("ascii")
assert not request.form
capture_message("hi")
return "ok"
events = capture_events()
client = app.test_client()
response = client.post("/", content_type="application/json", data=json.dumps(data))
assert response.status_code == 200
(event,) = events
assert event["request"]["data"] == data
def test_flask_medium_formdata_request(sentry_init, capture_events, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
data = {"foo": "a" * 2000}
@app.route("/", methods=["POST"])
def index():
assert request.form["foo"] == data["foo"]
assert not request.get_data()
try:
assert not request.get_json()
except UnsupportedMediaType:
# flask/werkzeug 3
pass
capture_message("hi")
return "ok"
events = capture_events()
client = app.test_client()
response = client.post("/", data=data)
assert response.status_code == 200
(event,) = events
assert event["_meta"]["request"]["data"]["foo"] == {
"": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
}
assert len(event["request"]["data"]["foo"]) == 1024
def test_flask_formdata_request_appear_transaction_body(
sentry_init, capture_events, app
):
"""
Test that transaction request data contains the request body, even if no exception was raised.
"""
sentry_init(integrations=[flask_sentry.FlaskIntegration()], traces_sample_rate=1.0)
data = {"username": "sentry-user", "age": "26"}
@app.route("/", methods=["POST"])
def index():
assert request.form["username"] == data["username"]
assert request.form["age"] == data["age"]
assert not request.get_data()
try:
assert not request.get_json()
except UnsupportedMediaType:
# flask/werkzeug 3
pass
set_tag("view", "yes")
capture_message("hi")
return "ok"
events = capture_events()
client = app.test_client()
response = client.post("/", data=data)
assert response.status_code == 200
event, transaction_event = events
assert "request" in transaction_event
assert "data" in transaction_event["request"]
assert transaction_event["request"]["data"] == data
@pytest.mark.parametrize("input_char", ["a", b"a"])
def test_flask_too_large_raw_request(sentry_init, input_char, capture_events, app):
sentry_init(
integrations=[flask_sentry.FlaskIntegration()], max_request_body_size="small"
)
data = input_char * 2000
@app.route("/", methods=["POST"])
def index():
assert not request.form
if isinstance(data, bytes):
assert request.get_data() == data
else:
assert request.get_data() == data.encode("ascii")
try:
assert not request.get_json()
except UnsupportedMediaType:
# flask/werkzeug 3
pass
capture_message("hi")
return "ok"
events = capture_events()
client = app.test_client()
response = client.post("/", data=data)
assert response.status_code == 200
(event,) = events
assert event["_meta"]["request"]["data"] == {"": {"rem": [["!config", "x"]]}}
assert not event["request"]["data"]
def test_flask_files_and_form(sentry_init, capture_events, app):
sentry_init(
integrations=[flask_sentry.FlaskIntegration()], max_request_body_size="always"
)
data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}
@app.route("/", methods=["POST"])
def index():
assert list(request.form) == ["foo"]
assert list(request.files) == ["file"]
try:
assert not request.get_json()
except UnsupportedMediaType:
# flask/werkzeug 3
pass
capture_message("hi")
return "ok"
events = capture_events()
client = app.test_client()
response = client.post("/", data=data)
assert response.status_code == 200
(event,) = events
assert event["_meta"]["request"]["data"]["foo"] == {
"": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
}
assert len(event["request"]["data"]["foo"]) == 1024
assert event["_meta"]["request"]["data"]["file"] == {"": {"rem": [["!raw", "x"]]}}
assert not event["request"]["data"]["file"]
def test_json_not_truncated_if_max_request_body_size_is_always(
sentry_init, capture_events, app
):
sentry_init(
integrations=[flask_sentry.FlaskIntegration()], max_request_body_size="always"
)
data = {
"key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
}
@app.route("/", methods=["POST"])
def index():
assert request.get_json() == data
assert request.get_data() == json.dumps(data).encode("ascii")
capture_message("hi")
return "ok"
events = capture_events()
client = app.test_client()
response = client.post("/", content_type="application/json", data=json.dumps(data))
assert response.status_code == 200
(event,) = events
assert event["request"]["data"] == data
@pytest.mark.parametrize(
"integrations",
[
[flask_sentry.FlaskIntegration()],
[flask_sentry.FlaskIntegration(), LoggingIntegration(event_level="ERROR")],
],
)
def test_errors_not_reported_twice(sentry_init, integrations, capture_events, app):
sentry_init(integrations=integrations)
@app.route("/")
def index():
try:
1 / 0
except Exception as e:
app.logger.exception(e)
raise e
events = capture_events()
client = app.test_client()
with pytest.raises(ZeroDivisionError):
client.get("/")
assert len(events) == 1
def test_logging(sentry_init, capture_events, app):
# ensure that Flask's logger magic doesn't break ours
sentry_init(
integrations=[
flask_sentry.FlaskIntegration(),
LoggingIntegration(event_level="ERROR"),
]
)
@app.route("/")
def index():
app.logger.error("hi")
return "ok"
events = capture_events()
client = app.test_client()
client.get("/")
(event,) = events
assert event["level"] == "error"
def test_no_errors_without_request(app, sentry_init):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
with app.app_context():
capture_exception(ValueError())
def test_cli_commands_raise(app):
if not hasattr(app, "cli"):
pytest.skip("Too old flask version")
from flask.cli import ScriptInfo
@app.cli.command()
def foo():
1 / 0
def create_app(*_):
return app
with pytest.raises(ZeroDivisionError):
app.cli.main(
args=["foo"], prog_name="myapp", obj=ScriptInfo(create_app=create_app)
)
def test_wsgi_level_error_is_caught(
app, capture_exceptions, capture_events, sentry_init
):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
def wsgi_app(environ, start_response):
1 / 0
app.wsgi_app = wsgi_app
client = app.test_client()
exceptions = capture_exceptions()
events = capture_events()
with pytest.raises(ZeroDivisionError) as exc:
client.get("/")
(error,) = exceptions
assert error is exc.value
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "wsgi"
def test_500(sentry_init, capture_events, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
app.debug = False
app.testing = False
@app.route("/")
def index():
1 / 0
@app.errorhandler(500)
def error_handler(err):
return "Sentry error: %s" % last_event_id()
events = capture_events()
client = app.test_client()
response = client.get("/")
(event,) = events
assert response.data.decode("utf-8") == "Sentry error: %s" % event["event_id"]
def test_error_in_errorhandler(sentry_init, capture_events, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
app.debug = False
app.testing = False
@app.route("/")
def index():
raise ValueError()
@app.errorhandler(500)
def error_handler(err):
1 / 0
events = capture_events()
client = app.test_client()
with pytest.raises(ZeroDivisionError):
client.get("/")
event1, event2 = events
(exception,) = event1["exception"]["values"]
assert exception["type"] == "ValueError"
exception = event2["exception"]["values"][-1]
assert exception["type"] == "ZeroDivisionError"
def test_bad_request_not_captured(sentry_init, capture_events, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
events = capture_events()
@app.route("/")
def index():
abort(400)
client = app.test_client()
client.get("/")
assert not events
def test_does_not_leak_scope(sentry_init, capture_events, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
events = capture_events()
with configure_scope() as scope:
scope.set_tag("request_data", False)
@app.route("/")
def index():
with configure_scope() as scope:
scope.set_tag("request_data", True)
def generate():
for row in range(1000):
with configure_scope() as scope:
assert scope._tags["request_data"]
yield str(row) + "\n"
return Response(stream_with_context(generate()), mimetype="text/csv")
client = app.test_client()
response = client.get("/")
assert response.data.decode() == "".join(str(row) + "\n" for row in range(1000))
assert not events
with configure_scope() as scope:
assert not scope._tags["request_data"]
def test_scoped_test_client(sentry_init, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
@app.route("/")
def index():
return "ok"
with app.test_client() as client:
response = client.get("/")
assert response.status_code == 200
@pytest.mark.parametrize("exc_cls", [ZeroDivisionError, Exception])
def test_errorhandler_for_exception_swallows_exception(
sentry_init, app, capture_events, exc_cls
):
# In contrast to error handlers for a status code, error
# handlers for exceptions can swallow the exception (this is
# just how the Flask signal works)
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
events = capture_events()
@app.route("/")
def index():
1 / 0
@app.errorhandler(exc_cls)
def zerodivision(e):
return "ok"
with app.test_client() as client:
response = client.get("/")
assert response.status_code == 200
assert not events
def test_tracing_success(sentry_init, capture_events, app):
sentry_init(traces_sample_rate=1.0, integrations=[flask_sentry.FlaskIntegration()])
@app.before_request
def _():
set_tag("before_request", "yes")
@app.route("/message_tx")
def hi_tx():
set_tag("view", "yes")
capture_message("hi")
return "ok"
events = capture_events()
with app.test_client() as client:
response = client.get("/message_tx")
assert response.status_code == 200
message_event, transaction_event = events
assert transaction_event["type"] == "transaction"
assert transaction_event["transaction"] == "hi_tx"
assert transaction_event["contexts"]["trace"]["status"] == "ok"
assert transaction_event["tags"]["view"] == "yes"
assert transaction_event["tags"]["before_request"] == "yes"
assert message_event["message"] == "hi"
assert message_event["transaction"] == "hi_tx"
assert message_event["tags"]["view"] == "yes"
assert message_event["tags"]["before_request"] == "yes"
def test_tracing_error(sentry_init, capture_events, app):
sentry_init(traces_sample_rate=1.0, integrations=[flask_sentry.FlaskIntegration()])
events = capture_events()
@app.route("/error")
def error():
1 / 0
with pytest.raises(ZeroDivisionError):
with app.test_client() as client:
response = client.get("/error")
assert response.status_code == 500
error_event, transaction_event = events
assert transaction_event["type"] == "transaction"
assert transaction_event["transaction"] == "error"
assert transaction_event["contexts"]["trace"]["status"] == "internal_error"
assert error_event["transaction"] == "error"
(exception,) = error_event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
def test_error_has_trace_context_if_tracing_disabled(sentry_init, capture_events, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
events = capture_events()
@app.route("/error")
def error():
1 / 0
with pytest.raises(ZeroDivisionError):
with app.test_client() as client:
response = client.get("/error")
assert response.status_code == 500
(error_event,) = events
assert error_event["contexts"]["trace"]
def test_class_based_views(sentry_init, app, capture_events):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
events = capture_events()
@app.route("/")
class HelloClass(View):
def dispatch_request(self):
capture_message("hi")
return "ok"
app.add_url_rule("/hello-class/", view_func=HelloClass.as_view("hello_class"))
with app.test_client() as client:
response = client.get("/hello-class/")
assert response.status_code == 200
(event,) = events
assert event["message"] == "hi"
assert event["transaction"] == "hello_class"
@pytest.mark.parametrize(
"template_string", ["{{ sentry_trace }}", "{{ sentry_trace_meta }}"]
)
def test_template_tracing_meta(sentry_init, app, capture_events, template_string):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
events = capture_events()
@app.route("/")
def index():
hub = Hub.current
capture_message(hub.get_traceparent() + "\n" + hub.get_baggage())
return render_template_string(template_string)
with app.test_client() as client:
response = client.get("/")
assert response.status_code == 200
rendered_meta = response.data.decode("utf-8")
traceparent, baggage = events[0]["message"].split("\n")
assert traceparent != ""
assert baggage != ""
match = re.match(
    r'^<meta name="sentry-trace" content="([^\"]*)">\s*<meta name="baggage" content="([^\"]*)">',
    rendered_meta,
)
assert match is not None
assert match.group(1) == traceparent
# Python 2 does not preserve sort order
rendered_baggage = match.group(2)
assert sorted(rendered_baggage.split(",")) == sorted(baggage.split(","))
def test_dont_override_sentry_trace_context(sentry_init, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
@app.route("/")
def index():
return render_template_string("{{ sentry_trace }}", sentry_trace="hi")
with app.test_client() as client:
response = client.get("/")
assert response.status_code == 200
assert response.data == b"hi"
def test_request_not_modified_by_reference(sentry_init, capture_events, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
@app.route("/", methods=["POST"])
def index():
logging.critical("oops")
assert request.get_json() == {"password": "ohno"}
assert request.headers["Authorization"] == "Bearer ohno"
return "ok"
events = capture_events()
client = app.test_client()
client.post(
"/", json={"password": "ohno"}, headers={"Authorization": "Bearer ohno"}
)
(event,) = events
assert event["request"]["data"]["password"] == "[Filtered]"
assert event["request"]["headers"]["Authorization"] == "[Filtered]"
@pytest.mark.parametrize("traces_sample_rate", [None, 1.0])
def test_replay_event_context(sentry_init, capture_events, app, traces_sample_rate):
"""
Tests that the replay context is added to the event context.
This is not strictly a Flask integration test, but it's the easiest way to test this.
"""
sentry_init(traces_sample_rate=traces_sample_rate)
@app.route("/error")
def error():
return 1 / 0
events = capture_events()
client = app.test_client()
headers = {
"baggage": "other-vendor-value-1=foo;bar;baz,sentry-trace_id=771a43a4192642f0b136d5159a501700,sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,other-vendor-value-2=foo;bar,sentry-replay_id=12312012123120121231201212312012",
"sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1",
}
with pytest.raises(ZeroDivisionError):
client.get("/error", headers=headers)
event = events[0]
assert event["contexts"]
assert event["contexts"]["replay"]
assert (
event["contexts"]["replay"]["replay_id"] == "12312012123120121231201212312012"
)
def test_response_status_code_ok_in_transaction_context(
sentry_init, capture_envelopes, app
):
"""
Tests that the response status code is added to the transaction context.
This should also apply when an Exception is raised during the request, though the Flask test app does not seem to trigger that case.
"""
sentry_init(
integrations=[flask_sentry.FlaskIntegration()],
traces_sample_rate=1.0,
release="demo-release",
)
envelopes = capture_envelopes()
client = app.test_client()
client.get("/message")
Hub.current.client.flush()
(_, transaction_envelope, _) = envelopes
transaction = transaction_envelope.get_transaction_event()
assert transaction["type"] == "transaction"
assert len(transaction["contexts"]) > 0
assert (
"response" in transaction["contexts"].keys()
), "Response context not found in transaction"
assert transaction["contexts"]["response"]["status_code"] == 200
def test_response_status_code_not_found_in_transaction_context(
sentry_init, capture_envelopes, app
):
sentry_init(
integrations=[flask_sentry.FlaskIntegration()],
traces_sample_rate=1.0,
release="demo-release",
)
envelopes = capture_envelopes()
client = app.test_client()
client.get("/not-existing-route")
Hub.current.client.flush()
(transaction_envelope, _) = envelopes
transaction = transaction_envelope.get_transaction_event()
assert transaction["type"] == "transaction"
assert len(transaction["contexts"]) > 0
assert (
"response" in transaction["contexts"].keys()
), "Response context not found in transaction"
assert transaction["contexts"]["response"]["status_code"] == 404
sentry-python-1.39.2/tests/integrations/gcp/ 0000775 0000000 0000000 00000000000 14547447232 0021106 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/gcp/test_gcp.py 0000664 0000000 0000000 00000042150 14547447232 0023272 0 ustar 00root root 0000000 0000000 """
# GCP Cloud Functions unit tests
"""
import json
from textwrap import dedent
import tempfile
import sys
import subprocess
import pytest
import os.path
import os
pytestmark = pytest.mark.skipif(
not hasattr(tempfile, "TemporaryDirectory"), reason="need Python 3.2+"
)
FUNCTIONS_PRELUDE = """
from unittest.mock import Mock
import __main__ as gcp_functions
import os
# Initializing all the necessary environment variables
os.environ["FUNCTION_TIMEOUT_SEC"] = "3"
os.environ["FUNCTION_NAME"] = "Google Cloud function"
os.environ["ENTRY_POINT"] = "cloud_function"
os.environ["FUNCTION_IDENTITY"] = "func_ID"
os.environ["FUNCTION_REGION"] = "us-central1"
os.environ["GCP_PROJECT"] = "serverless_project"
def log_return_value(func):
def inner(*args, **kwargs):
rv = func(*args, **kwargs)
print("\\nRETURN VALUE: {}\\n".format(json.dumps(rv)))
return rv
return inner
gcp_functions.worker_v1 = Mock()
gcp_functions.worker_v1.FunctionHandler = Mock()
gcp_functions.worker_v1.FunctionHandler.invoke_user_function = log_return_value(cloud_function)
import sentry_sdk
from sentry_sdk.integrations.gcp import GcpIntegration
import json
import time
from sentry_sdk.transport import HttpTransport
def event_processor(event):
# Adding delay which would allow us to capture events.
time.sleep(1)
return event
def envelope_processor(envelope):
(item,) = envelope.items
return item.get_bytes()
class TestTransport(HttpTransport):
def _send_event(self, event):
event = event_processor(event)
# Writing a single string to stdout holds the GIL (seems like) and
# therefore cannot be interleaved with other threads. This is why we
# explicitly add a newline at the end even though `print` would provide
# us one.
print("\\nEVENT: {}\\n".format(json.dumps(event)))
def _send_envelope(self, envelope):
envelope = envelope_processor(envelope)
print("\\nENVELOPE: {}\\n".format(envelope.decode(\"utf-8\")))
def init_sdk(timeout_warning=False, **extra_init_args):
sentry_sdk.init(
dsn="https://123abc@example.com/123",
transport=TestTransport,
integrations=[GcpIntegration(timeout_warning=timeout_warning)],
shutdown_timeout=10,
# excepthook -> dedupe -> event_processor client report gets added
# which we don't really care about for these tests
send_client_reports=False,
**extra_init_args
)
"""
@pytest.fixture
def run_cloud_function():
def inner(code, subprocess_kwargs=()):
events = []
envelopes = []
return_value = None
# STEP 1: Build the cloud function as an sdist and install it (plus dependencies) into tmpdir
subprocess_kwargs = dict(subprocess_kwargs)
with tempfile.TemporaryDirectory() as tmpdir:
main_py = os.path.join(tmpdir, "main.py")
with open(main_py, "w") as f:
f.write(code)
setup_cfg = os.path.join(tmpdir, "setup.cfg")
with open(setup_cfg, "w") as f:
f.write("[install]\nprefix=")
subprocess.check_call(
[sys.executable, "setup.py", "sdist", "-d", os.path.join(tmpdir, "..")],
**subprocess_kwargs
)
subprocess.check_call(
"pip install ../*.tar.gz -t .",
cwd=tmpdir,
shell=True,
**subprocess_kwargs
)
stream = os.popen("python {}/main.py".format(tmpdir))
stream_data = stream.read()
stream.close()
for line in stream_data.splitlines():
print("GCP:", line)
if line.startswith("EVENT: "):
line = line[len("EVENT: ") :]
events.append(json.loads(line))
elif line.startswith("ENVELOPE: "):
line = line[len("ENVELOPE: ") :]
envelopes.append(json.loads(line))
elif line.startswith("RETURN VALUE: "):
line = line[len("RETURN VALUE: ") :]
return_value = json.loads(line)
else:
    continue
return envelopes, events, return_value
return inner
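# The fixture returns a callable: run_cloud_function(code) executes the
# assembled script in a subprocess and returns (envelopes, events, return_value)
# parsed from the stdout markers printed above.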
def test_handled_exception(run_cloud_function):
_, events, return_value = run_cloud_function(
dedent(
"""
functionhandler = None
event = {}
def cloud_function(functionhandler, event):
raise Exception("something went wrong")
"""
)
+ FUNCTIONS_PRELUDE
+ dedent(
"""
init_sdk(timeout_warning=False)
gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
"""
)
)
assert events[0]["level"] == "error"
(exception,) = events[0]["exception"]["values"]
assert exception["type"] == "Exception"
assert exception["value"] == "something went wrong"
assert exception["mechanism"]["type"] == "gcp"
assert not exception["mechanism"]["handled"]
def test_unhandled_exception(run_cloud_function):
_, events, _ = run_cloud_function(
dedent(
"""
functionhandler = None
event = {}
def cloud_function(functionhandler, event):
x = 3/0
return "3"
"""
)
+ FUNCTIONS_PRELUDE
+ dedent(
"""
init_sdk(timeout_warning=False)
gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
"""
)
)
assert events[0]["level"] == "error"
(exception,) = events[0]["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
assert exception["value"] == "division by zero"
assert exception["mechanism"]["type"] == "gcp"
assert not exception["mechanism"]["handled"]
def test_timeout_error(run_cloud_function):
_, events, _ = run_cloud_function(
dedent(
"""
functionhandler = None
event = {}
def cloud_function(functionhandler, event):
time.sleep(10)
return "3"
"""
)
+ FUNCTIONS_PRELUDE
+ dedent(
"""
init_sdk(timeout_warning=True)
gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
"""
)
)
assert events[0]["level"] == "error"
(exception,) = events[0]["exception"]["values"]
assert exception["type"] == "ServerlessTimeoutWarning"
assert (
exception["value"]
== "WARNING : Function is expected to get timed out. Configured timeout duration = 3 seconds."
)
assert exception["mechanism"]["type"] == "threading"
assert not exception["mechanism"]["handled"]
def test_performance_no_error(run_cloud_function):
envelopes, _, _ = run_cloud_function(
dedent(
"""
functionhandler = None
event = {}
def cloud_function(functionhandler, event):
return "test_string"
"""
)
+ FUNCTIONS_PRELUDE
+ dedent(
"""
init_sdk(traces_sample_rate=1.0)
gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
"""
)
)
assert envelopes[0]["type"] == "transaction"
assert envelopes[0]["contexts"]["trace"]["op"] == "function.gcp"
assert envelopes[0]["transaction"].startswith("Google Cloud function")
assert envelopes[0]["transaction_info"] == {"source": "component"}
assert envelopes[0]["transaction"] in envelopes[0]["request"]["url"]
def test_performance_error(run_cloud_function):
envelopes, events, _ = run_cloud_function(
dedent(
"""
functionhandler = None
event = {}
def cloud_function(functionhandler, event):
raise Exception("something went wrong")
"""
)
+ FUNCTIONS_PRELUDE
+ dedent(
"""
init_sdk(traces_sample_rate=1.0)
gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
"""
)
)
assert envelopes[0]["level"] == "error"
(exception,) = envelopes[0]["exception"]["values"]
assert exception["type"] == "Exception"
assert exception["value"] == "something went wrong"
assert exception["mechanism"]["type"] == "gcp"
assert not exception["mechanism"]["handled"]
assert envelopes[1]["type"] == "transaction"
assert envelopes[1]["contexts"]["trace"]["op"] == "function.gcp"
assert envelopes[1]["transaction"].startswith("Google Cloud function")
assert envelopes[1]["transaction"] in envelopes[0]["request"]["url"]
def test_traces_sampler_gets_correct_values_in_sampling_context(
run_cloud_function, DictionaryContaining # noqa:N803
):
# TODO: There are some decent sized hacks below. For more context, see the
# long comment in the test of the same name in the AWS integration. The
# situations there and here aren't identical, but they're similar enough
# that solving one would probably solve both.
import inspect
envelopes, events, return_value = run_cloud_function(
dedent(
"""
functionhandler = None
event = {
"type": "chase",
"chasers": ["Maisey", "Charlie"],
"num_squirrels": 2,
}
def cloud_function(functionhandler, event):
# this runs after the transaction has started, which means we
# can make assertions about traces_sampler
try:
traces_sampler.assert_any_call(
DictionaryContaining({
"gcp_env": DictionaryContaining({
"function_name": "chase_into_tree",
"function_region": "dogpark",
"function_project": "SquirrelChasing",
}),
"gcp_event": {
"type": "chase",
"chasers": ["Maisey", "Charlie"],
"num_squirrels": 2,
},
})
)
except AssertionError:
# catch the error and return it because the error itself will
# get swallowed by the SDK as an "internal exception"
return {"AssertionError raised": True,}
return {"AssertionError raised": False,}
"""
)
+ FUNCTIONS_PRELUDE
+ dedent(inspect.getsource(DictionaryContaining))
+ dedent(
"""
os.environ["FUNCTION_NAME"] = "chase_into_tree"
os.environ["FUNCTION_REGION"] = "dogpark"
os.environ["GCP_PROJECT"] = "SquirrelChasing"
def _safe_is_equal(x, y):
# copied from conftest.py - see docstring and comments there
try:
is_equal = x.__eq__(y)
except AttributeError:
is_equal = NotImplemented
if is_equal == NotImplemented:
return x == y
return is_equal
traces_sampler = Mock(return_value=True)
init_sdk(
traces_sampler=traces_sampler,
)
gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
"""
)
)
assert return_value["AssertionError raised"] is False
def test_error_has_new_trace_context_performance_enabled(run_cloud_function):
"""
Check that a 'trace' context is added to errors and transactions when performance monitoring is enabled.
"""
envelopes, _, _ = run_cloud_function(
dedent(
"""
functionhandler = None
event = {}
def cloud_function(functionhandler, event):
sentry_sdk.capture_message("hi")
x = 3/0
return "3"
"""
)
+ FUNCTIONS_PRELUDE
+ dedent(
"""
init_sdk(traces_sample_rate=1.0)
gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
"""
)
)
(msg_event, error_event, transaction_event) = envelopes
assert "trace" in msg_event["contexts"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert "trace" in error_event["contexts"]
assert "trace_id" in error_event["contexts"]["trace"]
assert "trace" in transaction_event["contexts"]
assert "trace_id" in transaction_event["contexts"]["trace"]
assert (
msg_event["contexts"]["trace"]["trace_id"]
== error_event["contexts"]["trace"]["trace_id"]
== transaction_event["contexts"]["trace"]["trace_id"]
)
def test_error_has_new_trace_context_performance_disabled(run_cloud_function):
"""
Check that a 'trace' context is added to errors and transactions when performance monitoring is disabled.
"""
_, events, _ = run_cloud_function(
dedent(
"""
functionhandler = None
event = {}
def cloud_function(functionhandler, event):
sentry_sdk.capture_message("hi")
x = 3/0
return "3"
"""
)
+ FUNCTIONS_PRELUDE
+ dedent(
"""
init_sdk(traces_sample_rate=None)  # this is the default, just added for clarity
gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
"""
)
)
(msg_event, error_event) = events
assert "trace" in msg_event["contexts"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert "trace" in error_event["contexts"]
assert "trace_id" in error_event["contexts"]["trace"]
assert (
msg_event["contexts"]["trace"]["trace_id"]
== error_event["contexts"]["trace"]["trace_id"]
)
def test_error_has_existing_trace_context_performance_enabled(run_cloud_function):
"""
Check that a 'trace' context is added to errors and transactions
from the incoming 'sentry-trace' header when performance monitoring is enabled.
"""
trace_id = "471a43a4192642f0b136d5159a501701"
parent_span_id = "6e8f22c393e68f19"
parent_sampled = 1
sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
envelopes, _, _ = run_cloud_function(
dedent(
"""
functionhandler = None
from collections import namedtuple
GCPEvent = namedtuple("GCPEvent", ["headers"])
event = GCPEvent(headers={"sentry-trace": "%s"})
def cloud_function(functionhandler, event):
sentry_sdk.capture_message("hi")
x = 3/0
return "3"
"""
% sentry_trace_header
)
+ FUNCTIONS_PRELUDE
+ dedent(
"""
init_sdk(traces_sample_rate=1.0)
gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
"""
)
)
(msg_event, error_event, transaction_event) = envelopes
assert "trace" in msg_event["contexts"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert "trace" in error_event["contexts"]
assert "trace_id" in error_event["contexts"]["trace"]
assert "trace" in transaction_event["contexts"]
assert "trace_id" in transaction_event["contexts"]["trace"]
assert (
msg_event["contexts"]["trace"]["trace_id"]
== error_event["contexts"]["trace"]["trace_id"]
== transaction_event["contexts"]["trace"]["trace_id"]
== "471a43a4192642f0b136d5159a501701"
)
def test_error_has_existing_trace_context_performance_disabled(run_cloud_function):
"""
Check that a 'trace' context is added to errors and transactions
from the incoming 'sentry-trace' header when performance monitoring is disabled.
"""
trace_id = "471a43a4192642f0b136d5159a501701"
parent_span_id = "6e8f22c393e68f19"
parent_sampled = 1
sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
_, events, _ = run_cloud_function(
dedent(
"""
functionhandler = None
from collections import namedtuple
GCPEvent = namedtuple("GCPEvent", ["headers"])
event = GCPEvent(headers={"sentry-trace": "%s"})
def cloud_function(functionhandler, event):
sentry_sdk.capture_message("hi")
x = 3/0
return "3"
"""
% sentry_trace_header
)
+ FUNCTIONS_PRELUDE
+ dedent(
"""
init_sdk(traces_sample_rate=None)  # this is the default, just added for clarity
gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
"""
)
)
(msg_event, error_event) = events
assert "trace" in msg_event["contexts"]
assert "trace_id" in msg_event["contexts"]["trace"]
assert "trace" in error_event["contexts"]
assert "trace_id" in error_event["contexts"]["trace"]
assert (
msg_event["contexts"]["trace"]["trace_id"]
== error_event["contexts"]["trace"]["trace_id"]
== "471a43a4192642f0b136d5159a501701"
)
sentry-python-1.39.2/tests/integrations/gql/ 0000775 0000000 0000000 00000000000 14547447232 0021120 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/gql/__init__.py 0000664 0000000 0000000 00000000052 14547447232 0023226 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("gql")
sentry-python-1.39.2/tests/integrations/gql/test_gql.py 0000664 0000000 0000000 00000016306 14547447232 0023322 0 ustar 00root root 0000000 0000000 import pytest
import responses
from gql import gql
from gql import Client
from gql.transport.exceptions import TransportQueryError
from gql.transport.requests import RequestsHTTPTransport
from graphql import DocumentNode
from sentry_sdk.integrations.gql import GQLIntegration
from unittest.mock import MagicMock, patch
class _MockClientBase(MagicMock):
"""
Mocked version of GQL Client class, following same spec as GQL Client.
"""
def __init__(self, *args, **kwargs):
kwargs["spec"] = Client
super().__init__(*args, **kwargs)
transport = MagicMock()
@responses.activate
def _execute_mock_query(response_json):
    """Execute a fixed example query against a mocked GraphQL server and return the result."""
url = "http://example.com/graphql"
query_string = """
query Example {
example
}
"""
# Mock the GraphQL server response
responses.add(
method=responses.POST,
url=url,
json=response_json,
status=200,
)
transport = RequestsHTTPTransport(url=url)
client = Client(transport=transport)
query = gql(query_string)
return client.execute(query)
def _make_erroneous_query(capture_events):
"""
Make an erroneous GraphQL query, and assert that the error was reraised, that
exactly one event was recorded, and that the exception recorded was a
TransportQueryError. Then, return the event to allow further verifications.
"""
events = capture_events()
response_json = {"errors": ["something bad happened"]}
with pytest.raises(TransportQueryError):
_execute_mock_query(response_json)
assert (
len(events) == 1
), "the sdk captured %d events, but 1 event was expected" % len(events)
(event,) = events
(exception,) = event["exception"]["values"]
assert (
exception["type"] == "TransportQueryError"
), "%s was captured, but we expected a TransportQueryError" % exception(type)
assert "request" in event
return event
def test_gql_init(sentry_init):
"""
Integration test to ensure we can initialize the SDK with the GQL Integration
"""
sentry_init(integrations=[GQLIntegration()])
@patch("sentry_sdk.integrations.gql.Hub")
def test_setup_once_patches_execute_and_patched_function_calls_original(_):
"""
Unit test which ensures the following:
1. The GQLIntegration setup_once function patches the gql.Client.execute method
2. The patched gql.Client.execute method still calls the original method, and it
forwards its arguments to the original method.
3. The patched gql.Client.execute method returns the same value that the original
method returns.
"""
original_method_return_value = MagicMock()
class OriginalMockClient(_MockClientBase):
"""
This mock client always returns the mock original_method_return_value when a query
is executed. This can be used to simulate successful GraphQL queries.
"""
execute = MagicMock(
spec=Client.execute, return_value=original_method_return_value
)
original_execute_method = OriginalMockClient.execute
with patch(
"sentry_sdk.integrations.gql.gql.Client", new=OriginalMockClient
) as PatchedMockClient: # noqa: N806
# Below line should patch the PatchedMockClient with Sentry SDK magic
GQLIntegration.setup_once()
# We expect GQLIntegration.setup_once to patch the execute method.
assert (
PatchedMockClient.execute is not original_execute_method
), "execute method not patched"
# Now, let's instantiate a client and send it a query. The original execute should still get called.
mock_query = MagicMock(spec=DocumentNode)
client_instance = PatchedMockClient()
patched_method_return_value = client_instance.execute(mock_query)
# Here, we check that the original execute was called
original_execute_method.assert_called_once_with(client_instance, mock_query)
# Also, let's verify that the patched execute returns the expected value.
assert (
patched_method_return_value is original_method_return_value
), "pathced execute method returns a different value than the original execute method"
@patch("sentry_sdk.integrations.gql.event_from_exception")
@patch("sentry_sdk.integrations.gql.Hub")
def test_patched_gql_execute_captures_and_reraises_graphql_exception(
mock_hub, mock_event_from_exception
):
"""
Unit test which ensures that when calling the execute method results in a
TransportQueryError (which gql raises when a GraphQL error occurs), the patched
method captures the event on the current Hub and reraises the error.
"""
mock_event_from_exception.return_value = (dict(), MagicMock())
class OriginalMockClient(_MockClientBase):
"""
This mock client always raises a TransportQueryError when a GraphQL query is attempted.
This simulates a GraphQL query which results in errors.
"""
execute = MagicMock(
spec=Client.execute, side_effect=TransportQueryError("query failed")
)
with patch(
"sentry_sdk.integrations.gql.gql.Client", new=OriginalMockClient
) as PatchedMockClient: # noqa: N806
# Below line should patch the PatchedMockClient with Sentry SDK magic
GQLIntegration.setup_once()
mock_query = MagicMock(spec=DocumentNode)
client_instance = PatchedMockClient()
# The error should still get raised even though we have instrumented the execute method.
with pytest.raises(TransportQueryError):
client_instance.execute(mock_query)
# However, we should have also captured the error on the hub.
mock_capture_event = mock_hub.current.capture_event
mock_capture_event.assert_called_once()
def test_real_gql_request_no_error(sentry_init, capture_events):
"""
Integration test verifying that the GQLIntegration works as expected with a successful query.
"""
sentry_init(integrations=[GQLIntegration()])
events = capture_events()
response_data = {"example": "This is the example"}
response_json = {"data": response_data}
result = _execute_mock_query(response_json)
assert (
result == response_data
), "client.execute returned a different value from what it received from the server"
assert (
len(events) == 0
), "the sdk captured an event, even though the query was successful"
def test_real_gql_request_with_error_no_pii(sentry_init, capture_events):
"""
Integration test verifying that the GQLIntegration works as expected with a query
resulting in a GraphQL error, and that PII is not sent.
"""
sentry_init(integrations=[GQLIntegration()])
event = _make_erroneous_query(capture_events)
assert "data" not in event["request"]
assert "response" not in event["contexts"]
def test_real_gql_request_with_error_with_pii(sentry_init, capture_events):
"""
Integration test verifying that the GQLIntegration works as expected with a query
resulting in a GraphQL error, and that PII is sent.
"""
sentry_init(integrations=[GQLIntegration()], send_default_pii=True)
event = _make_erroneous_query(capture_events)
assert "data" in event["request"]
assert "response" in event["contexts"]
sentry-python-1.39.2/tests/integrations/graphene/ 0000775 0000000 0000000 00000000000 14547447232 0022126 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/graphene/__init__.py 0000664 0000000 0000000 00000000153 14547447232 0024236 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("graphene")
pytest.importorskip("fastapi")
pytest.importorskip("flask")
sentry-python-1.39.2/tests/integrations/graphene/test_graphene_py3.py 0000664 0000000 0000000 00000013037 14547447232 0026127 0 ustar 00root root 0000000 0000000 from fastapi import FastAPI, Request
from fastapi.testclient import TestClient
from flask import Flask, request, jsonify
from graphene import ObjectType, String, Schema
from sentry_sdk.integrations.fastapi import FastApiIntegration
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.graphene import GrapheneIntegration
from sentry_sdk.integrations.starlette import StarletteIntegration
class Query(ObjectType):
hello = String(first_name=String(default_value="stranger"))
goodbye = String()
def resolve_hello(root, info, first_name): # noqa: N805
return "Hello {}!".format(first_name)
def resolve_goodbye(root, info): # noqa: N805
raise RuntimeError("oh no!")
def test_capture_request_if_available_and_send_pii_is_on_async(
sentry_init, capture_events
):
sentry_init(
send_default_pii=True,
integrations=[
GrapheneIntegration(),
FastApiIntegration(),
StarletteIntegration(),
],
)
events = capture_events()
schema = Schema(query=Query)
async_app = FastAPI()
@async_app.post("/graphql")
async def graphql_server_async(request: Request):
data = await request.json()
result = await schema.execute_async(data["query"])
return result.data
query = {"query": "query ErrorQuery {goodbye}"}
client = TestClient(async_app)
client.post("/graphql", json=query)
assert len(events) == 1
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "graphene"
assert event["request"]["api_target"] == "graphql"
assert event["request"]["data"] == query
def test_capture_request_if_available_and_send_pii_is_on_sync(
sentry_init, capture_events
):
sentry_init(
send_default_pii=True,
integrations=[GrapheneIntegration(), FlaskIntegration()],
)
events = capture_events()
schema = Schema(query=Query)
sync_app = Flask(__name__)
@sync_app.route("/graphql", methods=["POST"])
def graphql_server_sync():
data = request.get_json()
result = schema.execute(data["query"])
return jsonify(result.data), 200
query = {"query": "query ErrorQuery {goodbye}"}
client = sync_app.test_client()
client.post("/graphql", json=query)
assert len(events) == 1
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "graphene"
assert event["request"]["api_target"] == "graphql"
assert event["request"]["data"] == query
def test_do_not_capture_request_if_send_pii_is_off_async(sentry_init, capture_events):
sentry_init(
integrations=[
GrapheneIntegration(),
FastApiIntegration(),
StarletteIntegration(),
],
)
events = capture_events()
schema = Schema(query=Query)
async_app = FastAPI()
@async_app.post("/graphql")
async def graphql_server_async(request: Request):
data = await request.json()
result = await schema.execute_async(data["query"])
return result.data
query = {"query": "query ErrorQuery {goodbye}"}
client = TestClient(async_app)
client.post("/graphql", json=query)
assert len(events) == 1
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "graphene"
assert "data" not in event["request"]
assert "response" not in event["contexts"]
def test_do_not_capture_request_if_send_pii_is_off_sync(sentry_init, capture_events):
sentry_init(
integrations=[GrapheneIntegration(), FlaskIntegration()],
)
events = capture_events()
schema = Schema(query=Query)
sync_app = Flask(__name__)
@sync_app.route("/graphql", methods=["POST"])
def graphql_server_sync():
data = request.get_json()
result = schema.execute(data["query"])
return jsonify(result.data), 200
query = {"query": "query ErrorQuery {goodbye}"}
client = sync_app.test_client()
client.post("/graphql", json=query)
assert len(events) == 1
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "graphene"
assert "data" not in event["request"]
assert "response" not in event["contexts"]
def test_no_event_if_no_errors_async(sentry_init, capture_events):
sentry_init(
integrations=[
GrapheneIntegration(),
FastApiIntegration(),
StarletteIntegration(),
],
)
events = capture_events()
schema = Schema(query=Query)
async_app = FastAPI()
@async_app.post("/graphql")
async def graphql_server_async(request: Request):
data = await request.json()
result = await schema.execute_async(data["query"])
return result.data
query = {
"query": "query GreetingQuery { hello }",
}
client = TestClient(async_app)
client.post("/graphql", json=query)
assert len(events) == 0
def test_no_event_if_no_errors_sync(sentry_init, capture_events):
sentry_init(
integrations=[
GrapheneIntegration(),
FlaskIntegration(),
],
)
events = capture_events()
schema = Schema(query=Query)
sync_app = Flask(__name__)
@sync_app.route("/graphql", methods=["POST"])
def graphql_server_sync():
data = request.get_json()
result = schema.execute(data["query"])
return jsonify(result.data), 200
query = {
"query": "query GreetingQuery { hello }",
}
client = sync_app.test_client()
client.post("/graphql", json=query)
assert len(events) == 0
sentry-python-1.39.2/tests/integrations/grpc/ 0000775 0000000 0000000 00000000000 14547447232 0021270 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/grpc/__init__.py 0000664 0000000 0000000 00000000261 14547447232 0023400 0 ustar 00root root 0000000 0000000 import sys
from pathlib import Path
import pytest
# For imports inside gRPC autogenerated code to work
sys.path.append(str(Path(__file__).parent))
pytest.importorskip("grpc")
sentry-python-1.39.2/tests/integrations/grpc/compile_test_services.sh 0000775 0000000 0000000 00000000623 14547447232 0026222 0 ustar 00root root 0000000 0000000 #!/usr/bin/env bash
# Run this script from the project root to generate the python code
TARGET_PATH=./tests/integrations/grpc
# Create python file
python -m grpc_tools.protoc \
--proto_path=$TARGET_PATH/protos/ \
--python_out=$TARGET_PATH/ \
--pyi_out=$TARGET_PATH/ \
--grpc_python_out=$TARGET_PATH/ \
$TARGET_PATH/protos/grpc_test_service.proto
echo Code generation successful
sentry-python-1.39.2/tests/integrations/grpc/grpc_test_service_pb2.py 0000664 0000000 0000000 00000003110 14547447232 0026112 0 ustar 00root root 0000000 0000000 # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: grpc_test_service.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17grpc_test_service.proto\x12\x10grpc_test_server\"\x1f\n\x0fgRPCTestMessage\x12\x0c\n\x04text\x18\x01 \x01(\t2\xf8\x02\n\x0fgRPCTestService\x12Q\n\tTestServe\x12!.grpc_test_server.gRPCTestMessage\x1a!.grpc_test_server.gRPCTestMessage\x12Y\n\x0fTestUnaryStream\x12!.grpc_test_server.gRPCTestMessage\x1a!.grpc_test_server.gRPCTestMessage0\x01\x12\\\n\x10TestStreamStream\x12!.grpc_test_server.gRPCTestMessage\x1a!.grpc_test_server.gRPCTestMessage(\x01\x30\x01\x12Y\n\x0fTestStreamUnary\x12!.grpc_test_server.gRPCTestMessage\x1a!.grpc_test_server.gRPCTestMessage(\x01\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'grpc_test_service_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_globals['_GRPCTESTMESSAGE']._serialized_start=45
_globals['_GRPCTESTMESSAGE']._serialized_end=76
_globals['_GRPCTESTSERVICE']._serialized_start=79
_globals['_GRPCTESTSERVICE']._serialized_end=455
# @@protoc_insertion_point(module_scope)
sentry-python-1.39.2/tests/integrations/grpc/grpc_test_service_pb2.pyi 0000664 0000000 0000000 00000000605 14547447232 0026271 0 ustar 00root root 0000000 0000000 from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from typing import ClassVar as _ClassVar, Optional as _Optional
DESCRIPTOR: _descriptor.FileDescriptor
class gRPCTestMessage(_message.Message):
__slots__ = ["text"]
TEXT_FIELD_NUMBER: _ClassVar[int]
text: str
def __init__(self, text: _Optional[str] = ...) -> None: ...
sentry-python-1.39.2/tests/integrations/grpc/grpc_test_service_pb2_grpc.py 0000664 0000000 0000000 00000016605 14547447232 0027142 0 ustar 00root root 0000000 0000000 # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import grpc_test_service_pb2 as grpc__test__service__pb2
class gRPCTestServiceStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.TestServe = channel.unary_unary(
'/grpc_test_server.gRPCTestService/TestServe',
request_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
response_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
)
self.TestUnaryStream = channel.unary_stream(
'/grpc_test_server.gRPCTestService/TestUnaryStream',
request_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
response_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
)
self.TestStreamStream = channel.stream_stream(
'/grpc_test_server.gRPCTestService/TestStreamStream',
request_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
response_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
)
self.TestStreamUnary = channel.stream_unary(
'/grpc_test_server.gRPCTestService/TestStreamUnary',
request_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
response_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
)
class gRPCTestServiceServicer(object):
"""Missing associated documentation comment in .proto file."""
def TestServe(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def TestUnaryStream(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def TestStreamStream(self, request_iterator, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def TestStreamUnary(self, request_iterator, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_gRPCTestServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'TestServe': grpc.unary_unary_rpc_method_handler(
servicer.TestServe,
request_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
response_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
),
'TestUnaryStream': grpc.unary_stream_rpc_method_handler(
servicer.TestUnaryStream,
request_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
response_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
),
'TestStreamStream': grpc.stream_stream_rpc_method_handler(
servicer.TestStreamStream,
request_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
response_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
),
'TestStreamUnary': grpc.stream_unary_rpc_method_handler(
servicer.TestStreamUnary,
request_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
response_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'grpc_test_server.gRPCTestService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class gRPCTestService(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def TestServe(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/grpc_test_server.gRPCTestService/TestServe',
grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
grpc__test__service__pb2.gRPCTestMessage.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def TestUnaryStream(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_stream(request, target, '/grpc_test_server.gRPCTestService/TestUnaryStream',
grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
grpc__test__service__pb2.gRPCTestMessage.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def TestStreamStream(request_iterator,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.stream_stream(request_iterator, target, '/grpc_test_server.gRPCTestService/TestStreamStream',
grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
grpc__test__service__pb2.gRPCTestMessage.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def TestStreamUnary(request_iterator,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.stream_unary(request_iterator, target, '/grpc_test_server.gRPCTestService/TestStreamUnary',
grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
grpc__test__service__pb2.gRPCTestMessage.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
sentry-python-1.39.2/tests/integrations/grpc/protos/ 0000775 0000000 0000000 00000000000 14547447232 0022616 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/grpc/protos/grpc_test_service.proto 0000664 0000000 0000000 00000000631 14547447232 0027415 0 ustar 00root root 0000000 0000000 syntax = "proto3";
package grpc_test_server;
service gRPCTestService{
rpc TestServe(gRPCTestMessage) returns (gRPCTestMessage);
rpc TestUnaryStream(gRPCTestMessage) returns (stream gRPCTestMessage);
rpc TestStreamStream(stream gRPCTestMessage) returns (stream gRPCTestMessage);
rpc TestStreamUnary(stream gRPCTestMessage) returns (gRPCTestMessage);
}
message gRPCTestMessage {
string text = 1;
}
sentry-python-1.39.2/tests/integrations/grpc/test_grpc.py 0000664 0000000 0000000 00000024234 14547447232 0023641 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import os
from typing import List, Optional
from concurrent import futures
from unittest.mock import Mock
import grpc
import pytest
from sentry_sdk import Hub, start_transaction
from sentry_sdk.consts import OP
from sentry_sdk.integrations.grpc import GRPCIntegration
from tests.integrations.grpc.grpc_test_service_pb2 import gRPCTestMessage
from tests.integrations.grpc.grpc_test_service_pb2_grpc import (
gRPCTestServiceServicer,
add_gRPCTestServiceServicer_to_server,
gRPCTestServiceStub,
)
PORT = 50051
PORT += os.getpid() % 100 # avoid port conflicts when running tests in parallel
@pytest.mark.forked
def test_grpc_server_starts_transaction(sentry_init, capture_events_forksafe):
sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
events = capture_events_forksafe()
server = _set_up()
with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
stub = gRPCTestServiceStub(channel)
stub.TestServe(gRPCTestMessage(text="test"))
_tear_down(server=server)
events.write_file.close()
event = events.read_event()
span = event["spans"][0]
assert event["type"] == "transaction"
assert event["transaction_info"] == {
"source": "custom",
}
assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
assert span["op"] == "test"
@pytest.mark.forked
def test_grpc_server_other_interceptors(sentry_init, capture_events_forksafe):
"""Ensure compatibility with additional server interceptors."""
sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
events = capture_events_forksafe()
mock_intercept = lambda continuation, handler_call_details: continuation(
handler_call_details
)
mock_interceptor = Mock()
mock_interceptor.intercept_service.side_effect = mock_intercept
server = _set_up(interceptors=[mock_interceptor])
with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
stub = gRPCTestServiceStub(channel)
stub.TestServe(gRPCTestMessage(text="test"))
_tear_down(server=server)
mock_interceptor.intercept_service.assert_called_once()
events.write_file.close()
event = events.read_event()
span = event["spans"][0]
assert event["type"] == "transaction"
assert event["transaction_info"] == {
"source": "custom",
}
assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
assert span["op"] == "test"
@pytest.mark.forked
def test_grpc_server_continues_transaction(sentry_init, capture_events_forksafe):
sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
events = capture_events_forksafe()
server = _set_up()
with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
stub = gRPCTestServiceStub(channel)
with start_transaction() as transaction:
metadata = (
(
"baggage",
"sentry-trace_id={trace_id},sentry-environment=test,"
"sentry-transaction=test-transaction,sentry-sample_rate=1.0".format(
trace_id=transaction.trace_id
),
),
(
"sentry-trace",
"{trace_id}-{parent_span_id}-{sampled}".format(
trace_id=transaction.trace_id,
parent_span_id=transaction.span_id,
sampled=1,
),
),
)
stub.TestServe(gRPCTestMessage(text="test"), metadata=metadata)
_tear_down(server=server)
events.write_file.close()
event = events.read_event()
span = event["spans"][0]
assert event["type"] == "transaction"
assert event["transaction_info"] == {
"source": "custom",
}
assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
assert span["op"] == "test"
@pytest.mark.forked
def test_grpc_client_starts_span(sentry_init, capture_events_forksafe):
sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
events = capture_events_forksafe()
server = _set_up()
with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
stub = gRPCTestServiceStub(channel)
with start_transaction():
stub.TestServe(gRPCTestMessage(text="test"))
_tear_down(server=server)
events.write_file.close()
events.read_event()
local_transaction = events.read_event()
span = local_transaction["spans"][0]
assert len(local_transaction["spans"]) == 1
assert span["op"] == OP.GRPC_CLIENT
assert (
span["description"]
== "unary unary call to /grpc_test_server.gRPCTestService/TestServe"
)
assert span["data"] == {
"type": "unary unary",
"method": "/grpc_test_server.gRPCTestService/TestServe",
"code": "OK",
}
@pytest.mark.forked
def test_grpc_client_unary_stream_starts_span(sentry_init, capture_events_forksafe):
sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
events = capture_events_forksafe()
server = _set_up()
with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
stub = gRPCTestServiceStub(channel)
with start_transaction():
            list(stub.TestUnaryStream(gRPCTestMessage(text="test")))
_tear_down(server=server)
events.write_file.close()
local_transaction = events.read_event()
span = local_transaction["spans"][0]
assert len(local_transaction["spans"]) == 1
assert span["op"] == OP.GRPC_CLIENT
assert (
span["description"]
== "unary stream call to /grpc_test_server.gRPCTestService/TestUnaryStream"
)
assert span["data"] == {
"type": "unary stream",
"method": "/grpc_test_server.gRPCTestService/TestUnaryStream",
}
# using unittest.mock.Mock not possible because grpc verifies
# that the interceptor is of the correct type
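# the counter is kept on the class so the test can assert on it after the call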
class MockClientInterceptor(grpc.UnaryUnaryClientInterceptor):
call_counter = 0
def intercept_unary_unary(self, continuation, client_call_details, request):
self.__class__.call_counter += 1
return continuation(client_call_details, request)
@pytest.mark.forked
def test_grpc_client_other_interceptor(sentry_init, capture_events_forksafe):
"""Ensure compatibility with additional client interceptors."""
sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
events = capture_events_forksafe()
server = _set_up()
with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
channel = grpc.intercept_channel(channel, MockClientInterceptor())
stub = gRPCTestServiceStub(channel)
with start_transaction():
stub.TestServe(gRPCTestMessage(text="test"))
_tear_down(server=server)
assert MockClientInterceptor.call_counter == 1
events.write_file.close()
events.read_event()
local_transaction = events.read_event()
span = local_transaction["spans"][0]
assert len(local_transaction["spans"]) == 1
assert span["op"] == OP.GRPC_CLIENT
assert (
span["description"]
== "unary unary call to /grpc_test_server.gRPCTestService/TestServe"
)
assert span["data"] == {
"type": "unary unary",
"method": "/grpc_test_server.gRPCTestService/TestServe",
"code": "OK",
}
@pytest.mark.forked
def test_grpc_client_and_servers_interceptors_integration(
sentry_init, capture_events_forksafe
):
sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
events = capture_events_forksafe()
server = _set_up()
with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
stub = gRPCTestServiceStub(channel)
with start_transaction():
stub.TestServe(gRPCTestMessage(text="test"))
_tear_down(server=server)
events.write_file.close()
server_transaction = events.read_event()
local_transaction = events.read_event()
assert (
server_transaction["contexts"]["trace"]["trace_id"]
== local_transaction["contexts"]["trace"]["trace_id"]
)
@pytest.mark.forked
def test_stream_stream(sentry_init):
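    """Test to verify stream-stream works.
    Tracing not supported for it yet.
    """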
sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
_set_up()
with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
stub = gRPCTestServiceStub(channel)
response_iterator = stub.TestStreamStream(iter((gRPCTestMessage(text="test"),)))
for response in response_iterator:
assert response.text == "test"
def test_stream_unary(sentry_init):
"""Test to verify stream-stream works.
Tracing not supported for it yet.
"""
sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
_set_up()
with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
stub = gRPCTestServiceStub(channel)
response = stub.TestStreamUnary(iter((gRPCTestMessage(text="test"),)))
assert response.text == "test"
def _set_up(interceptors: Optional[List[grpc.ServerInterceptor]] = None):
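    """Start a gRPC test server on the shared PORT, optionally with interceptors."""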
server = grpc.server(
futures.ThreadPoolExecutor(max_workers=2),
interceptors=interceptors,
)
add_gRPCTestServiceServicer_to_server(TestService(), server)
server.add_insecure_port("[::]:{}".format(PORT))
server.start()
return server
def _tear_down(server: grpc.Server):
server.stop(None)
def _find_name(request):
return request.__class__
class TestService(gRPCTestServiceServicer):
events = []
@staticmethod
def TestServe(request, context): # noqa: N802
hub = Hub.current
with hub.start_span(op="test", description="test"):
pass
return gRPCTestMessage(text=request.text)
@staticmethod
def TestUnaryStream(request, context): # noqa: N802
for _ in range(3):
yield gRPCTestMessage(text=request.text)
@staticmethod
def TestStreamStream(request, context): # noqa: N802
for r in request:
yield r
@staticmethod
def TestStreamUnary(request, context): # noqa: N802
requests = [r for r in request]
return requests.pop()
sentry-python-1.39.2/tests/integrations/grpc/test_grpc_aio.py 0000664 0000000 0000000 00000017603 14547447232 0024473 0 ustar 00root root 0000000 0000000 from __future__ import absolute_import
import asyncio
import os
import grpc
import pytest
import pytest_asyncio
import sentry_sdk
from sentry_sdk import Hub, start_transaction
from sentry_sdk.consts import OP
from sentry_sdk.integrations.grpc import GRPCIntegration
from tests.integrations.grpc.grpc_test_service_pb2 import gRPCTestMessage
from tests.integrations.grpc.grpc_test_service_pb2_grpc import (
gRPCTestServiceServicer,
add_gRPCTestServiceServicer_to_server,
gRPCTestServiceStub,
)
AIO_PORT = 50052
AIO_PORT += os.getpid() % 100 # avoid port conflicts when running tests in parallel
@pytest.fixture(scope="function")
def event_loop(request):
"""Create an instance of the default event loop for each test case."""
loop = asyncio.new_event_loop()
yield loop
loop.close()
@pytest_asyncio.fixture(scope="function")
async def grpc_server(sentry_init, event_loop):
sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
server = grpc.aio.server()
server.add_insecure_port("[::]:{}".format(AIO_PORT))
add_gRPCTestServiceServicer_to_server(TestService, server)
await event_loop.create_task(server.start())
try:
yield server
finally:
await server.stop(None)
@pytest.mark.asyncio
async def test_grpc_server_starts_transaction(capture_events, grpc_server):
events = capture_events()
async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
stub = gRPCTestServiceStub(channel)
await stub.TestServe(gRPCTestMessage(text="test"))
(event,) = events
span = event["spans"][0]
assert event["type"] == "transaction"
assert event["transaction_info"] == {
"source": "custom",
}
assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
assert span["op"] == "test"
@pytest.mark.asyncio
async def test_grpc_server_continues_transaction(capture_events, grpc_server):
events = capture_events()
async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
stub = gRPCTestServiceStub(channel)
with sentry_sdk.start_transaction() as transaction:
metadata = (
(
"baggage",
"sentry-trace_id={trace_id},sentry-environment=test,"
"sentry-transaction=test-transaction,sentry-sample_rate=1.0".format(
trace_id=transaction.trace_id
),
),
(
"sentry-trace",
"{trace_id}-{parent_span_id}-{sampled}".format(
trace_id=transaction.trace_id,
parent_span_id=transaction.span_id,
sampled=1,
),
),
)
await stub.TestServe(gRPCTestMessage(text="test"), metadata=metadata)
(event, _) = events
span = event["spans"][0]
assert event["type"] == "transaction"
assert event["transaction_info"] == {
"source": "custom",
}
assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
assert span["op"] == "test"
@pytest.mark.asyncio
async def test_grpc_server_exception(capture_events, grpc_server):
events = capture_events()
async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
stub = gRPCTestServiceStub(channel)
try:
await stub.TestServe(gRPCTestMessage(text="exception"))
raise AssertionError()
except Exception:
pass
(event, _) = events
assert event["exception"]["values"][0]["type"] == "TestService.TestException"
assert event["exception"]["values"][0]["value"] == "test"
assert event["exception"]["values"][0]["mechanism"]["handled"] is False
assert event["exception"]["values"][0]["mechanism"]["type"] == "grpc"
@pytest.mark.asyncio
async def test_grpc_server_abort(capture_events, grpc_server):
events = capture_events()
async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
stub = gRPCTestServiceStub(channel)
try:
await stub.TestServe(gRPCTestMessage(text="abort"))
raise AssertionError()
except Exception:
pass
assert len(events) == 1
@pytest.mark.asyncio
async def test_grpc_client_starts_span(
grpc_server, sentry_init, capture_events_forksafe
):
events = capture_events_forksafe()
async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
stub = gRPCTestServiceStub(channel)
with start_transaction():
await stub.TestServe(gRPCTestMessage(text="test"))
events.write_file.close()
events.read_event()
local_transaction = events.read_event()
span = local_transaction["spans"][0]
assert len(local_transaction["spans"]) == 1
assert span["op"] == OP.GRPC_CLIENT
assert (
span["description"]
== "unary unary call to /grpc_test_server.gRPCTestService/TestServe"
)
assert span["data"] == {
"type": "unary unary",
"method": "/grpc_test_server.gRPCTestService/TestServe",
"code": "OK",
}
@pytest.mark.asyncio
async def test_grpc_client_unary_stream_starts_span(
grpc_server, capture_events_forksafe
):
events = capture_events_forksafe()
async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
stub = gRPCTestServiceStub(channel)
with start_transaction():
response = stub.TestUnaryStream(gRPCTestMessage(text="test"))
[_ async for _ in response]
events.write_file.close()
local_transaction = events.read_event()
span = local_transaction["spans"][0]
assert len(local_transaction["spans"]) == 1
assert span["op"] == OP.GRPC_CLIENT
assert (
span["description"]
== "unary stream call to /grpc_test_server.gRPCTestService/TestUnaryStream"
)
assert span["data"] == {
"type": "unary stream",
"method": "/grpc_test_server.gRPCTestService/TestUnaryStream",
}
@pytest.mark.asyncio
async def test_stream_stream(grpc_server):
"""Test to verify stream-stream works.
Tracing not supported for it yet.
"""
async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
stub = gRPCTestServiceStub(channel)
response = stub.TestStreamStream((gRPCTestMessage(text="test"),))
async for r in response:
assert r.text == "test"
@pytest.mark.asyncio
async def test_stream_unary(grpc_server):
"""Test to verify stream-stream works.
Tracing not supported for it yet.
"""
async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
stub = gRPCTestServiceStub(channel)
response = await stub.TestStreamUnary((gRPCTestMessage(text="test"),))
assert response.text == "test"
class TestService(gRPCTestServiceServicer):
class TestException(Exception):
def __init__(self):
super().__init__("test")
@classmethod
async def TestServe(cls, request, context): # noqa: N802
hub = Hub.current
with hub.start_span(op="test", description="test"):
pass
if request.text == "exception":
raise cls.TestException()
if request.text == "abort":
await context.abort(grpc.StatusCode.ABORTED)
return gRPCTestMessage(text=request.text)
@classmethod
async def TestUnaryStream(cls, request, context): # noqa: N802
for _ in range(3):
yield gRPCTestMessage(text=request.text)
@classmethod
async def TestStreamStream(cls, request, context): # noqa: N802
async for r in request:
yield r
@classmethod
async def TestStreamUnary(cls, request, context): # noqa: N802
requests = [r async for r in request]
return requests.pop()
sentry-python-1.39.2/tests/integrations/httpx/ 0000775 0000000 0000000 00000000000 14547447232 0021504 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/httpx/__init__.py 0000664 0000000 0000000 00000000054 14547447232 0023614 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("httpx")
sentry-python-1.39.2/tests/integrations/httpx/test_httpx.py 0000664 0000000 0000000 00000020611 14547447232 0024264 0 ustar 00root root 0000000 0000000 import asyncio
import pytest
import httpx
import responses
from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import MATCH_ALL, SPANDATA
from sentry_sdk.integrations.httpx import HttpxIntegration
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
@pytest.mark.parametrize(
"httpx_client",
(httpx.Client(), httpx.AsyncClient()),
)
def test_crumb_capture_and_hint(sentry_init, capture_events, httpx_client):
def before_breadcrumb(crumb, hint):
crumb["data"]["extra"] = "foo"
return crumb
sentry_init(integrations=[HttpxIntegration()], before_breadcrumb=before_breadcrumb)
url = "http://example.com/"
responses.add(responses.GET, url, status=200)
with start_transaction():
events = capture_events()
if asyncio.iscoroutinefunction(httpx_client.get):
response = asyncio.get_event_loop().run_until_complete(
httpx_client.get(url)
)
else:
response = httpx_client.get(url)
assert response.status_code == 200
capture_message("Testing!")
(event,) = events
crumb = event["breadcrumbs"]["values"][0]
assert crumb["type"] == "http"
assert crumb["category"] == "httplib"
assert crumb["data"] == {
"url": url,
SPANDATA.HTTP_METHOD: "GET",
SPANDATA.HTTP_FRAGMENT: "",
SPANDATA.HTTP_QUERY: "",
SPANDATA.HTTP_STATUS_CODE: 200,
"reason": "OK",
"extra": "foo",
}
@pytest.mark.parametrize(
"httpx_client",
(httpx.Client(), httpx.AsyncClient()),
)
def test_outgoing_trace_headers(sentry_init, httpx_client):
sentry_init(traces_sample_rate=1.0, integrations=[HttpxIntegration()])
url = "http://example.com/"
responses.add(responses.GET, url, status=200)
with start_transaction(
name="/interactions/other-dogs/new-dog",
op="greeting.sniff",
trace_id="01234567890123456789012345678901",
) as transaction:
if asyncio.iscoroutinefunction(httpx_client.get):
response = asyncio.get_event_loop().run_until_complete(
httpx_client.get(url)
)
else:
response = httpx_client.get(url)
request_span = transaction._span_recorder.spans[-1]
assert response.request.headers[
"sentry-trace"
] == "{trace_id}-{parent_span_id}-{sampled}".format(
trace_id=transaction.trace_id,
parent_span_id=request_span.span_id,
sampled=1,
)
@pytest.mark.parametrize(
"httpx_client",
(httpx.Client(), httpx.AsyncClient()),
)
def test_outgoing_trace_headers_append_to_baggage(sentry_init, httpx_client):
sentry_init(
traces_sample_rate=1.0,
integrations=[HttpxIntegration()],
release="d08ebdb9309e1b004c6f52202de58a09c2268e42",
)
url = "http://example.com/"
responses.add(responses.GET, url, status=200)
with start_transaction(
name="/interactions/other-dogs/new-dog",
op="greeting.sniff",
trace_id="01234567890123456789012345678901",
) as transaction:
if asyncio.iscoroutinefunction(httpx_client.get):
response = asyncio.get_event_loop().run_until_complete(
httpx_client.get(url, headers={"baGGage": "custom=data"})
)
else:
response = httpx_client.get(url, headers={"baGGage": "custom=data"})
request_span = transaction._span_recorder.spans[-1]
assert response.request.headers[
"sentry-trace"
] == "{trace_id}-{parent_span_id}-{sampled}".format(
trace_id=transaction.trace_id,
parent_span_id=request_span.span_id,
sampled=1,
)
assert (
response.request.headers["baggage"]
== "custom=data,sentry-trace_id=01234567890123456789012345678901,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true"
)
@pytest.mark.parametrize(
"httpx_client,trace_propagation_targets,url,trace_propagated",
[
[
httpx.Client(),
None,
"https://example.com/",
False,
],
[
httpx.Client(),
[],
"https://example.com/",
False,
],
[
httpx.Client(),
[MATCH_ALL],
"https://example.com/",
True,
],
[
httpx.Client(),
["https://example.com/"],
"https://example.com/",
True,
],
[
httpx.Client(),
["https://example.com/"],
"https://example.com",
False,
],
[
httpx.Client(),
["https://example.com"],
"https://example.com",
True,
],
[
httpx.Client(),
["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
"https://example.net",
False,
],
[
httpx.Client(),
["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
"https://good.example.net",
True,
],
[
httpx.Client(),
["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
"https://good.example.net/some/thing",
True,
],
[
httpx.AsyncClient(),
None,
"https://example.com/",
False,
],
[
httpx.AsyncClient(),
[],
"https://example.com/",
False,
],
[
httpx.AsyncClient(),
[MATCH_ALL],
"https://example.com/",
True,
],
[
httpx.AsyncClient(),
["https://example.com/"],
"https://example.com/",
True,
],
[
httpx.AsyncClient(),
["https://example.com/"],
"https://example.com",
False,
],
[
httpx.AsyncClient(),
["https://example.com"],
"https://example.com",
True,
],
[
httpx.AsyncClient(),
["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
"https://example.net",
False,
],
[
httpx.AsyncClient(),
["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
"https://good.example.net",
True,
],
[
httpx.AsyncClient(),
["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
"https://good.example.net/some/thing",
True,
],
],
)
def test_option_trace_propagation_targets(
sentry_init,
httpx_client,
httpx_mock, # this comes from pytest-httpx
trace_propagation_targets,
url,
trace_propagated,
):
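    """Only URLs matching trace_propagation_targets get the sentry-trace header."""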
httpx_mock.add_response()
sentry_init(
release="test",
trace_propagation_targets=trace_propagation_targets,
traces_sample_rate=1.0,
integrations=[HttpxIntegration()],
)
if asyncio.iscoroutinefunction(httpx_client.get):
asyncio.get_event_loop().run_until_complete(httpx_client.get(url))
else:
httpx_client.get(url)
request_headers = httpx_mock.get_request().headers
if trace_propagated:
assert "sentry-trace" in request_headers
else:
assert "sentry-trace" not in request_headers
@pytest.mark.tests_internal_exceptions
def test_omit_url_data_if_parsing_fails(sentry_init, capture_events):
sentry_init(integrations=[HttpxIntegration()])
httpx_client = httpx.Client()
url = "http://example.com"
responses.add(responses.GET, url, status=200)
events = capture_events()
with mock.patch(
"sentry_sdk.integrations.httpx.parse_url",
side_effect=ValueError,
):
response = httpx_client.get(url)
assert response.status_code == 200
capture_message("Testing!")
(event,) = events
assert event["breadcrumbs"]["values"][0]["data"] == {
SPANDATA.HTTP_METHOD: "GET",
SPANDATA.HTTP_STATUS_CODE: 200,
"reason": "OK",
# no url related data
}
sentry-python-1.39.2/tests/integrations/huey/ 0000775 0000000 0000000 00000000000 14547447232 0021307 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/huey/__init__.py 0000664 0000000 0000000 00000000053 14547447232 0023416 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("huey")
sentry-python-1.39.2/tests/integrations/huey/test_huey.py 0000664 0000000 0000000 00000010763 14547447232 0023701 0 ustar 00root root 0000000 0000000 import pytest
from decimal import DivisionByZero
from sentry_sdk import start_transaction
from sentry_sdk.integrations.huey import HueyIntegration
from sentry_sdk.utils import parse_version
from huey import __version__ as HUEY_VERSION
from huey.api import MemoryHuey, Result
from huey.exceptions import RetryTask
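# parse_version turns the "x.y.z" version string into a comparable tuple,
# e.g. (2, 5, 0), so checks like HUEY_VERSION < (2, 5) work below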
HUEY_VERSION = parse_version(HUEY_VERSION)
@pytest.fixture
def init_huey(sentry_init):
def inner():
sentry_init(
integrations=[HueyIntegration()],
traces_sample_rate=1.0,
send_default_pii=True,
debug=True,
)
return MemoryHuey(name="sentry_sdk")
return inner
@pytest.fixture(autouse=True)
def flush_huey_tasks(init_huey):
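    """Ensure every test starts with an empty in-memory queue."""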
huey = init_huey()
huey.flush()
def execute_huey_task(huey, func, *args, **kwargs):
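    """Enqueue the task, dequeue and execute it, and return the Result handle.

    Exceptions listed in the optional ``exceptions`` keyword argument are swallowed.
    """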
exceptions = kwargs.pop("exceptions", None)
result = func(*args, **kwargs)
task = huey.dequeue()
if exceptions is not None:
try:
huey.execute(task)
except exceptions:
pass
else:
huey.execute(task)
return result
def test_task_result(init_huey):
huey = init_huey()
@huey.task()
def increase(num):
return num + 1
result = increase(3)
assert isinstance(result, Result)
assert len(huey) == 1
task = huey.dequeue()
assert huey.execute(task) == 4
assert result.get() == 4
@pytest.mark.parametrize("task_fails", [True, False], ids=["error", "success"])
def test_task_transaction(capture_events, init_huey, task_fails):
huey = init_huey()
@huey.task()
def division(a, b):
return a / b
events = capture_events()
execute_huey_task(
huey, division, 1, int(not task_fails), exceptions=(DivisionByZero,)
)
if task_fails:
error_event = events.pop(0)
assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
assert error_event["exception"]["values"][0]["mechanism"]["type"] == "huey"
(event,) = events
assert event["type"] == "transaction"
assert event["transaction"] == "division"
assert event["transaction_info"] == {"source": "task"}
if task_fails:
assert event["contexts"]["trace"]["status"] == "internal_error"
else:
assert event["contexts"]["trace"]["status"] == "ok"
assert "huey_task_id" in event["tags"]
assert "huey_task_retry" in event["tags"]
def test_task_retry(capture_events, init_huey):
huey = init_huey()
context = {"retry": True}
@huey.task()
def retry_task(context):
if context["retry"]:
context["retry"] = False
raise RetryTask()
events = capture_events()
result = execute_huey_task(huey, retry_task, context)
(event,) = events
assert event["transaction"] == "retry_task"
assert event["tags"]["huey_task_id"] == result.task.id
assert len(huey) == 1
task = huey.dequeue()
huey.execute(task)
(event, _) = events
assert event["transaction"] == "retry_task"
assert event["tags"]["huey_task_id"] == result.task.id
assert len(huey) == 0
@pytest.mark.parametrize("lock_name", ["lock.a", "lock.b"], ids=["locked", "unlocked"])
@pytest.mark.skipif(HUEY_VERSION < (2, 5), reason="is_locked was added in 2.5")
def test_task_lock(capture_events, init_huey, lock_name):
huey = init_huey()
task_lock_name = "lock.a"
should_be_locked = task_lock_name == lock_name
@huey.task()
@huey.lock_task(task_lock_name)
def maybe_locked_task():
pass
events = capture_events()
with huey.lock_task(lock_name):
assert huey.is_locked(task_lock_name) == should_be_locked
result = execute_huey_task(huey, maybe_locked_task)
(event,) = events
assert event["transaction"] == "maybe_locked_task"
assert event["tags"]["huey_task_id"] == result.task.id
    assert event["contexts"]["trace"]["status"] == (
        "aborted" if should_be_locked else "ok"
    )
assert len(huey) == 0
def test_huey_enqueue(init_huey, capture_events):
huey = init_huey()
@huey.task(name="different_task_name")
def dummy_task():
pass
events = capture_events()
with start_transaction() as transaction:
dummy_task()
(event,) = events
assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
assert event["contexts"]["trace"]["span_id"] == transaction.span_id
assert len(event["spans"])
assert event["spans"][0]["op"] == "queue.submit.huey"
assert event["spans"][0]["description"] == "different_task_name"
sentry-python-1.39.2/tests/integrations/logging/ 0000775 0000000 0000000 00000000000 14547447232 0021763 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/logging/test_logging.py 0000664 0000000 0000000 00000015400 14547447232 0025022 0 ustar 00root root 0000000 0000000 # coding: utf-8
import sys
import pytest
import logging
import warnings
from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger
other_logger = logging.getLogger("testfoo")
logger = logging.getLogger(__name__)
@pytest.fixture(autouse=True)
def reset_level():
other_logger.setLevel(logging.DEBUG)
logger.setLevel(logging.DEBUG)
@pytest.mark.parametrize("logger", [logger, other_logger])
def test_logging_works_with_many_loggers(sentry_init, capture_events, logger):
sentry_init(integrations=[LoggingIntegration(event_level="ERROR")])
events = capture_events()
logger.info("bread")
logger.critical("LOL")
(event,) = events
assert event["level"] == "fatal"
assert not event["logentry"]["params"]
assert event["logentry"]["message"] == "LOL"
assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"])
@pytest.mark.parametrize("integrations", [None, [], [LoggingIntegration()]])
@pytest.mark.parametrize(
"kwargs", [{"exc_info": None}, {}, {"exc_info": 0}, {"exc_info": False}]
)
def test_logging_defaults(integrations, sentry_init, capture_events, kwargs):
sentry_init(integrations=integrations)
events = capture_events()
logger.info("bread")
logger.critical("LOL", **kwargs)
(event,) = events
assert event["level"] == "fatal"
assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"])
assert not any(
crumb["message"] == "LOL" for crumb in event["breadcrumbs"]["values"]
)
assert "threads" not in event
def test_logging_extra_data(sentry_init, capture_events):
sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
events = capture_events()
logger.info("bread", extra=dict(foo=42))
logger.critical("lol", extra=dict(bar=69))
(event,) = events
assert event["level"] == "fatal"
assert event["extra"] == {"bar": 69}
assert any(
crumb["message"] == "bread" and crumb["data"] == {"foo": 42}
for crumb in event["breadcrumbs"]["values"]
)
def test_logging_extra_data_integer_keys(sentry_init, capture_events):
sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
events = capture_events()
logger.critical("integer in extra keys", extra={1: 1})
(event,) = events
assert event["extra"] == {"1": 1}
@pytest.mark.xfail(sys.version_info[:2] == (3, 4), reason="buggy logging module")
def test_logging_stack(sentry_init, capture_events):
sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
events = capture_events()
logger.error("first", exc_info=True)
logger.error("second")
(
event_with,
event_without,
) = events
assert event_with["level"] == "error"
assert event_with["threads"]["values"][0]["stacktrace"]["frames"]
assert event_without["level"] == "error"
assert "threads" not in event_without
def test_logging_level(sentry_init, capture_events):
sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
events = capture_events()
logger.setLevel(logging.WARNING)
logger.error("hi")
(event,) = events
assert event["level"] == "error"
assert event["logentry"]["message"] == "hi"
del events[:]
logger.setLevel(logging.ERROR)
logger.warning("hi")
assert not events
def test_custom_log_level_names(sentry_init, capture_events):
levels = {
logging.DEBUG: "debug",
logging.INFO: "info",
logging.WARN: "warning",
logging.WARNING: "warning",
logging.ERROR: "error",
logging.CRITICAL: "fatal",
logging.FATAL: "fatal",
}
# set custom log level names
# fmt: off
logging.addLevelName(logging.DEBUG, u"custom level debüg: ")
# fmt: on
logging.addLevelName(logging.INFO, "")
logging.addLevelName(logging.WARN, "custom level warn: ")
logging.addLevelName(logging.WARNING, "custom level warning: ")
logging.addLevelName(logging.ERROR, None)
logging.addLevelName(logging.CRITICAL, "custom level critical: ")
logging.addLevelName(logging.FATAL, "custom level 🔥: ")
for logging_level, sentry_level in levels.items():
logger.setLevel(logging_level)
sentry_init(
integrations=[LoggingIntegration(event_level=logging_level)],
default_integrations=False,
)
events = capture_events()
logger.log(logging_level, "Trying level %s", logging_level)
assert events
assert events[0]["level"] == sentry_level
assert events[0]["logentry"]["message"] == "Trying level %s"
assert events[0]["logentry"]["params"] == [logging_level]
del events[:]
def test_logging_filters(sentry_init, capture_events):
sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
events = capture_events()
should_log = False
class MyFilter(logging.Filter):
def filter(self, record):
return should_log
logger.addFilter(MyFilter())
logger.error("hi")
assert not events
should_log = True
logger.error("hi")
(event,) = events
assert event["logentry"]["message"] == "hi"
def test_logging_captured_warnings(sentry_init, capture_events, recwarn):
sentry_init(
integrations=[LoggingIntegration(event_level="WARNING")],
default_integrations=False,
)
events = capture_events()
logging.captureWarnings(True)
warnings.warn("first", stacklevel=2)
warnings.warn("second", stacklevel=2)
logging.captureWarnings(False)
warnings.warn("third", stacklevel=2)
assert len(events) == 2
assert events[0]["level"] == "warning"
# Captured warnings start with the path where the warning was raised
assert "UserWarning: first" in events[0]["logentry"]["message"]
assert events[0]["logentry"]["params"] == []
assert events[1]["level"] == "warning"
assert "UserWarning: second" in events[1]["logentry"]["message"]
assert events[1]["logentry"]["params"] == []
# Using recwarn suppresses the "third" warning in the test output
assert len(recwarn) == 1
assert str(recwarn[0].message) == "third"
def test_ignore_logger(sentry_init, capture_events):
sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
events = capture_events()
ignore_logger("testfoo")
other_logger.error("hi")
assert not events
def test_ignore_logger_wildcard(sentry_init, capture_events):
sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
events = capture_events()
ignore_logger("testfoo.*")
nested_logger = logging.getLogger("testfoo.submodule")
logger.error("hi")
nested_logger.error("bye")
(event,) = events
assert event["logentry"]["message"] == "hi"
sentry-python-1.39.2/tests/integrations/loguru/ 0000775 0000000 0000000 00000000000 14547447232 0021652 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/loguru/__init__.py 0000664 0000000 0000000 00000000055 14547447232 0023763 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("loguru")
sentry-python-1.39.2/tests/integrations/loguru/test_loguru.py 0000664 0000000 0000000 00000006302 14547447232 0024601 0 ustar 00root root 0000000 0000000 import pytest
from loguru import logger
import sentry_sdk
from sentry_sdk.integrations.loguru import LoguruIntegration, LoggingLevels
logger.remove(0) # don't print to console
@pytest.mark.parametrize(
"level,created_event",
[
# None - no breadcrumb
# False - no event
# True - event created
(LoggingLevels.TRACE, None),
(LoggingLevels.DEBUG, None),
(LoggingLevels.INFO, False),
(LoggingLevels.SUCCESS, False),
(LoggingLevels.WARNING, False),
(LoggingLevels.ERROR, True),
(LoggingLevels.CRITICAL, True),
],
)
@pytest.mark.parametrize("disable_breadcrumbs", [True, False])
@pytest.mark.parametrize("disable_events", [True, False])
def test_just_log(
sentry_init,
capture_events,
level,
created_event,
disable_breadcrumbs,
disable_events,
):
sentry_init(
integrations=[
LoguruIntegration(
level=None if disable_breadcrumbs else LoggingLevels.INFO.value,
event_level=None if disable_events else LoggingLevels.ERROR.value,
)
],
default_integrations=False,
)
events = capture_events()
getattr(logger, level.name.lower())("test")
formatted_message = (
" | "
+ "{:9}".format(level.name.upper())
+ "| tests.integrations.loguru.test_loguru:test_just_log:46 - test"
)
if not created_event:
assert not events
breadcrumbs = sentry_sdk.Hub.current.scope._breadcrumbs
if (
not disable_breadcrumbs and created_event is not None
): # not None == not TRACE or DEBUG level
(breadcrumb,) = breadcrumbs
assert breadcrumb["level"] == level.name.lower()
assert breadcrumb["category"] == "tests.integrations.loguru.test_loguru"
assert breadcrumb["message"][23:] == formatted_message
else:
assert not breadcrumbs
return
if disable_events:
assert not events
return
(event,) = events
assert event["level"] == (level.name.lower())
assert event["logger"] == "tests.integrations.loguru.test_loguru"
assert event["logentry"]["message"][23:] == formatted_message
def test_breadcrumb_format(sentry_init, capture_events):
sentry_init(
integrations=[
LoguruIntegration(
level=LoggingLevels.INFO.value,
event_level=None,
breadcrumb_format="{message}",
)
],
default_integrations=False,
)
logger.info("test")
formatted_message = "test"
breadcrumbs = sentry_sdk.Hub.current.scope._breadcrumbs
(breadcrumb,) = breadcrumbs
assert breadcrumb["message"] == formatted_message
def test_event_format(sentry_init, capture_events):
sentry_init(
integrations=[
LoguruIntegration(
level=None,
event_level=LoggingLevels.ERROR.value,
event_format="{message}",
)
],
default_integrations=False,
)
events = capture_events()
logger.error("test")
formatted_message = "test"
(event,) = events
assert event["logentry"]["message"] == formatted_message
sentry-python-1.39.2/tests/integrations/modules/ 0000775 0000000 0000000 00000000000 14547447232 0022005 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/modules/test_modules.py 0000664 0000000 0000000 00000000557 14547447232 0025075 0 ustar 00root root 0000000 0000000 import sentry_sdk
from sentry_sdk.integrations.modules import ModulesIntegration
def test_basic(sentry_init, capture_events):
sentry_init(integrations=[ModulesIntegration()])
events = capture_events()
sentry_sdk.capture_exception(ValueError())
(event,) = events
assert "sentry-sdk" in event["modules"]
assert "pytest" in event["modules"]
sentry-python-1.39.2/tests/integrations/opentelemetry/ 0000775 0000000 0000000 00000000000 14547447232 0023231 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/opentelemetry/__init__.py 0000664 0000000 0000000 00000000064 14547447232 0025342 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("opentelemetry")
sentry-python-1.39.2/tests/integrations/opentelemetry/test_experimental.py 0000664 0000000 0000000 00000001741 14547447232 0027342 0 ustar 00root root 0000000 0000000 try:
# python 3.3 and above
from unittest.mock import MagicMock
except ImportError:
# python < 3.3
from mock import MagicMock
from sentry_sdk.integrations.opentelemetry.integration import OpenTelemetryIntegration
def test_integration_enabled_if_option_is_on(sentry_init):
OpenTelemetryIntegration.setup_once = MagicMock()
sentry_init(
_experiments={
"otel_powered_performance": True,
}
)
OpenTelemetryIntegration.setup_once.assert_called_once()
def test_integration_not_enabled_if_option_is_off(sentry_init):
OpenTelemetryIntegration.setup_once = MagicMock()
sentry_init(
_experiments={
"otel_powered_performance": False,
}
)
OpenTelemetryIntegration.setup_once.assert_not_called()
def test_integration_not_enabled_if_option_is_missing(sentry_init):
OpenTelemetryIntegration.setup_once = MagicMock()
sentry_init()
OpenTelemetryIntegration.setup_once.assert_not_called()
sentry-python-1.39.2/tests/integrations/opentelemetry/test_propagator.py 0000664 0000000 0000000 00000017417 14547447232 0027032 0 ustar 00root root 0000000 0000000 try:
from unittest import mock # python 3.3 and above
from unittest.mock import MagicMock
except ImportError:
import mock # python < 3.3
from mock import MagicMock
from opentelemetry.context import get_current
from opentelemetry.trace.propagation import get_current_span
from opentelemetry.trace import (
set_span_in_context,
TraceFlags,
SpanContext,
)
from sentry_sdk.integrations.opentelemetry.consts import (
SENTRY_BAGGAGE_KEY,
SENTRY_TRACE_KEY,
)
from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator
from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor
from sentry_sdk.tracing_utils import Baggage
def test_extract_no_context_no_sentry_trace_header():
"""
No context and NO Sentry trace data in getter.
Extract should return empty context.
"""
carrier = None
context = None
getter = MagicMock()
getter.get.return_value = None
modified_context = SentryPropagator().extract(carrier, context, getter)
assert modified_context == {}
def test_extract_context_no_sentry_trace_header():
"""
Context but NO Sentry trace data in getter.
Extract should return context as is.
"""
carrier = None
context = {"some": "value"}
getter = MagicMock()
getter.get.return_value = None
modified_context = SentryPropagator().extract(carrier, context, getter)
assert modified_context == context
def test_extract_empty_context_sentry_trace_header_no_baggage():
"""
    Empty context, Sentry trace data, but NO Baggage in getter.
Extract should return context that has empty baggage in it and also a NoopSpan with span_id and trace_id.
"""
carrier = None
context = {}
getter = MagicMock()
getter.get.side_effect = [
["1234567890abcdef1234567890abcdef-1234567890abcdef-1"],
None,
]
modified_context = SentryPropagator().extract(carrier, context, getter)
assert len(modified_context.keys()) == 3
assert modified_context[SENTRY_TRACE_KEY] == {
"trace_id": "1234567890abcdef1234567890abcdef",
"parent_span_id": "1234567890abcdef",
"parent_sampled": True,
}
assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == ""
span_context = get_current_span(modified_context).get_span_context()
assert span_context.span_id == int("1234567890abcdef", 16)
assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16)
def test_extract_context_sentry_trace_header_baggage():
"""
    Non-empty context plus Sentry trace data and Baggage in getter.
Extract should return context that has baggage in it and also a NoopSpan with span_id and trace_id.
"""
baggage_header = (
"other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
"sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
"sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;"
)
carrier = None
context = {"some": "value"}
getter = MagicMock()
getter.get.side_effect = [
["1234567890abcdef1234567890abcdef-1234567890abcdef-1"],
[baggage_header],
]
modified_context = SentryPropagator().extract(carrier, context, getter)
assert len(modified_context.keys()) == 4
assert modified_context[SENTRY_TRACE_KEY] == {
"trace_id": "1234567890abcdef1234567890abcdef",
"parent_span_id": "1234567890abcdef",
"parent_sampled": True,
}
assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == (
"sentry-trace_id=771a43a4192642f0b136d5159a501700,"
"sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
"sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie"
)
span_context = get_current_span(modified_context).get_span_context()
assert span_context.span_id == int("1234567890abcdef", 16)
assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16)
def test_inject_empty_otel_span_map():
"""
Empty otel_span_map.
So there is no sentry_span to be found in inject()
    and the function returns early without calling any setters.
"""
carrier = None
context = get_current()
setter = MagicMock()
setter.set = MagicMock()
span_context = SpanContext(
trace_id=int("1234567890abcdef1234567890abcdef", 16),
span_id=int("1234567890abcdef", 16),
trace_flags=TraceFlags(TraceFlags.SAMPLED),
is_remote=True,
)
span = MagicMock()
span.get_span_context.return_value = span_context
with mock.patch(
"sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
return_value=span,
):
full_context = set_span_in_context(span, context)
SentryPropagator().inject(carrier, full_context, setter)
setter.set.assert_not_called()
def test_inject_sentry_span_no_baggage():
"""
Inject a sentry span with no baggage.
"""
carrier = None
context = get_current()
setter = MagicMock()
setter.set = MagicMock()
trace_id = "1234567890abcdef1234567890abcdef"
span_id = "1234567890abcdef"
span_context = SpanContext(
trace_id=int(trace_id, 16),
span_id=int(span_id, 16),
trace_flags=TraceFlags(TraceFlags.SAMPLED),
is_remote=True,
)
span = MagicMock()
span.get_span_context.return_value = span_context
sentry_span = MagicMock()
sentry_span.to_traceparent = mock.Mock(
return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1"
)
sentry_span.containing_transaction.get_baggage = mock.Mock(return_value=None)
span_processor = SentrySpanProcessor()
span_processor.otel_span_map[span_id] = sentry_span
with mock.patch(
"sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
return_value=span,
):
full_context = set_span_in_context(span, context)
SentryPropagator().inject(carrier, full_context, setter)
setter.set.assert_called_once_with(
carrier,
"sentry-trace",
"1234567890abcdef1234567890abcdef-1234567890abcdef-1",
)
def test_inject_sentry_span_baggage():
"""
Inject a sentry span with baggage.
"""
carrier = None
context = get_current()
setter = MagicMock()
setter.set = MagicMock()
trace_id = "1234567890abcdef1234567890abcdef"
span_id = "1234567890abcdef"
span_context = SpanContext(
trace_id=int(trace_id, 16),
span_id=int(span_id, 16),
trace_flags=TraceFlags(TraceFlags.SAMPLED),
is_remote=True,
)
span = MagicMock()
span.get_span_context.return_value = span_context
sentry_span = MagicMock()
sentry_span.to_traceparent = mock.Mock(
return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1"
)
sentry_items = {
"sentry-trace_id": "771a43a4192642f0b136d5159a501700",
"sentry-public_key": "49d0f7386ad645858ae85020e393bef3",
"sentry-sample_rate": 0.01337,
"sentry-user_id": "Amélie",
}
baggage = Baggage(sentry_items=sentry_items)
sentry_span.containing_transaction.get_baggage = MagicMock(return_value=baggage)
span_processor = SentrySpanProcessor()
span_processor.otel_span_map[span_id] = sentry_span
with mock.patch(
"sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
return_value=span,
):
full_context = set_span_in_context(span, context)
SentryPropagator().inject(carrier, full_context, setter)
setter.set.assert_any_call(
carrier,
"sentry-trace",
"1234567890abcdef1234567890abcdef-1234567890abcdef-1",
)
setter.set.assert_any_call(
carrier,
"baggage",
baggage.serialize(),
)
sentry-python-1.39.2/tests/integrations/opentelemetry/test_span_processor.py 0000664 0000000 0000000 00000043243 14547447232 0027710 0 ustar 00root root 0000000 0000000 from datetime import datetime
from datetime import timezone
import time
import pytest
try:
from unittest import mock # python 3.3 and above
from unittest.mock import MagicMock
except ImportError:
import mock
from mock import MagicMock # python < 3.3
from sentry_sdk.integrations.opentelemetry.span_processor import (
SentrySpanProcessor,
link_trace_context_to_error_event,
)
from sentry_sdk.tracing import Span, Transaction
from opentelemetry.trace import SpanKind, SpanContext, Status, StatusCode
from sentry_sdk.tracing_utils import extract_sentrytrace_data
def test_is_sentry_span():
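    """Requests to the Sentry ingest endpoint itself must not create spans."""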
otel_span = MagicMock()
hub = MagicMock()
hub.client = None
span_processor = SentrySpanProcessor()
assert not span_processor._is_sentry_span(hub, otel_span)
client = MagicMock()
client.options = {"instrumenter": "otel"}
client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
hub.client = client
assert not span_processor._is_sentry_span(hub, otel_span)
otel_span.attributes = {
"http.url": "https://example.com",
}
assert not span_processor._is_sentry_span(hub, otel_span)
otel_span.attributes = {
"http.url": "https://o123456.ingest.sentry.io/api/123/envelope",
}
assert span_processor._is_sentry_span(hub, otel_span)
def test_get_otel_context():
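    """Span attributes and resource attributes are copied into the otel context dict."""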
otel_span = MagicMock()
otel_span.attributes = {"foo": "bar"}
otel_span.resource = MagicMock()
otel_span.resource.attributes = {"baz": "qux"}
span_processor = SentrySpanProcessor()
otel_context = span_processor._get_otel_context(otel_span)
assert otel_context == {
"attributes": {"foo": "bar"},
"resource": {"baz": "qux"},
}
def test_get_trace_data_with_span_and_trace():
otel_span = MagicMock()
span_context = SpanContext(
trace_id=int("1234567890abcdef1234567890abcdef", 16),
span_id=int("1234567890abcdef", 16),
is_remote=True,
)
otel_span.get_span_context.return_value = span_context
otel_span.parent = None
parent_context = {}
span_processor = SentrySpanProcessor()
sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
assert sentry_trace_data["span_id"] == "1234567890abcdef"
assert sentry_trace_data["parent_span_id"] is None
assert sentry_trace_data["parent_sampled"] is None
assert sentry_trace_data["baggage"] is None
def test_get_trace_data_with_span_and_trace_and_parent():
otel_span = MagicMock()
span_context = SpanContext(
trace_id=int("1234567890abcdef1234567890abcdef", 16),
span_id=int("1234567890abcdef", 16),
is_remote=True,
)
otel_span.get_span_context.return_value = span_context
otel_span.parent = MagicMock()
otel_span.parent.span_id = int("abcdef1234567890", 16)
parent_context = {}
span_processor = SentrySpanProcessor()
sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
assert sentry_trace_data["span_id"] == "1234567890abcdef"
assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
assert sentry_trace_data["parent_sampled"] is None
assert sentry_trace_data["baggage"] is None
def test_get_trace_data_with_sentry_trace():
otel_span = MagicMock()
span_context = SpanContext(
trace_id=int("1234567890abcdef1234567890abcdef", 16),
span_id=int("1234567890abcdef", 16),
is_remote=True,
)
otel_span.get_span_context.return_value = span_context
otel_span.parent = MagicMock()
otel_span.parent.span_id = int("abcdef1234567890", 16)
parent_context = {}
with mock.patch(
"sentry_sdk.integrations.opentelemetry.span_processor.get_value",
side_effect=[
extract_sentrytrace_data(
"1234567890abcdef1234567890abcdef-1234567890abcdef-1"
),
None,
],
):
span_processor = SentrySpanProcessor()
sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
assert sentry_trace_data["span_id"] == "1234567890abcdef"
assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
assert sentry_trace_data["parent_sampled"] is True
assert sentry_trace_data["baggage"] is None
with mock.patch(
"sentry_sdk.integrations.opentelemetry.span_processor.get_value",
side_effect=[
extract_sentrytrace_data(
"1234567890abcdef1234567890abcdef-1234567890abcdef-0"
),
None,
],
):
span_processor = SentrySpanProcessor()
sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
assert sentry_trace_data["span_id"] == "1234567890abcdef"
assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
assert sentry_trace_data["parent_sampled"] is False
assert sentry_trace_data["baggage"] is None
def test_get_trace_data_with_sentry_trace_and_baggage():
otel_span = MagicMock()
span_context = SpanContext(
trace_id=int("1234567890abcdef1234567890abcdef", 16),
span_id=int("1234567890abcdef", 16),
is_remote=True,
)
otel_span.get_span_context.return_value = span_context
otel_span.parent = MagicMock()
otel_span.parent.span_id = int("abcdef1234567890", 16)
parent_context = {}
baggage = (
"sentry-trace_id=771a43a4192642f0b136d5159a501700,"
"sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
"sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie"
)
with mock.patch(
"sentry_sdk.integrations.opentelemetry.span_processor.get_value",
side_effect=[
extract_sentrytrace_data(
"1234567890abcdef1234567890abcdef-1234567890abcdef-1"
),
baggage,
],
):
span_processor = SentrySpanProcessor()
sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
assert sentry_trace_data["span_id"] == "1234567890abcdef"
assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
assert sentry_trace_data["parent_sampled"]
assert sentry_trace_data["baggage"] == baggage
def test_update_span_with_otel_data_http_method():
sentry_span = Span()
otel_span = MagicMock()
otel_span.name = "Test OTel Span"
otel_span.kind = SpanKind.CLIENT
otel_span.attributes = {
"http.method": "GET",
"http.status_code": 429,
"http.status_text": "xxx",
"http.user_agent": "curl/7.64.1",
"net.peer.name": "example.com",
"http.target": "/",
}
span_processor = SentrySpanProcessor()
span_processor._update_span_with_otel_data(sentry_span, otel_span)
assert sentry_span.op == "http.client"
assert sentry_span.description == "GET example.com /"
assert sentry_span.status == "resource_exhausted"
assert sentry_span._data["http.method"] == "GET"
assert sentry_span._data["http.response.status_code"] == 429
assert sentry_span._data["http.status_text"] == "xxx"
assert sentry_span._data["http.user_agent"] == "curl/7.64.1"
assert sentry_span._data["net.peer.name"] == "example.com"
assert sentry_span._data["http.target"] == "/"
@pytest.mark.parametrize(
"otel_status, expected_status",
[
pytest.param(Status(StatusCode.UNSET), None, id="unset"),
pytest.param(Status(StatusCode.OK), "ok", id="ok"),
pytest.param(Status(StatusCode.ERROR), "internal_error", id="error"),
],
)
def test_update_span_with_otel_status(otel_status, expected_status):
sentry_span = Span()
otel_span = MagicMock()
otel_span.name = "Test OTel Span"
otel_span.kind = SpanKind.INTERNAL
otel_span.status = otel_status
span_processor = SentrySpanProcessor()
span_processor._update_span_with_otel_status(sentry_span, otel_span)
assert sentry_span.get_trace_context().get("status") == expected_status
def test_update_span_with_otel_data_http_method2():
sentry_span = Span()
otel_span = MagicMock()
otel_span.name = "Test OTel Span"
otel_span.kind = SpanKind.SERVER
otel_span.attributes = {
"http.method": "GET",
"http.status_code": 429,
"http.status_text": "xxx",
"http.user_agent": "curl/7.64.1",
"http.url": "https://example.com/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef",
}
span_processor = SentrySpanProcessor()
span_processor._update_span_with_otel_data(sentry_span, otel_span)
assert sentry_span.op == "http.server"
assert sentry_span.description == "GET https://example.com/status/403"
assert sentry_span.status == "resource_exhausted"
assert sentry_span._data["http.method"] == "GET"
assert sentry_span._data["http.response.status_code"] == 429
assert sentry_span._data["http.status_text"] == "xxx"
assert sentry_span._data["http.user_agent"] == "curl/7.64.1"
assert (
sentry_span._data["http.url"]
== "https://example.com/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef"
)
def test_update_span_with_otel_data_db_query():
sentry_span = Span()
otel_span = MagicMock()
otel_span.name = "Test OTel Span"
otel_span.attributes = {
"db.system": "postgresql",
"db.statement": "SELECT * FROM table where pwd = '123456'",
}
span_processor = SentrySpanProcessor()
span_processor._update_span_with_otel_data(sentry_span, otel_span)
assert sentry_span.op == "db"
assert sentry_span.description == "SELECT * FROM table where pwd = '123456'"
assert sentry_span._data["db.system"] == "postgresql"
assert (
sentry_span._data["db.statement"] == "SELECT * FROM table where pwd = '123456'"
)
def test_on_start_transaction():
otel_span = MagicMock()
otel_span.name = "Sample OTel Span"
otel_span.start_time = time.time_ns()
span_context = SpanContext(
trace_id=int("1234567890abcdef1234567890abcdef", 16),
span_id=int("1234567890abcdef", 16),
is_remote=True,
)
otel_span.get_span_context.return_value = span_context
otel_span.parent = MagicMock()
otel_span.parent.span_id = int("abcdef1234567890", 16)
parent_context = {}
fake_client = MagicMock()
fake_client.options = {"instrumenter": "otel"}
fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
current_hub = MagicMock()
current_hub.client = fake_client
fake_hub = MagicMock()
fake_hub.current = current_hub
with mock.patch(
"sentry_sdk.integrations.opentelemetry.span_processor.Hub", fake_hub
):
span_processor = SentrySpanProcessor()
span_processor.on_start(otel_span, parent_context)
fake_hub.current.start_transaction.assert_called_once_with(
name="Sample OTel Span",
span_id="1234567890abcdef",
parent_span_id="abcdef1234567890",
trace_id="1234567890abcdef1234567890abcdef",
baggage=None,
start_timestamp=datetime.fromtimestamp(
otel_span.start_time / 1e9, timezone.utc
),
instrumenter="otel",
)
assert len(span_processor.otel_span_map.keys()) == 1
assert list(span_processor.otel_span_map.keys())[0] == "1234567890abcdef"
def test_on_start_child():
otel_span = MagicMock()
otel_span.name = "Sample OTel Span"
otel_span.start_time = time.time_ns()
span_context = SpanContext(
trace_id=int("1234567890abcdef1234567890abcdef", 16),
span_id=int("1234567890abcdef", 16),
is_remote=True,
)
otel_span.get_span_context.return_value = span_context
otel_span.parent = MagicMock()
otel_span.parent.span_id = int("abcdef1234567890", 16)
parent_context = {}
fake_client = MagicMock()
fake_client.options = {"instrumenter": "otel"}
fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
current_hub = MagicMock()
current_hub.client = fake_client
fake_hub = MagicMock()
fake_hub.current = current_hub
with mock.patch(
"sentry_sdk.integrations.opentelemetry.span_processor.Hub", fake_hub
):
fake_span = MagicMock()
span_processor = SentrySpanProcessor()
span_processor.otel_span_map["abcdef1234567890"] = fake_span
span_processor.on_start(otel_span, parent_context)
fake_span.start_child.assert_called_once_with(
span_id="1234567890abcdef",
description="Sample OTel Span",
start_timestamp=datetime.fromtimestamp(
otel_span.start_time / 1e9, timezone.utc
),
instrumenter="otel",
)
assert len(span_processor.otel_span_map.keys()) == 2
assert "abcdef1234567890" in span_processor.otel_span_map.keys()
assert "1234567890abcdef" in span_processor.otel_span_map.keys()
def test_on_end_no_sentry_span():
"""
If on_end is called on a span that is not in the otel_span_map, it should be a no-op.
"""
otel_span = MagicMock()
otel_span.name = "Sample OTel Span"
otel_span.end_time = time.time_ns()
span_context = SpanContext(
trace_id=int("1234567890abcdef1234567890abcdef", 16),
span_id=int("1234567890abcdef", 16),
is_remote=True,
)
otel_span.get_span_context.return_value = span_context
span_processor = SentrySpanProcessor()
span_processor.otel_span_map = {}
span_processor._get_otel_context = MagicMock()
span_processor._update_span_with_otel_data = MagicMock()
span_processor.on_end(otel_span)
span_processor._get_otel_context.assert_not_called()
span_processor._update_span_with_otel_data.assert_not_called()
def test_on_end_sentry_transaction():
"""
Test on_end for a sentry Transaction.
"""
otel_span = MagicMock()
otel_span.name = "Sample OTel Span"
otel_span.end_time = time.time_ns()
otel_span.status = Status(StatusCode.OK)
span_context = SpanContext(
trace_id=int("1234567890abcdef1234567890abcdef", 16),
span_id=int("1234567890abcdef", 16),
is_remote=True,
)
otel_span.get_span_context.return_value = span_context
fake_sentry_span = MagicMock(spec=Transaction)
fake_sentry_span.set_context = MagicMock()
fake_sentry_span.finish = MagicMock()
span_processor = SentrySpanProcessor()
span_processor._get_otel_context = MagicMock()
span_processor._update_span_with_otel_data = MagicMock()
span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span
span_processor.on_end(otel_span)
fake_sentry_span.set_context.assert_called_once()
span_processor._update_span_with_otel_data.assert_not_called()
fake_sentry_span.set_status.assert_called_once_with("ok")
fake_sentry_span.finish.assert_called_once()
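# Illustrative aside on the mocking technique used here: on_end branches on
# isinstance(span, Transaction), and MagicMock(spec=...) passes isinstance
# checks, which is how this test and the next select the transaction vs.
# plain-span code paths without constructing real spans.
def test_spec_mock_isinstance_sketch():
    assert isinstance(MagicMock(spec=Transaction), Transaction)
    assert not isinstance(MagicMock(spec=Span), Transaction)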
def test_on_end_sentry_span():
"""
Test on_end for a sentry Span.
"""
otel_span = MagicMock()
otel_span.name = "Sample OTel Span"
otel_span.end_time = time.time_ns()
otel_span.status = Status(StatusCode.OK)
span_context = SpanContext(
trace_id=int("1234567890abcdef1234567890abcdef", 16),
span_id=int("1234567890abcdef", 16),
is_remote=True,
)
otel_span.get_span_context.return_value = span_context
fake_sentry_span = MagicMock(spec=Span)
fake_sentry_span.set_context = MagicMock()
fake_sentry_span.finish = MagicMock()
span_processor = SentrySpanProcessor()
span_processor._get_otel_context = MagicMock()
span_processor._update_span_with_otel_data = MagicMock()
span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span
span_processor.on_end(otel_span)
fake_sentry_span.set_context.assert_not_called()
span_processor._update_span_with_otel_data.assert_called_once_with(
fake_sentry_span, otel_span
)
fake_sentry_span.set_status.assert_called_once_with("ok")
fake_sentry_span.finish.assert_called_once()
def test_link_trace_context_to_error_event():
"""
Test that the trace context is added to the error event.
"""
fake_client = MagicMock()
fake_client.options = {"instrumenter": "otel"}
current_hub = MagicMock()
current_hub.client = fake_client
fake_hub = MagicMock()
fake_hub.current = current_hub
span_id = "1234567890abcdef"
trace_id = "1234567890abcdef1234567890abcdef"
fake_trace_context = {
"bla": "blub",
"foo": "bar",
"baz": 123,
}
sentry_span = MagicMock()
sentry_span.get_trace_context = MagicMock(return_value=fake_trace_context)
otel_span_map = {
span_id: sentry_span,
}
span_context = SpanContext(
trace_id=int(trace_id, 16),
span_id=int(span_id, 16),
is_remote=True,
)
otel_span = MagicMock()
otel_span.get_span_context = MagicMock(return_value=span_context)
fake_event = {"event_id": "1234567890abcdef1234567890abcdef"}
with mock.patch(
"sentry_sdk.integrations.opentelemetry.span_processor.get_current_span",
return_value=otel_span,
):
event = link_trace_context_to_error_event(fake_event, otel_span_map)
assert event
assert event == fake_event # the event is changed in place inside the function
assert "contexts" in event
assert "trace" in event["contexts"]
assert event["contexts"]["trace"] == fake_trace_context
sentry-python-1.39.2/tests/integrations/pure_eval/ 0000775 0000000 0000000 00000000000 14547447232 0022317 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/pure_eval/__init__.py 0000664 0000000 0000000 00000000060 14547447232 0024424 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("pure_eval")
sentry-python-1.39.2/tests/integrations/pure_eval/test_pure_eval.py 0000664 0000000 0000000 00000004615 14547447232 0025720 0 ustar 00root root 0000000 0000000 import sys
from types import SimpleNamespace
import pytest
from sentry_sdk import capture_exception, serializer
from sentry_sdk.integrations.pure_eval import PureEvalIntegration
@pytest.mark.parametrize("integrations", [[], [PureEvalIntegration()]])
def test_include_local_variables_enabled(sentry_init, capture_events, integrations):
sentry_init(include_local_variables=True, integrations=integrations)
events = capture_events()
def foo():
namespace = SimpleNamespace()
q = 1
w = 2
e = 3
r = 4
t = 5
y = 6
u = 7
i = 8
o = 9
p = 10
a = 11
s = 12
str((q, w, e, r, t, y, u, i, o, p, a, s)) # use variables for linter
namespace.d = {1: 2}
print(namespace.d[1] / 0)
# Appearances of variables after the main statement don't affect order
print(q)
print(s)
print(events)
try:
foo()
except Exception:
capture_exception()
(event,) = events
assert all(
frame["vars"]
for frame in event["exception"]["values"][0]["stacktrace"]["frames"]
)
frame_vars = event["exception"]["values"][0]["stacktrace"]["frames"][-1]["vars"]
if integrations:
# Values closest to the exception line appear first
# Test this order if possible given the Python version and dict order
expected_keys = [
"namespace",
"namespace.d",
"namespace.d[1]",
"s",
"a",
"p",
"o",
"i",
"u",
"y",
]
if sys.version_info[:2] == (3, 5):
assert frame_vars.keys() == set(expected_keys)
else:
assert list(frame_vars.keys()) == expected_keys
assert frame_vars["namespace.d"] == {"1": "2"}
assert frame_vars["namespace.d[1]"] == "2"
else:
# Without pure_eval, the set of captured variables is unpredictable.
# On newer Python versions (with insertion-ordered dicts), the locals defined first appear first and are thus the ones included.
assert frame_vars.keys() <= {
"namespace",
"q",
"w",
"e",
"r",
"t",
"y",
"u",
"i",
"o",
"p",
"a",
"s",
"events",
}
assert len(frame_vars) == serializer.MAX_DATABAG_BREADTH
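# Sketch of the breadth cap asserted above (assuming the serializer default is
# unchanged): MAX_DATABAG_BREADTH limits each mapping to ten entries, which is
# why exactly ten locals survive trimming and expected_keys lists ten names.
def test_databag_breadth_sketch():
    assert serializer.MAX_DATABAG_BREADTH == 10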
sentry-python-1.39.2/tests/integrations/pymongo/ 0000775 0000000 0000000 00000000000 14547447232 0022025 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/pymongo/__init__.py 0000664 0000000 0000000 00000000056 14547447232 0024137 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("pymongo")
sentry-python-1.39.2/tests/integrations/pymongo/test_pymongo.py 0000664 0000000 0000000 00000034131 14547447232 0025130 0 ustar 00root root 0000000 0000000 from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.pymongo import PyMongoIntegration, _strip_pii
from mockupdb import MockupDB, OpQuery
from pymongo import MongoClient
import pytest
@pytest.fixture(scope="session")
def mongo_server():
server = MockupDB(verbose=True)
server.autoresponds("ismaster", maxWireVersion=6)
server.run()
server.autoresponds(
{"find": "test_collection"}, cursor={"id": 123, "firstBatch": []}
)
# The wire format of "find" queries changed somewhere between PyMongo 3.1 and 3.12.
# This autoresponder answers "find" queries from older PyMongo versions the same way as the one above.
server.autoresponds(OpQuery({"foobar": 1}), cursor={"id": 123, "firstBatch": []})
server.autoresponds({"insert": "test_collection"}, ok=1)
server.autoresponds({"insert": "erroneous"}, ok=0, errmsg="test error")
yield server
server.stop()
@pytest.mark.parametrize("with_pii", [False, True])
def test_transactions(sentry_init, capture_events, mongo_server, with_pii):
sentry_init(
integrations=[PyMongoIntegration()],
traces_sample_rate=1.0,
send_default_pii=with_pii,
)
events = capture_events()
connection = MongoClient(mongo_server.uri)
with start_transaction():
list(
connection["test_db"]["test_collection"].find({"foobar": 1})
) # force query execution
connection["test_db"]["test_collection"].insert_one({"foo": 2})
try:
connection["test_db"]["erroneous"].insert_many([{"bar": 3}, {"baz": 4}])
pytest.fail("Request should raise")
except Exception:
pass
(event,) = events
(find, insert_success, insert_fail) = event["spans"]
common_tags = {
"db.name": "test_db",
"db.system": "mongodb",
"net.peer.name": mongo_server.host,
"net.peer.port": str(mongo_server.port),
}
for span in find, insert_success, insert_fail:
assert span["data"][SPANDATA.DB_SYSTEM] == "mongodb"
assert span["data"][SPANDATA.DB_NAME] == "test_db"
assert span["data"][SPANDATA.SERVER_ADDRESS] == "localhost"
assert span["data"][SPANDATA.SERVER_PORT] == mongo_server.port
for field, value in common_tags.items():
assert span["tags"][field] == value
assert find["op"] == "db.query"
assert insert_success["op"] == "db.query"
assert insert_fail["op"] == "db.query"
assert find["tags"]["db.operation"] == "find"
assert insert_success["tags"]["db.operation"] == "insert"
assert insert_fail["tags"]["db.operation"] == "insert"
assert find["description"].startswith("find {")
assert insert_success["description"].startswith("insert {")
assert insert_fail["description"].startswith("insert {")
if with_pii:
assert "1" in find["description"]
assert "2" in insert_success["description"]
assert "3" in insert_fail["description"] and "4" in insert_fail["description"]
else:
# All values in filter replaced by "%s"
assert "1" not in find["description"]
# All keys below top level replaced by "%s"
assert "2" not in insert_success["description"]
assert (
"3" not in insert_fail["description"]
and "4" not in insert_fail["description"]
)
assert find["tags"]["status"] == "ok"
assert insert_success["tags"]["status"] == "ok"
assert insert_fail["tags"]["status"] == "internal_error"
@pytest.mark.parametrize("with_pii", [False, True])
def test_breadcrumbs(sentry_init, capture_events, mongo_server, with_pii):
sentry_init(
integrations=[PyMongoIntegration()],
traces_sample_rate=1.0,
send_default_pii=with_pii,
)
events = capture_events()
connection = MongoClient(mongo_server.uri)
list(
connection["test_db"]["test_collection"].find({"foobar": 1})
) # force query execution
capture_message("hi")
(event,) = events
(crumb,) = event["breadcrumbs"]["values"]
assert crumb["category"] == "query"
assert crumb["message"].startswith("find {")
if with_pii:
assert "1" in crumb["message"]
else:
assert "1" not in crumb["message"]
assert crumb["type"] == "db.query"
assert crumb["data"] == {
"db.name": "test_db",
"db.system": "mongodb",
"db.operation": "find",
"net.peer.name": mongo_server.host,
"net.peer.port": str(mongo_server.port),
}
@pytest.mark.parametrize(
"testcase",
[
{
"command": {
"insert": "my_collection",
"ordered": True,
"documents": [
{
"username": "anton2",
"email": "anton@somewhere.io",
"password": "c4e86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
"_id": "635bc7403cb4f8a736f61cf2",
}
],
},
"command_stripped": {
"insert": "my_collection",
"ordered": True,
"documents": [
{"username": "%s", "email": "%s", "password": "%s", "_id": "%s"}
],
},
},
{
"command": {
"insert": "my_collection",
"ordered": True,
"documents": [
{
"username": "indiana4",
"email": "indy@jones.org",
"password": "63e86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf016b",
"_id": "635bc7403cb4f8a736f61cf3",
}
],
},
"command_stripped": {
"insert": "my_collection",
"ordered": True,
"documents": [
{"username": "%s", "email": "%s", "password": "%s", "_id": "%s"}
],
},
},
{
"command": {
"find": "my_collection",
"filter": {},
"limit": 1,
"singleBatch": True,
},
"command_stripped": {
"find": "my_collection",
"filter": {},
"limit": 1,
"singleBatch": True,
},
},
{
"command": {
"find": "my_collection",
"filter": {"username": "notthere"},
"limit": 1,
"singleBatch": True,
},
"command_stripped": {
"find": "my_collection",
"filter": {"username": "%s"},
"limit": 1,
"singleBatch": True,
},
},
{
"command": {
"insert": "my_collection",
"ordered": True,
"documents": [
{
"username": "userx1",
"email": "x@somewhere.io",
"password": "ccc86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
"_id": "635bc7403cb4f8a736f61cf4",
},
{
"username": "userx2",
"email": "x@somewhere.io",
"password": "xxx86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
"_id": "635bc7403cb4f8a736f61cf5",
},
],
},
"command_stripped": {
"insert": "my_collection",
"ordered": True,
"documents": [
{"username": "%s", "email": "%s", "password": "%s", "_id": "%s"},
{"username": "%s", "email": "%s", "password": "%s", "_id": "%s"},
],
},
},
{
"command": {
"find": "my_collection",
"filter": {"email": "ada@lovelace.com"},
},
"command_stripped": {"find": "my_collection", "filter": {"email": "%s"}},
},
{
"command": {
"aggregate": "my_collection",
"pipeline": [{"$match": {}}, {"$group": {"_id": 1, "n": {"$sum": 1}}}],
"cursor": {},
},
"command_stripped": {
"aggregate": "my_collection",
"pipeline": [{"$match": {}}, {"$group": {"_id": 1, "n": {"$sum": 1}}}],
"cursor": "%s",
},
},
{
"command": {
"aggregate": "my_collection",
"pipeline": [
{"$match": {"email": "x@somewhere.io"}},
{"$group": {"_id": 1, "n": {"$sum": 1}}},
],
"cursor": {},
},
"command_stripped": {
"aggregate": "my_collection",
"pipeline": [
{"$match": {"email": "%s"}},
{"$group": {"_id": 1, "n": {"$sum": 1}}},
],
"cursor": "%s",
},
},
{
"command": {
"createIndexes": "my_collection",
"indexes": [{"name": "username_1", "key": [("username", 1)]}],
},
"command_stripped": {
"createIndexes": "my_collection",
"indexes": [{"name": "username_1", "key": [("username", 1)]}],
},
},
{
"command": {
"update": "my_collection",
"ordered": True,
"updates": [
("q", {"email": "anton@somewhere.io"}),
(
"u",
{
"email": "anton2@somwehre.io",
"extra_field": "extra_content",
"new": "bla",
},
),
("multi", False),
("upsert", False),
],
},
"command_stripped": {
"update": "my_collection",
"ordered": True,
"updates": "%s",
},
},
{
"command": {
"update": "my_collection",
"ordered": True,
"updates": [
("q", {"email": "anton2@somwehre.io"}),
("u", {"$rename": {"new": "new_field"}}),
("multi", False),
("upsert", False),
],
},
"command_stripped": {
"update": "my_collection",
"ordered": True,
"updates": "%s",
},
},
{
"command": {
"update": "my_collection",
"ordered": True,
"updates": [
("q", {"email": "x@somewhere.io"}),
("u", {"$rename": {"password": "pwd"}}),
("multi", True),
("upsert", False),
],
},
"command_stripped": {
"update": "my_collection",
"ordered": True,
"updates": "%s",
},
},
{
"command": {
"delete": "my_collection",
"ordered": True,
"deletes": [("q", {"username": "userx2"}), ("limit", 1)],
},
"command_stripped": {
"delete": "my_collection",
"ordered": True,
"deletes": "%s",
},
},
{
"command": {
"delete": "my_collection",
"ordered": True,
"deletes": [("q", {"email": "xplus@somewhere.io"}), ("limit", 0)],
},
"command_stripped": {
"delete": "my_collection",
"ordered": True,
"deletes": "%s",
},
},
{
"command": {
"findAndModify": "my_collection",
"query": {"email": "ada@lovelace.com"},
"new": False,
"remove": True,
},
"command_stripped": {
"findAndModify": "my_collection",
"query": {"email": "%s"},
"new": "%s",
"remove": "%s",
},
},
{
"command": {
"findAndModify": "my_collection",
"query": {"email": "anton2@somewhere.io"},
"new": False,
"update": {"email": "anton3@somwehre.io", "extra_field": "xxx"},
"upsert": False,
},
"command_stripped": {
"findAndModify": "my_collection",
"query": {"email": "%s"},
"new": "%s",
"update": {"email": "%s", "extra_field": "%s"},
"upsert": "%s",
},
},
{
"command": {
"findAndModify": "my_collection",
"query": {"email": "anton3@somewhere.io"},
"new": False,
"update": {"$rename": {"extra_field": "extra_field2"}},
"upsert": False,
},
"command_stripped": {
"findAndModify": "my_collection",
"query": {"email": "%s"},
"new": "%s",
"update": {"$rename": "%s"},
"upsert": "%s",
},
},
{
"command": {
"renameCollection": "test.my_collection",
"to": "test.new_collection",
},
"command_stripped": {
"renameCollection": "test.my_collection",
"to": "test.new_collection",
},
},
{
"command": {"drop": "new_collection"},
"command_stripped": {"drop": "new_collection"},
},
],
)
def test_strip_pii(testcase):
assert _strip_pii(testcase["command"]) == testcase["command_stripped"]
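# Standalone usage sketch of _strip_pii (imported at the top of this module):
# scalar values are replaced with "%s" while command names, collection names,
# and document structure are preserved.
def test_strip_pii_inline_sketch():
    command = {"find": "users", "filter": {"email": "jane@example.com"}}
    assert _strip_pii(command) == {"find": "users", "filter": {"email": "%s"}}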
sentry-python-1.39.2/tests/integrations/pyramid/ 0000775 0000000 0000000 00000000000 14547447232 0022002 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/pyramid/__init__.py 0000664 0000000 0000000 00000000056 14547447232 0024114 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("pyramid")
sentry-python-1.39.2/tests/integrations/pyramid/test_pyramid.py 0000664 0000000 0000000 00000025500 14547447232 0025062 0 ustar 00root root 0000000 0000000 import json
import logging
from io import BytesIO
import pyramid.testing
import pytest
from pyramid.authorization import ACLAuthorizationPolicy
from pyramid.response import Response
from werkzeug.test import Client
from sentry_sdk import capture_message, add_breadcrumb
from sentry_sdk.integrations.pyramid import PyramidIntegration
from sentry_sdk.serializer import MAX_DATABAG_BREADTH
from tests.conftest import unpack_werkzeug_response
try:
from importlib.metadata import version
PYRAMID_VERSION = tuple(map(int, version("pyramid").split(".")))
except ImportError:
# < py3.8
import pkg_resources
PYRAMID_VERSION = tuple(
map(int, pkg_resources.get_distribution("pyramid").version.split("."))
)
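# Minimal sketch of the version parsing above, assuming a purely numeric
# dotted version string: tuples of ints make checks such as
# PYRAMID_VERSION < (1, 9) well-defined comparisons.
def test_version_tuple_sketch():
    assert tuple(map(int, "1.10.8".split("."))) == (1, 10, 8)
    assert (1, 10, 8) >= (1, 9)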
def hi(request):
capture_message("hi")
return Response("hi")
def hi_with_id(request):
capture_message("hi with id")
return Response("hi with id")
@pytest.fixture
def pyramid_config():
config = pyramid.testing.setUp()
try:
config.add_route("hi", "/message")
config.add_view(hi, route_name="hi")
config.add_route("hi_with_id", "/message/{message_id}")
config.add_view(hi_with_id, route_name="hi_with_id")
yield config
finally:
pyramid.testing.tearDown()
@pytest.fixture
def route(pyramid_config):
def inner(url):
def wrapper(f):
pyramid_config.add_route(f.__name__, url)
pyramid_config.add_view(f, route_name=f.__name__)
return f
return wrapper
return inner
@pytest.fixture
def get_client(pyramid_config):
def inner():
return Client(pyramid_config.make_wsgi_app())
return inner
def test_view_exceptions(
get_client, route, sentry_init, capture_events, capture_exceptions
):
sentry_init(integrations=[PyramidIntegration()])
events = capture_events()
exceptions = capture_exceptions()
add_breadcrumb({"message": "hi"})
@route("/errors")
def errors(request):
add_breadcrumb({"message": "hi2"})
1 / 0
client = get_client()
with pytest.raises(ZeroDivisionError):
client.get("/errors")
(error,) = exceptions
assert isinstance(error, ZeroDivisionError)
(event,) = events
(breadcrumb,) = event["breadcrumbs"]["values"]
assert breadcrumb["message"] == "hi2"
# Checking only the last value in the exceptions list,
# because Pyramid >= 1.9 returns a chained exception and before just a single exception
assert event["exception"]["values"][-1]["mechanism"]["type"] == "pyramid"
assert event["exception"]["values"][-1]["type"] == "ZeroDivisionError"
def test_has_context(route, get_client, sentry_init, capture_events):
sentry_init(integrations=[PyramidIntegration()])
events = capture_events()
@route("/context_message/{msg}")
def hi2(request):
capture_message(request.matchdict["msg"])
return Response("hi")
client = get_client()
client.get("/context_message/yoo")
(event,) = events
assert event["message"] == "yoo"
assert event["request"] == {
"env": {"SERVER_NAME": "localhost", "SERVER_PORT": "80"},
"headers": {"Host": "localhost"},
"method": "GET",
"query_string": "",
"url": "http://localhost/context_message/yoo",
}
assert event["transaction"] == "hi2"
@pytest.mark.parametrize(
"url,transaction_style,expected_transaction,expected_source",
[
("/message", "route_name", "hi", "component"),
("/message", "route_pattern", "/message", "route"),
("/message/123456", "route_name", "hi_with_id", "component"),
("/message/123456", "route_pattern", "/message/{message_id}", "route"),
],
)
def test_transaction_style(
sentry_init,
get_client,
capture_events,
url,
transaction_style,
expected_transaction,
expected_source,
):
sentry_init(integrations=[PyramidIntegration(transaction_style=transaction_style)])
events = capture_events()
client = get_client()
client.get(url)
(event,) = events
assert event["transaction"] == expected_transaction
assert event["transaction_info"] == {"source": expected_source}
def test_large_json_request(sentry_init, capture_events, route, get_client):
sentry_init(integrations=[PyramidIntegration()])
data = {"foo": {"bar": "a" * 2000}}
@route("/")
def index(request):
assert request.json == data
assert request.text == json.dumps(data)
assert not request.POST
capture_message("hi")
return Response("ok")
events = capture_events()
client = get_client()
client.post("/", content_type="application/json", data=json.dumps(data))
(event,) = events
assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
"": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
}
assert len(event["request"]["data"]["foo"]["bar"]) == 1024
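# Worked sketch of the truncation arithmetic asserted above: long strings are
# capped at 1024 characters, i.e. 1021 original characters plus a "..."
# marker, and the "!limit" entry in "_meta" records the original length (2000)
# alongside the kept length (1024).
def test_truncation_arithmetic_sketch():
    original = "a" * 2000
    truncated = original[:1021] + "..."
    assert len(truncated) == 1024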
@pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
def test_empty_json_request(sentry_init, capture_events, route, get_client, data):
sentry_init(integrations=[PyramidIntegration()])
@route("/")
def index(request):
assert request.json == data
assert request.text == json.dumps(data)
assert not request.POST
capture_message("hi")
return Response("ok")
events = capture_events()
client = get_client()
response = client.post("/", content_type="application/json", data=json.dumps(data))
assert response[1] == "200 OK"
(event,) = events
assert event["request"]["data"] == data
def test_json_not_truncated_if_max_request_body_size_is_always(
sentry_init, capture_events, route, get_client
):
sentry_init(integrations=[PyramidIntegration()], max_request_body_size="always")
data = {
"key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
}
@route("/")
def index(request):
assert request.json == data
assert request.text == json.dumps(data)
capture_message("hi")
return Response("ok")
events = capture_events()
client = get_client()
client.post("/", content_type="application/json", data=json.dumps(data))
(event,) = events
assert event["request"]["data"] == data
def test_files_and_form(sentry_init, capture_events, route, get_client):
sentry_init(integrations=[PyramidIntegration()], max_request_body_size="always")
data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}
@route("/")
def index(request):
capture_message("hi")
return Response("ok")
events = capture_events()
client = get_client()
client.post("/", data=data)
(event,) = events
assert event["_meta"]["request"]["data"]["foo"] == {
"": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
}
assert len(event["request"]["data"]["foo"]) == 1024
assert event["_meta"]["request"]["data"]["file"] == {"": {"rem": [["!raw", "x"]]}}
assert not event["request"]["data"]["file"]
def test_bad_request_not_captured(
sentry_init, pyramid_config, capture_events, route, get_client
):
import pyramid.httpexceptions as exc
sentry_init(integrations=[PyramidIntegration()])
events = capture_events()
@route("/")
def index(request):
raise exc.HTTPBadRequest()
def errorhandler(exc, request):
return Response("bad request")
pyramid_config.add_view(errorhandler, context=exc.HTTPBadRequest)
client = get_client()
client.get("/")
assert not events
def test_errorhandler_ok(
sentry_init, pyramid_config, capture_exceptions, route, get_client
):
sentry_init(integrations=[PyramidIntegration()])
errors = capture_exceptions()
@route("/")
def index(request):
raise Exception()
def errorhandler(exc, request):
return Response("bad request")
pyramid_config.add_view(errorhandler, context=Exception)
client = get_client()
client.get("/")
assert not errors
@pytest.mark.skipif(
PYRAMID_VERSION < (1, 9),
reason="We don't have the right hooks in older Pyramid versions",
)
def test_errorhandler_500(
sentry_init, pyramid_config, capture_exceptions, route, get_client
):
sentry_init(integrations=[PyramidIntegration()])
errors = capture_exceptions()
@route("/")
def index(request):
1 / 0
def errorhandler(exc, request):
return Response("bad request", status=500)
pyramid_config.add_view(errorhandler, context=Exception)
client = get_client()
app_iter, status, headers = unpack_werkzeug_response(client.get("/"))
assert app_iter == b"bad request"
assert status.lower() == "500 internal server error"
(error,) = errors
assert isinstance(error, ZeroDivisionError)
def test_error_in_errorhandler(
sentry_init, pyramid_config, capture_events, route, get_client
):
sentry_init(integrations=[PyramidIntegration()])
@route("/")
def index(request):
raise ValueError()
def error_handler(err, request):
1 / 0
pyramid_config.add_view(error_handler, context=ValueError)
events = capture_events()
client = get_client()
with pytest.raises(ZeroDivisionError):
client.get("/")
(event,) = events
exception = event["exception"]["values"][-1]
assert exception["type"] == "ZeroDivisionError"
def test_error_in_authenticated_userid(
sentry_init, pyramid_config, capture_events, route, get_client
):
from sentry_sdk.integrations.logging import LoggingIntegration
sentry_init(
send_default_pii=True,
integrations=[
PyramidIntegration(),
LoggingIntegration(event_level=logging.ERROR),
],
)
logger = logging.getLogger("test_pyramid")
class AuthenticationPolicy(object):
def authenticated_userid(self, request):
logger.error("failed to identify user")
pyramid_config.set_authorization_policy(ACLAuthorizationPolicy())
pyramid_config.set_authentication_policy(AuthenticationPolicy())
events = capture_events()
client = get_client()
client.get("/message")
assert len(events) == 1
def tween_factory(handler, registry):
def tween(request):
try:
response = handler(request)
except Exception:
mroute = request.matched_route
if mroute and mroute.name in ("index",):
return Response("bad request", status_code=400)
return response
return tween
def test_tween_ok(sentry_init, pyramid_config, capture_exceptions, route, get_client):
sentry_init(integrations=[PyramidIntegration()])
errors = capture_exceptions()
@route("/")
def index(request):
raise Exception()
pyramid_config.add_tween(
"tests.integrations.pyramid.test_pyramid.tween_factory",
under=pyramid.tweens.INGRESS,
)
client = get_client()
client.get("/")
assert not errors
sentry-python-1.39.2/tests/integrations/quart/ 0000775 0000000 0000000 00000000000 14547447232 0021471 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/quart/__init__.py 0000664 0000000 0000000 00000000054 14547447232 0023601 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("quart")
sentry-python-1.39.2/tests/integrations/quart/test_quart.py 0000664 0000000 0000000 00000034314 14547447232 0024243 0 ustar 00root root 0000000 0000000 import json
import threading
import pytest
import pytest_asyncio
from sentry_sdk import (
set_tag,
configure_scope,
capture_message,
capture_exception,
last_event_id,
)
from sentry_sdk.integrations.logging import LoggingIntegration
import sentry_sdk.integrations.quart as quart_sentry
from quart import Quart, Response, abort, stream_with_context
from quart.views import View
from quart_auth import AuthUser, login_user
try:
from quart_auth import QuartAuth
auth_manager = QuartAuth()
except ImportError:
from quart_auth import AuthManager
auth_manager = AuthManager()
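# Illustrative probe (assumption, not from the original suite: newer
# quart-auth releases renamed AuthManager to QuartAuth, which the import shim
# above accommodates):
def _auth_manager_class_name_sketch():
    return type(auth_manager).__name__  # "QuartAuth" or "AuthManager"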
@pytest_asyncio.fixture
async def app():
app = Quart(__name__)
app.debug = False
app.config["TESTING"] = False
app.secret_key = "haha"
auth_manager.init_app(app)
@app.route("/message")
async def hi():
capture_message("hi")
return "ok"
@app.route("/message/")
async def hi_with_id(message_id):
capture_message("hi with id")
return "ok with id"
@app.get("/sync/thread_ids")
def _thread_ids_sync():
return {
"main": str(threading.main_thread().ident),
"active": str(threading.current_thread().ident),
}
@app.get("/async/thread_ids")
async def _thread_ids_async():
return {
"main": str(threading.main_thread().ident),
"active": str(threading.current_thread().ident),
}
return app
@pytest.fixture(params=("manual",))
def integration_enabled_params(request):
if request.param == "manual":
return {"integrations": [quart_sentry.QuartIntegration()]}
else:
raise ValueError(request.param)
@pytest.mark.asyncio
async def test_has_context(sentry_init, app, capture_events):
sentry_init(integrations=[quart_sentry.QuartIntegration()])
events = capture_events()
client = app.test_client()
response = await client.get("/message")
assert response.status_code == 200
(event,) = events
assert event["transaction"] == "hi"
assert "data" not in event["request"]
assert event["request"]["url"] == "http://localhost/message"
@pytest.mark.asyncio
@pytest.mark.parametrize(
"url,transaction_style,expected_transaction,expected_source",
[
("/message", "endpoint", "hi", "component"),
("/message", "url", "/message", "route"),
("/message/123456", "endpoint", "hi_with_id", "component"),
("/message/123456", "url", "/message/", "route"),
],
)
async def test_transaction_style(
sentry_init,
app,
capture_events,
url,
transaction_style,
expected_transaction,
expected_source,
):
sentry_init(
integrations=[
quart_sentry.QuartIntegration(transaction_style=transaction_style)
]
)
events = capture_events()
client = app.test_client()
response = await client.get(url)
assert response.status_code == 200
(event,) = events
assert event["transaction"] == expected_transaction
@pytest.mark.asyncio
async def test_errors(
sentry_init,
capture_exceptions,
capture_events,
app,
integration_enabled_params,
):
sentry_init(debug=True, **integration_enabled_params)
@app.route("/")
async def index():
1 / 0
exceptions = capture_exceptions()
events = capture_events()
client = app.test_client()
try:
await client.get("/")
except ZeroDivisionError:
pass
(exc,) = exceptions
assert isinstance(exc, ZeroDivisionError)
(event,) = events
assert event["exception"]["values"][0]["mechanism"]["type"] == "quart"
@pytest.mark.asyncio
async def test_quart_auth_not_installed(
sentry_init, app, capture_events, monkeypatch, integration_enabled_params
):
sentry_init(**integration_enabled_params)
monkeypatch.setattr(quart_sentry, "quart_auth", None)
events = capture_events()
client = app.test_client()
await client.get("/message")
(event,) = events
assert event.get("user", {}).get("id") is None
@pytest.mark.asyncio
async def test_quart_auth_not_configured(
sentry_init, app, capture_events, monkeypatch, integration_enabled_params
):
sentry_init(**integration_enabled_params)
assert quart_sentry.quart_auth
events = capture_events()
client = app.test_client()
await client.get("/message")
(event,) = events
assert event.get("user", {}).get("id") is None
@pytest.mark.asyncio
async def test_quart_auth_partially_configured(
sentry_init, app, capture_events, monkeypatch, integration_enabled_params
):
sentry_init(**integration_enabled_params)
events = capture_events()
client = app.test_client()
await client.get("/message")
(event,) = events
assert event.get("user", {}).get("id") is None
@pytest.mark.asyncio
@pytest.mark.parametrize("send_default_pii", [True, False])
@pytest.mark.parametrize("user_id", [None, "42", "3"])
async def test_quart_auth_configured(
send_default_pii,
sentry_init,
app,
user_id,
capture_events,
monkeypatch,
integration_enabled_params,
):
sentry_init(send_default_pii=send_default_pii, **integration_enabled_params)
@app.route("/login")
async def login():
if user_id is not None:
login_user(AuthUser(user_id))
return "ok"
events = capture_events()
client = app.test_client()
assert (await client.get("/login")).status_code == 200
assert not events
assert (await client.get("/message")).status_code == 200
(event,) = events
if user_id is None or not send_default_pii:
assert event.get("user", {}).get("id") is None
else:
assert event["user"]["id"] == str(user_id)
@pytest.mark.asyncio
@pytest.mark.parametrize(
"integrations",
[
[quart_sentry.QuartIntegration()],
[quart_sentry.QuartIntegration(), LoggingIntegration(event_level="ERROR")],
],
)
async def test_errors_not_reported_twice(
sentry_init, integrations, capture_events, app
):
sentry_init(integrations=integrations)
@app.route("/")
async def index():
try:
1 / 0
except Exception as e:
app.logger.exception(e)
raise e
events = capture_events()
client = app.test_client()
# with pytest.raises(ZeroDivisionError):
await client.get("/")
assert len(events) == 1
@pytest.mark.asyncio
async def test_logging(sentry_init, capture_events, app):
# ensure that Quart's logger magic doesn't break ours
sentry_init(
integrations=[
quart_sentry.QuartIntegration(),
LoggingIntegration(event_level="ERROR"),
]
)
@app.route("/")
async def index():
app.logger.error("hi")
return "ok"
events = capture_events()
client = app.test_client()
await client.get("/")
(event,) = events
assert event["level"] == "error"
@pytest.mark.asyncio
async def test_no_errors_without_request(app, sentry_init):
sentry_init(integrations=[quart_sentry.QuartIntegration()])
async with app.app_context():
capture_exception(ValueError())
def test_cli_commands_raise(app):
if not hasattr(app, "cli"):
pytest.skip("Too old quart version")
from quart.cli import ScriptInfo
@app.cli.command()
def foo():
1 / 0
with pytest.raises(ZeroDivisionError):
app.cli.main(
args=["foo"], prog_name="myapp", obj=ScriptInfo(create_app=lambda _: app)
)
@pytest.mark.asyncio
async def test_500(sentry_init, capture_events, app):
sentry_init(integrations=[quart_sentry.QuartIntegration()])
@app.route("/")
async def index():
1 / 0
@app.errorhandler(500)
async def error_handler(err):
return "Sentry error: %s" % last_event_id()
events = capture_events()
client = app.test_client()
response = await client.get("/")
(event,) = events
assert (await response.get_data(as_text=True)) == "Sentry error: %s" % event[
"event_id"
]
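# Minimal sketch of the error-page pattern exercised above: last_event_id()
# (imported at the top of this module) returns the id of the most recently
# captured event, letting a 500 handler surface it for support lookups.
def _error_page_sketch():
    return "Sentry error: %s" % last_event_id()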
@pytest.mark.asyncio
async def test_error_in_errorhandler(sentry_init, capture_events, app):
sentry_init(integrations=[quart_sentry.QuartIntegration()])
@app.route("/")
async def index():
raise ValueError()
@app.errorhandler(500)
async def error_handler(err):
1 / 0
events = capture_events()
client = app.test_client()
with pytest.raises(ZeroDivisionError):
await client.get("/")
event1, event2 = events
(exception,) = event1["exception"]["values"]
assert exception["type"] == "ValueError"
exception = event2["exception"]["values"][-1]
assert exception["type"] == "ZeroDivisionError"
@pytest.mark.asyncio
async def test_bad_request_not_captured(sentry_init, capture_events, app):
sentry_init(integrations=[quart_sentry.QuartIntegration()])
events = capture_events()
@app.route("/")
async def index():
abort(400)
client = app.test_client()
await client.get("/")
assert not events
@pytest.mark.asyncio
async def test_does_not_leak_scope(sentry_init, capture_events, app):
sentry_init(integrations=[quart_sentry.QuartIntegration()])
events = capture_events()
with configure_scope() as scope:
scope.set_tag("request_data", False)
@app.route("/")
async def index():
with configure_scope() as scope:
scope.set_tag("request_data", True)
async def generate():
for row in range(1000):
with configure_scope() as scope:
assert scope._tags["request_data"]
yield str(row) + "\n"
return Response(stream_with_context(generate)(), mimetype="text/csv")
client = app.test_client()
response = await client.get("/")
assert (await response.get_data(as_text=True)) == "".join(
str(row) + "\n" for row in range(1000)
)
assert not events
with configure_scope() as scope:
assert not scope._tags["request_data"]
@pytest.mark.asyncio
async def test_scoped_test_client(sentry_init, app):
sentry_init(integrations=[quart_sentry.QuartIntegration()])
@app.route("/")
async def index():
return "ok"
async with app.test_client() as client:
response = await client.get("/")
assert response.status_code == 200
@pytest.mark.asyncio
@pytest.mark.parametrize("exc_cls", [ZeroDivisionError, Exception])
async def test_errorhandler_for_exception_swallows_exception(
sentry_init, app, capture_events, exc_cls
):
# In contrast to error handlers for a status code, error
# handlers for exceptions can swallow the exception (this is
# just how the Quart signal works)
sentry_init(integrations=[quart_sentry.QuartIntegration()])
events = capture_events()
@app.route("/")
async def index():
1 / 0
@app.errorhandler(exc_cls)
async def zerodivision(e):
return "ok"
async with app.test_client() as client:
response = await client.get("/")
assert response.status_code == 200
assert not events
@pytest.mark.asyncio
async def test_tracing_success(sentry_init, capture_events, app):
sentry_init(traces_sample_rate=1.0, integrations=[quart_sentry.QuartIntegration()])
@app.before_request
async def _():
set_tag("before_request", "yes")
@app.route("/message_tx")
async def hi_tx():
set_tag("view", "yes")
capture_message("hi")
return "ok"
events = capture_events()
async with app.test_client() as client:
response = await client.get("/message_tx")
assert response.status_code == 200
message_event, transaction_event = events
assert transaction_event["type"] == "transaction"
assert transaction_event["transaction"] == "hi_tx"
assert transaction_event["tags"]["view"] == "yes"
assert transaction_event["tags"]["before_request"] == "yes"
assert message_event["message"] == "hi"
assert message_event["transaction"] == "hi_tx"
assert message_event["tags"]["view"] == "yes"
assert message_event["tags"]["before_request"] == "yes"
@pytest.mark.asyncio
async def test_tracing_error(sentry_init, capture_events, app):
sentry_init(traces_sample_rate=1.0, integrations=[quart_sentry.QuartIntegration()])
events = capture_events()
@app.route("/error")
async def error():
1 / 0
async with app.test_client() as client:
response = await client.get("/error")
assert response.status_code == 500
error_event, transaction_event = events
assert transaction_event["type"] == "transaction"
assert transaction_event["transaction"] == "error"
assert error_event["transaction"] == "error"
(exception,) = error_event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
@pytest.mark.asyncio
async def test_class_based_views(sentry_init, app, capture_events):
sentry_init(integrations=[quart_sentry.QuartIntegration()])
events = capture_events()
@app.route("/")
class HelloClass(View):
methods = ["GET"]
async def dispatch_request(self):
capture_message("hi")
return "ok"
app.add_url_rule("/hello-class/", view_func=HelloClass.as_view("hello_class"))
async with app.test_client() as client:
response = await client.get("/hello-class/")
assert response.status_code == 200
(event,) = events
assert event["message"] == "hi"
assert event["transaction"] == "hello_class"
@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
async def test_active_thread_id(sentry_init, capture_envelopes, endpoint, app):
sentry_init(
traces_sample_rate=1.0,
_experiments={"profiles_sample_rate": 1.0},
)
envelopes = capture_envelopes()
async with app.test_client() as client:
response = await client.get(endpoint)
assert response.status_code == 200
data = json.loads(response.content)
envelopes = [envelope for envelope in envelopes]
assert len(envelopes) == 1
profiles = [item for item in envelopes[0].items if item.type == "profile"]
assert len(profiles) == 1
for profile in profiles:
transactions = profile.payload.json["transactions"]
assert len(transactions) == 1
assert str(data["active"]) == transactions[0]["active_thread_id"]
sentry-python-1.39.2/tests/integrations/redis/ 0000775 0000000 0000000 00000000000 14547447232 0021443 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/redis/__init__.py 0000664 0000000 0000000 00000000054 14547447232 0023553 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("redis")
sentry-python-1.39.2/tests/integrations/redis/asyncio/ 0000775 0000000 0000000 00000000000 14547447232 0023110 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/redis/asyncio/__init__.py 0000664 0000000 0000000 00000000071 14547447232 0025217 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("fakeredis.aioredis")
sentry-python-1.39.2/tests/integrations/redis/asyncio/test_redis_asyncio.py 0000664 0000000 0000000 00000004413 14547447232 0027356 0 ustar 00root root 0000000 0000000 import pytest
from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.redis import RedisIntegration
from fakeredis.aioredis import FakeRedis
@pytest.mark.asyncio
async def test_async_basic(sentry_init, capture_events):
sentry_init(integrations=[RedisIntegration()])
events = capture_events()
connection = FakeRedis()
await connection.get("foobar")
capture_message("hi")
(event,) = events
(crumb,) = event["breadcrumbs"]["values"]
assert crumb == {
"category": "redis",
"message": "GET 'foobar'",
"data": {
"db.operation": "GET",
"redis.key": "foobar",
"redis.command": "GET",
"redis.is_cluster": False,
},
"timestamp": crumb["timestamp"],
"type": "redis",
}
@pytest.mark.parametrize(
"is_transaction, send_default_pii, expected_first_ten",
[
(False, False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
(True, True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
],
)
@pytest.mark.asyncio
async def test_async_redis_pipeline(
sentry_init, capture_events, is_transaction, send_default_pii, expected_first_ten
):
sentry_init(
integrations=[RedisIntegration()],
traces_sample_rate=1.0,
send_default_pii=send_default_pii,
)
events = capture_events()
connection = FakeRedis()
with start_transaction():
pipeline = connection.pipeline(transaction=is_transaction)
pipeline.get("foo")
pipeline.set("bar", 1)
pipeline.set("baz", 2)
await pipeline.execute()
(event,) = events
(span,) = event["spans"]
assert span["op"] == "db.redis"
assert span["description"] == "redis.pipeline.execute"
assert span["data"] == {
"redis.commands": {
"count": 3,
"first_ten": expected_first_ten,
},
SPANDATA.DB_SYSTEM: "redis",
SPANDATA.DB_NAME: "0",
SPANDATA.SERVER_ADDRESS: connection.connection_pool.connection_kwargs.get(
"host"
),
SPANDATA.SERVER_PORT: 6379,
}
assert span["tags"] == {
"redis.transaction": is_transaction,
"redis.is_cluster": False,
}
sentry-python-1.39.2/tests/integrations/redis/cluster/ 0000775 0000000 0000000 00000000000 14547447232 0023124 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/redis/cluster/__init__.py 0000664 0000000 0000000 00000000064 14547447232 0025235 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("redis.cluster")
sentry-python-1.39.2/tests/integrations/redis/cluster/test_redis_cluster.py 0000664 0000000 0000000 00000010307 14547447232 0027405 0 ustar 00root root 0000000 0000000 import pytest
from sentry_sdk import capture_message
from sentry_sdk.consts import SPANDATA
from sentry_sdk.api import start_transaction
from sentry_sdk.integrations.redis import RedisIntegration
import redis
@pytest.fixture(autouse=True)
def monkeypatch_rediscluster_class(reset_integrations):
pipeline_cls = redis.cluster.ClusterPipeline
redis.cluster.NodesManager.initialize = lambda *_, **__: None
redis.RedisCluster.command = lambda *_: []
redis.RedisCluster.pipeline = lambda *_, **__: pipeline_cls(None, None)
redis.RedisCluster.get_default_node = lambda *_, **__: redis.cluster.ClusterNode(
"localhost", 6379
)
pipeline_cls.execute = lambda *_, **__: None
redis.RedisCluster.execute_command = lambda *_, **__: []
def test_rediscluster_breadcrumb(sentry_init, capture_events):
sentry_init(integrations=[RedisIntegration()])
events = capture_events()
rc = redis.RedisCluster(host="localhost", port=6379)
rc.get("foobar")
capture_message("hi")
(event,) = events
crumbs = event["breadcrumbs"]["values"]
# on initializing a RedisCluster, a COMMAND call is made - this is not important for the test
# but must be accounted for
assert len(crumbs) in (1, 2)
assert len(crumbs) == 1 or crumbs[0]["message"] == "COMMAND"
crumb = crumbs[-1]
assert crumb == {
"category": "redis",
"message": "GET 'foobar'",
"data": {
"db.operation": "GET",
"redis.key": "foobar",
"redis.command": "GET",
"redis.is_cluster": True,
},
"timestamp": crumb["timestamp"],
"type": "redis",
}
@pytest.mark.parametrize(
"send_default_pii, description",
[
(False, "SET 'bar' [Filtered]"),
(True, "SET 'bar' 1"),
],
)
def test_rediscluster_basic(sentry_init, capture_events, send_default_pii, description):
sentry_init(
integrations=[RedisIntegration()],
traces_sample_rate=1.0,
send_default_pii=send_default_pii,
)
events = capture_events()
with start_transaction():
rc = redis.RedisCluster(host="localhost", port=6379)
rc.set("bar", 1)
(event,) = events
spans = event["spans"]
# on initializing a RedisCluster, a COMMAND call is made - this is not important for the test
# but must be accounted for
assert len(spans) in (1, 2)
assert len(spans) == 1 or spans[0]["description"] == "COMMAND"
span = spans[-1]
assert span["op"] == "db.redis"
assert span["description"] == description
assert span["data"] == {
SPANDATA.DB_SYSTEM: "redis",
# ClusterNode converts localhost to 127.0.0.1
SPANDATA.SERVER_ADDRESS: "127.0.0.1",
SPANDATA.SERVER_PORT: 6379,
}
assert span["tags"] == {
"db.operation": "SET",
"redis.command": "SET",
"redis.is_cluster": True,
"redis.key": "bar",
}
@pytest.mark.parametrize(
"send_default_pii, expected_first_ten",
[
(False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
(True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
],
)
def test_rediscluster_pipeline(
sentry_init, capture_events, send_default_pii, expected_first_ten
):
sentry_init(
integrations=[RedisIntegration()],
traces_sample_rate=1.0,
send_default_pii=send_default_pii,
)
events = capture_events()
rc = redis.RedisCluster(host="localhost", port=6379)
with start_transaction():
pipeline = rc.pipeline()
pipeline.get("foo")
pipeline.set("bar", 1)
pipeline.set("baz", 2)
pipeline.execute()
(event,) = events
(span,) = event["spans"]
assert span["op"] == "db.redis"
assert span["description"] == "redis.pipeline.execute"
assert span["data"] == {
"redis.commands": {
"count": 3,
"first_ten": expected_first_ten,
},
SPANDATA.DB_SYSTEM: "redis",
# ClusterNode converts localhost to 127.0.0.1
SPANDATA.SERVER_ADDRESS: "127.0.0.1",
SPANDATA.SERVER_PORT: 6379,
}
assert span["tags"] == {
"redis.transaction": False, # For Cluster, this is always False
"redis.is_cluster": True,
}
sentry-python-1.39.2/tests/integrations/redis/cluster_asyncio/ 0000775 0000000 0000000 00000000000 14547447232 0024651 5 ustar 00root root 0000000 0000000 sentry-python-1.39.2/tests/integrations/redis/cluster_asyncio/__init__.py 0000664 0000000 0000000 00000000074 14547447232 0026763 0 ustar 00root root 0000000 0000000 import pytest
pytest.importorskip("redis.asyncio.cluster")
sentry-python-1.39.2/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py 0000664 0000000 0000000 00000007624 14547447232 0032667 0 ustar 00root root 0000000 0000000 import pytest
from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.redis import RedisIntegration
from redis.asyncio import cluster
async def fake_initialize(*_, **__):
return None
async def fake_execute_command(*_, **__):
return []
async def fake_execute(*_, **__):
return None
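# Note on the fakes above (illustrative): they are `async def` functions
# rather than lambdas because the patched cluster methods are awaited; a
# quick sanity check of their awaited values:
def test_fakes_are_awaitable_sketch():
    import asyncio
    assert asyncio.run(fake_execute_command()) == []
    assert asyncio.run(fake_execute()) is None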
@pytest.fixture(autouse=True)
def monkeypatch_rediscluster_asyncio_class(reset_integrations):
pipeline_cls = cluster.ClusterPipeline
cluster.NodesManager.initialize = fake_initialize
cluster.RedisCluster.get_default_node = lambda *_, **__: cluster.ClusterNode(
"localhost", 6379
)
cluster.RedisCluster.pipeline = lambda self, *_, **__: pipeline_cls(self)
pipeline_cls.execute = fake_execute
cluster.RedisCluster.execute_command = fake_execute_command
@pytest.mark.asyncio
async def test_async_breadcrumb(sentry_init, capture_events):
sentry_init(integrations=[RedisIntegration()])
events = capture_events()
connection = cluster.RedisCluster(host="localhost", port=6379)
await connection.get("foobar")
capture_message("hi")
(event,) = events
(crumb,) = event["breadcrumbs"]["values"]
assert crumb == {
"category": "redis",
"message": "GET 'foobar'",
"data": {
"db.operation": "GET",
"redis.key": "foobar",
"redis.command": "GET",
"redis.is_cluster": True,
},
"timestamp": crumb["timestamp"],
"type": "redis",
}
@pytest.mark.parametrize(
"send_default_pii, description",
[
(False, "SET 'bar' [Filtered]"),
(True, "SET 'bar' 1"),
],
)
@pytest.mark.asyncio
async def test_async_basic(sentry_init, capture_events, send_default_pii, description):
sentry_init(
integrations=[RedisIntegration()],
traces_sample_rate=1.0,
send_default_pii=send_default_pii,
)
events = capture_events()
connection = cluster.RedisCluster(host="localhost", port=6379)
with start_transaction():
await connection.set("bar", 1)
(event,) = events
(span,) = event["spans"]
assert span["op"] == "db.redis"
assert span["description"] == description
assert span["data"] == {
SPANDATA.DB_SYSTEM: "redis",
# ClusterNode converts localhost to 127.0.0.1
SPANDATA.SERVER_ADDRESS: "127.0.0.1",
SPANDATA.SERVER_PORT: 6379,
}
assert span["tags"] == {
"redis.is_cluster": True,
"db.operation": "SET",
"redis.command": "SET",
"redis.key": "bar",
}
@pytest.mark.parametrize(
"send_default_pii, expected_first_ten",
[
(False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
(True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
],
)
@pytest.mark.asyncio
async def test_async_redis_pipeline(
sentry_init, capture_events, send_default_pii, expected_first_ten
):
sentry_init(
integrations=[RedisIntegration()],
traces_sample_rate=1.0,
send_default_pii=send_default_pii,
)
events = capture_events()
connection = cluster.RedisCluster(host="localhost", port=6379)
with start_transaction():
pipeline = connection.pipeline()
pipeline.get("foo")
pipeline.set("bar", 1)
pipeline.set("baz", 2)
await pipeline.execute()
(event,) = events
(span,) = event["spans"]
assert span["op"] == "db.redis"
assert span["description"] == "redis.pipeline.execute"
assert span["data"] == {
"redis.commands": {
"count": 3,
"first_ten": expected_first_ten,
},
SPANDATA.DB_SYSTEM: "redis",
# ClusterNode converts localhost to 127.0.0.1
SPANDATA.SERVER_ADDRESS: "127.0.0.1",
SPANDATA.SERVER_PORT: 6379,
}
assert span["tags"] == {
"redis.transaction": False,
"redis.is_cluster": True,
}
sentry-python-1.39.2/tests/integrations/redis/test_redis.py 0000664 0000000 0000000 00000021476 14547447232 0024174 0 ustar 00root root 0000000 0000000 import pytest
from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.redis import RedisIntegration
from fakeredis import FakeStrictRedis
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
MOCK_CONNECTION_POOL = mock.MagicMock()
MOCK_CONNECTION_POOL.connection_kwargs = {
"host": "localhost",
"port": 63791,
"db": 1,
}
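# Sketch of how the pool kwargs above surface in span data (mirroring the
# connection-attribute assertions further down): host, port and db map to
# SERVER_ADDRESS, SERVER_PORT and DB_NAME, with the db index rendered as a
# string.
def _expected_connection_data_sketch(kwargs):
    return {
        SPANDATA.SERVER_ADDRESS: kwargs["host"],
        SPANDATA.SERVER_PORT: kwargs["port"],
        SPANDATA.DB_NAME: str(kwargs["db"]),
    }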
def test_basic(sentry_init, capture_events):
sentry_init(integrations=[RedisIntegration()])
events = capture_events()
connection = FakeStrictRedis()
connection.get("foobar")
capture_message("hi")
(event,) = events
(crumb,) = event["breadcrumbs"]["values"]
assert crumb == {
"category": "redis",
"message": "GET 'foobar'",
"data": {
"redis.key": "foobar",
"redis.command": "GET",
"redis.is_cluster": False,
"db.operation": "GET",
},
"timestamp": crumb["timestamp"],
"type": "redis",
}
@pytest.mark.parametrize(
"is_transaction, send_default_pii, expected_first_ten",
[
(False, False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
(True, True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
],
)
def test_redis_pipeline(
sentry_init, capture_events, is_transaction, send_default_pii, expected_first_ten
):
sentry_init(
integrations=[RedisIntegration()],
traces_sample_rate=1.0,
send_default_pii=send_default_pii,
)
events = capture_events()
connection = FakeStrictRedis()
with start_transaction():
pipeline = connection.pipeline(transaction=is_transaction)
pipeline.get("foo")
pipeline.set("bar", 1)
pipeline.set("baz", 2)
pipeline.execute()
(event,) = events
(span,) = event["spans"]
assert span["op"] == "db.redis"
assert span["description"] == "redis.pipeline.execute"
assert span["data"][SPANDATA.DB_SYSTEM] == "redis"
assert span["data"]["redis.commands"] == {
"count": 3,
"first_ten": expected_first_ten,
}
assert span["tags"] == {
"redis.transaction": is_transaction,
"redis.is_cluster": False,
}
def test_sensitive_data(sentry_init, capture_events):
# fakeredis does not support the AUTH command, so we need to mock it
with mock.patch(
"sentry_sdk.integrations.redis._COMMANDS_INCLUDING_SENSITIVE_DATA", ["get"]
):
sentry_init(
integrations=[RedisIntegration()],
traces_sample_rate=1.0,
send_default_pii=True,
)
events = capture_events()
connection = FakeStrictRedis()
with start_transaction():
connection.get(
"this is super secret"
) # because fakeredis does not support AUTH we use GET instead
(event,) = events
spans = event["spans"]
assert spans[0]["op"] == "db.redis"
assert spans[0]["description"] == "GET [Filtered]"
def test_pii_data_redacted(sentry_init, capture_events):
sentry_init(
integrations=[RedisIntegration()],
traces_sample_rate=1.0,
)
events = capture_events()
connection = FakeStrictRedis()
with start_transaction():
connection.set("somekey1", "my secret string1")
connection.set("somekey2", "my secret string2")
connection.get("somekey2")
connection.delete("somekey1", "somekey2")
(event,) = events
spans = event["spans"]
assert spans[0]["op"] == "db.redis"
assert spans[0]["description"] == "SET 'somekey1' [Filtered]"
assert spans[1]["description"] == "SET 'somekey2' [Filtered]"
assert spans[2]["description"] == "GET 'somekey2'"
assert spans[3]["description"] == "DEL 'somekey1' [Filtered]"
def test_pii_data_sent(sentry_init, capture_events):
sentry_init(
integrations=[RedisIntegration()],
traces_sample_rate=1.0,
send_default_pii=True,
)
events = capture_events()
connection = FakeStrictRedis()
with start_transaction():
connection.set("somekey1", "my secret string1")
connection.set("somekey2", "my secret string2")
connection.get("somekey2")
connection.delete("somekey1", "somekey2")
(event,) = events
spans = event["spans"]
assert spans[0]["op"] == "db.redis"
assert spans[0]["description"] == "SET 'somekey1' 'my secret string1'"
assert spans[1]["description"] == "SET 'somekey2' 'my secret string2'"
assert spans[2]["description"] == "GET 'somekey2'"
assert spans[3]["description"] == "DEL 'somekey1' 'somekey2'"
def test_data_truncation(sentry_init, capture_events):
sentry_init(
integrations=[RedisIntegration()],
traces_sample_rate=1.0,
send_default_pii=True,
)
events = capture_events()
connection = FakeStrictRedis()
with start_transaction():
long_string = "a" * 100000
connection.set("somekey1", long_string)
short_string = "b" * 10
connection.set("somekey2", short_string)
(event,) = events
spans = event["spans"]
assert spans[0]["op"] == "db.redis"
assert spans[0]["description"] == "SET 'somekey1' '%s..." % (
long_string[: 1024 - len("...") - len("SET 'somekey1' '")],
)
assert spans[1]["description"] == "SET 'somekey2' '%s'" % (short_string,)
def test_data_truncation_custom(sentry_init, capture_events):
sentry_init(
integrations=[RedisIntegration(max_data_size=30)],
traces_sample_rate=1.0,
send_default_pii=True,
)
events = capture_events()
connection = FakeStrictRedis()
with start_transaction():
long_string = "a" * 100000
connection.set("somekey1", long_string)
short_string = "b" * 10
connection.set("somekey2", short_string)
(event,) = events
spans = event["spans"]
assert spans[0]["op"] == "db.redis"
assert spans[0]["description"] == "SET 'somekey1' '%s..." % (
long_string[: 30 - len("...") - len("SET 'somekey1' '")],
)
assert spans[1]["description"] == "SET 'somekey2' '%s'" % (short_string,)
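# Worked sketch of the custom truncation budget above: with max_data_size=30
# the whole description is capped at 30 characters, leaving
# 30 - len("SET 'somekey1' '") - len("...") == 11 characters of the value.
def test_truncation_budget_sketch():
    prefix = "SET 'somekey1' '"
    budget = 30 - len(prefix) - len("...")
    assert budget == 11
    assert len(prefix + "a" * budget + "...") == 30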
def test_breadcrumbs(sentry_init, capture_events):
sentry_init(
integrations=[RedisIntegration(max_data_size=30)],
send_default_pii=True,
)
events = capture_events()
connection = FakeStrictRedis()
long_string = "a" * 100000
connection.set("somekey1", long_string)
short_string = "b" * 10
connection.set("somekey2", short_string)
capture_message("hi")
(event,) = events
crumbs = event["breadcrumbs"]["values"]
assert crumbs[0] == {
"message": "SET 'somekey1' 'aaaaaaaaaaa...",
"type": "redis",
"category": "redis",
"data": {
"db.operation": "SET",
"redis.is_cluster": False,
"redis.command": "SET",
"redis.key": "somekey1",
},
"timestamp": crumbs[0]["timestamp"],
}
assert crumbs[1] == {
"message": "SET 'somekey2' 'bbbbbbbbbb'",
"type": "redis",
"category": "redis",
"data": {
"db.operation": "SET",
"redis.is_cluster": False,
"redis.command": "SET",
"redis.key": "somekey2",
},
"timestamp": crumbs[1]["timestamp"],
}
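# Note that max_data_size also bounds breadcrumb messages: with
# max_data_size=30, "SET 'somekey1' '" (16 chars) plus 11 value chars plus
# "..." lands exactly on the 30-character budget asserted above.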
def test_db_connection_attributes_client(sentry_init, capture_events):
sentry_init(
traces_sample_rate=1.0,
integrations=[RedisIntegration()],
)
events = capture_events()
with start_transaction():
connection = FakeStrictRedis(connection_pool=MOCK_CONNECTION_POOL)
connection.get("foobar")
(event,) = events
(span,) = event["spans"]
assert span["op"] == "db.redis"
assert span["description"] == "GET 'foobar'"
assert span["data"][SPANDATA.DB_SYSTEM] == "redis"
assert span["data"][SPANDATA.DB_NAME] == "1"
assert span["data"][SPANDATA.SERVER_ADDRESS] == "localhost"
assert span["data"][SPANDATA.SERVER_PORT] == 63791
def test_db_connection_attributes_pipeline(sentry_init, capture_events):
sentry_init(
traces_sample_rate=1.0,
integrations=[RedisIntegration()],
)
events = capture_events()
with start_transaction():
connection = FakeStrictRedis(connection_pool=MOCK_CONNECTION_POOL)
pipeline = connection.pipeline(transaction=False)
pipeline.get("foo")
pipeline.set("bar", 1)
pipeline.set("baz", 2)
pipeline.execute()
(event,) = events
(span,) = event["spans"]
assert span["op"] == "db.redis"
assert span["description"] == "redis.pipeline.execute"
assert span["data"][SPANDATA.DB_SYSTEM] == "redis"
assert span["data"][SPANDATA.DB_NAME] == "1"
assert span["data"][SPANDATA.SERVER_ADDRESS] == "localhost"
assert span["data"][SPANDATA.SERVER_PORT] == 63791
# sentry-python-1.39.2/tests/integrations/rediscluster/__init__.py
import pytest
pytest.importorskip("rediscluster")
# sentry-python-1.39.2/tests/integrations/rediscluster/test_rediscluster.py
import pytest
from sentry_sdk import capture_message
from sentry_sdk.api import start_transaction
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.redis import RedisIntegration
try:
from unittest import mock
except ImportError:
import mock
import rediscluster
MOCK_CONNECTION_POOL = mock.MagicMock()
MOCK_CONNECTION_POOL.connection_kwargs = {
"host": "localhost",
"port": 63791,
"db": 1,
}
rediscluster_classes = [rediscluster.RedisCluster]
if hasattr(rediscluster, "StrictRedisCluster"):
rediscluster_classes.append(rediscluster.StrictRedisCluster)
@pytest.fixture(autouse=True)
def monkeypatch_rediscluster_classes(reset_integrations):
try:
pipeline_cls = rediscluster.pipeline.ClusterPipeline
except AttributeError:
pipeline_cls = rediscluster.StrictClusterPipeline
rediscluster.RedisCluster.pipeline = lambda *_, **__: pipeline_cls(
connection_pool=MOCK_CONNECTION_POOL
)
pipeline_cls.execute = lambda *_, **__: None
for cls in rediscluster_classes:
cls.execute_command = lambda *_, **__: None
@pytest.mark.parametrize("rediscluster_cls", rediscluster_classes)
def test_rediscluster_basic(rediscluster_cls, sentry_init, capture_events):
sentry_init(integrations=[RedisIntegration()])
events = capture_events()
rc = rediscluster_cls(connection_pool=MOCK_CONNECTION_POOL)
rc.get("foobar")
capture_message("hi")
(event,) = events
(crumb,) = event["breadcrumbs"]["values"]
assert crumb == {
"category": "redis",
"message": "GET 'foobar'",
"data": {
"db.operation": "GET",
"redis.key": "foobar",
"redis.command": "GET",
"redis.is_cluster": True,
},
"timestamp": crumb["timestamp"],
"type": "redis",
}
@pytest.mark.parametrize(
"send_default_pii, expected_first_ten",
[
(False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
(True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
],
)
def test_rediscluster_pipeline(
sentry_init, capture_events, send_default_pii, expected_first_ten
):
sentry_init(
integrations=[RedisIntegration()],
traces_sample_rate=1.0,
send_default_pii=send_default_pii,
)
events = capture_events()
rc = rediscluster.RedisCluster(connection_pool=MOCK_CONNECTION_POOL)
with start_transaction():
pipeline = rc.pipeline()
pipeline.get("foo")
pipeline.set("bar", 1)
pipeline.set("baz", 2)
pipeline.execute()
(event,) = events
(span,) = event["spans"]
assert span["op"] == "db.redis"
assert span["description"] == "redis.pipeline.execute"
assert span["data"] == {
"redis.commands": {
"count": 3,
"first_ten": expected_first_ten,
},
SPANDATA.DB_SYSTEM: "redis",
SPANDATA.DB_NAME: "1",
SPANDATA.SERVER_ADDRESS: "localhost",
SPANDATA.SERVER_PORT: 63791,
}
assert span["tags"] == {
"redis.transaction": False, # For Cluster, this is always False
"redis.is_cluster": True,
}
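# Pipeline spans aggregate rather than emit one span per command (as asserted
# above): a single redis.pipeline.execute span carries a redis.commands payload
# with the total count and, judging by the key name, at most the first ten
# rendered commands, each subject to the same PII filtering as command spans.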
@pytest.mark.parametrize("rediscluster_cls", rediscluster_classes)
def test_db_connection_attributes_client(sentry_init, capture_events, rediscluster_cls):
sentry_init(
traces_sample_rate=1.0,
integrations=[RedisIntegration()],
)
events = capture_events()
rc = rediscluster_cls(connection_pool=MOCK_CONNECTION_POOL)
with start_transaction():
rc.get("foobar")
(event,) = events
(span,) = event["spans"]
assert span["data"] == {
SPANDATA.DB_SYSTEM: "redis",
SPANDATA.DB_NAME: "1",
SPANDATA.SERVER_ADDRESS: "localhost",
SPANDATA.SERVER_PORT: 63791,
}
@pytest.mark.parametrize("rediscluster_cls", rediscluster_classes)
def test_db_connection_attributes_pipeline(
sentry_init, capture_events, rediscluster_cls
):
sentry_init(
traces_sample_rate=1.0,
integrations=[RedisIntegration()],
)
events = capture_events()
rc = rediscluster.RedisCluster(connection_pool=MOCK_CONNECTION_POOL)
with start_transaction():
pipeline = rc.pipeline()
pipeline.get("foo")
pipeline.execute()
(event,) = events
(span,) = event["spans"]
assert span["op"] == "db.redis"
assert span["description"] == "redis.pipeline.execute"
assert span["data"] == {
"redis.commands": {
"count": 1,
"first_ten": ["GET 'foo'"],
},
SPANDATA.DB_SYSTEM: "redis",
SPANDATA.DB_NAME: "1",
SPANDATA.SERVER_ADDRESS: "localhost",
SPANDATA.SERVER_PORT: 63791,
}
# sentry-python-1.39.2/tests/integrations/requests/__init__.py
import pytest
pytest.importorskip("requests")
# sentry-python-1.39.2/tests/integrations/requests/test_requests.py
import requests
import responses
import pytest
from sentry_sdk import capture_message
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.stdlib import StdlibIntegration
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
def test_crumb_capture(sentry_init, capture_events):
sentry_init(integrations=[StdlibIntegration()])
url = "http://example.com/"
responses.add(responses.GET, url, status=200)
events = capture_events()
response = requests.get(url)
capture_message("Testing!")
(event,) = events
(crumb,) = event["breadcrumbs"]["values"]
assert crumb["type"] == "http"
assert crumb["category"] == "httplib"
assert crumb["data"] == {
"url": url,
SPANDATA.HTTP_METHOD: "GET",
SPANDATA.HTTP_FRAGMENT: "",
SPANDATA.HTTP_QUERY: "",
SPANDATA.HTTP_STATUS_CODE: response.status_code,
"reason": response.reason,
}
@pytest.mark.tests_internal_exceptions
def test_omit_url_data_if_parsing_fails(sentry_init, capture_events):
sentry_init(integrations=[StdlibIntegration()])
url = "https://example.com"
responses.add(responses.GET, url, status=200)
events = capture_events()
with mock.patch(
"sentry_sdk.integrations.stdlib.parse_url",
side_effect=ValueError,
):
response = requests.get(url)
capture_message("Testing!")
(event,) = events
assert event["breadcrumbs"]["values"][0]["data"] == {
SPANDATA.HTTP_METHOD: "GET",
SPANDATA.HTTP_STATUS_CODE: response.status_code,
"reason": response.reason,
# no URL-related data
}
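# The defensive pattern this test pins down, as a sketch (not the SDK's literal
# code): URL metadata on the breadcrumb is best-effort, so a parse_url failure
# silently drops the URL, query, and fragment fields instead of breaking the
# outgoing request.
#
#     try:
#         data.update(url_fields_from(parse_url(url)))  # hypothetical helper
#     except Exception:
#         pass  # omit URL-related data entirely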
# sentry-python-1.39.2/tests/integrations/rq/__init__.py
import pytest
pytest.importorskip("rq")
# sentry-python-1.39.2/tests/integrations/rq/test_rq.py
import pytest
from fakeredis import FakeStrictRedis
from sentry_sdk import configure_scope, start_transaction
from sentry_sdk.integrations.rq import RqIntegration
from sentry_sdk.utils import parse_version
import rq
try:
from unittest import mock # python 3.3 and above
except ImportError:
import mock # python < 3.3
@pytest.fixture(autouse=True)
def _patch_rq_get_server_version(monkeypatch):
"""
Patch RQ 1.5.1 and lower to work with fakeredis.
https://github.com/jamesls/fakeredis/issues/273
"""
from distutils.version import StrictVersion
if parse_version(rq.VERSION) <= (1, 5, 1):
for k in (
"rq.job.Job.get_redis_server_version",
"rq.worker.Worker.get_redis_server_version",
):
try:
monkeypatch.setattr(k, lambda _: StrictVersion("4.0.0"))
except AttributeError:
# old RQ Job/Worker doesn't have a get_redis_server_version attr
pass
def crashing_job(foo):
1 / 0
def chew_up_shoes(dog, human, shoes):
raise Exception("{}!! Why did you eat {}'s {}??".format(dog, human, shoes))
def do_trick(dog, trick):
return "{}, can you {}? Good dog!".format(dog, trick)
def test_basic(sentry_init, capture_events):
sentry_init(integrations=[RqIntegration()])
events = capture_events()
queue = rq.Queue(connection=FakeStrictRedis())
worker = rq.SimpleWorker([queue], connection=queue.connection)
queue.enqueue(crashing_job, foo=42)
worker.work(burst=True)
(event,) = events
(exception,) = event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
assert exception["mechanism"]["type"] == "rq"
assert exception["stacktrace"]["frames"][-1]["vars"]["foo"] == "42"
assert event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
extra = event["extra"]["rq-job"]
assert extra["args"] == []
assert extra["kwargs"] == {"foo": 42}
assert extra["description"] == "tests.integrations.rq.test_rq.crashing_job(foo=42)"
assert extra["func"] == "tests.integrations.rq.test_rq.crashing_job"
assert "job_id" in extra
assert "enqueued_at" in extra
# older versions don't persist started_at correctly
if parse_version(rq.VERSION) >= (0, 9):
assert "started_at" in extra
def test_transport_shutdown(sentry_init, capture_events_forksafe):
sentry_init(integrations=[RqIntegration()])
events = capture_events_forksafe()
queue = rq.Queue(connection=FakeStrictRedis())
worker = rq.Worker([queue], connection=queue.connection)
queue.enqueue(crashing_job, foo=42)
worker.work(burst=True)
event = events.read_event()
events.read_flush()
(exception,) = event["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
def test_transaction_with_error(
sentry_init, capture_events, DictionaryContaining # noqa:N803
):
sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
events = capture_events()
queue = rq.Queue(connection=FakeStrictRedis())
worker = rq.SimpleWorker([queue], connection=queue.connection)
queue.enqueue(chew_up_shoes, "Charlie", "Katie", shoes="flip-flops")
worker.work(burst=True)
error_event, envelope = events
assert error_event["transaction"] == "tests.integrations.rq.test_rq.chew_up_shoes"
assert error_event["contexts"]["trace"]["op"] == "queue.task.rq"
assert error_event["exception"]["values"][0]["type"] == "Exception"
assert (
error_event["exception"]["values"][0]["value"]
== "Charlie!! Why did you eat Katie's flip-flops??"
)
assert envelope["type"] == "transaction"
assert envelope["contexts"]["trace"] == error_event["contexts"]["trace"]
assert envelope["transaction"] == error_event["transaction"]
assert envelope["extra"]["rq-job"] == DictionaryContaining(
{
"args": ["Charlie", "Katie"],
"kwargs": {"shoes": "flip-flops"},
"func": "tests.integrations.rq.test_rq.chew_up_shoes",
"description": "tests.integrations.rq.test_rq.chew_up_shoes('Charlie', 'Katie', shoes='flip-flops')",
}
)
def test_error_has_trace_context_if_tracing_disabled(
sentry_init,
capture_events,
):
sentry_init(integrations=[RqIntegration()])
events = capture_events()
queue = rq.Queue(connection=FakeStrictRedis())
worker = rq.SimpleWorker([queue], connection=queue.connection)
queue.enqueue(crashing_job, foo=None)
worker.work(burst=True)
(error_event,) = events
assert error_event["contexts"]["trace"]
def test_tracing_enabled(
sentry_init,
capture_events,
):
sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
events = capture_events()
queue = rq.Queue(connection=FakeStrictRedis())
worker = rq.SimpleWorker([queue], connection=queue.connection)
with start_transaction(op="rq transaction") as transaction:
queue.enqueue(crashing_job, foo=None)
worker.work(burst=True)
error_event, envelope, _ = events
assert error_event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
assert envelope["contexts"]["trace"] == error_event["contexts"]["trace"]
def test_tracing_disabled(
sentry_init,
capture_events,
):
sentry_init(integrations=[RqIntegration()])
events = capture_events()
queue = rq.Queue(connection=FakeStrictRedis())
worker = rq.SimpleWorker([queue], connection=queue.connection)
with configure_scope() as scope:
queue.enqueue(crashing_job, foo=None)
worker.work(burst=True)
(error_event,) = events
assert (
error_event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
)
assert (
error_event["contexts"]["trace"]["trace_id"]
== scope._propagation_context["trace_id"]
)
def test_transaction_no_error(
sentry_init, capture_events, DictionaryContaining # noqa:N803
):
sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
events = capture_events()
queue = rq.Queue(connection=FakeStrictRedis())
worker = rq.SimpleWorker([queue], connection=queue.connection)
queue.enqueue(do_trick, "Maisey", trick="kangaroo")
worker.work(burst=True)
envelope = events[0]
assert envelope["type"] == "transaction"
assert envelope["contexts"]["trace"]["op"] == "queue.task.rq"
assert envelope["transaction"] == "tests.integrations.rq.test_rq.do_trick"
assert envelope["extra"]["rq-job"] == DictionaryContaining(
{
"args": ["Maisey"],
"kwargs": {"trick": "kangaroo"},
"func": "tests.integrations.rq.test_rq.do_trick",
"description": "tests.integrations.rq.test_rq.do_trick('Maisey', trick='kangaroo')",
}
)
def test_traces_sampler_gets_correct_values_in_sampling_context(
sentry_init, DictionaryContaining, ObjectDescribedBy # noqa:N803
):
traces_sampler = mock.Mock(return_value=True)
sentry_init(integrations=[RqIntegration()], traces_sampler=traces_sampler)
queue = rq.Queue(connection=FakeStrictRedis())
worker = rq.SimpleWorker([queue], connection=queue.connection)
queue.enqueue(do_trick, "Bodhi", trick="roll over")
worker.work(burst=True)
traces_sampler.assert_any_call(
DictionaryContaining(
{
"rq_job": ObjectDescribedBy(
type=rq.job.Job,
attrs={
"description": "tests.integrations.rq.test_rq.do_trick('Bodhi', trick='roll over')",
"result": "Bodhi, can you roll over? Good dog!",
"func_name": "tests.integrations.rq.test_rq.do_trick",
"args": ("Bodhi",),
"kwargs": {"trick": "roll over"},
},
),
}
)
)
@pytest.mark.skipif(
parse_version(rq.__version__) < (1, 5), reason="At least rq-1.5 required"
)
def test_job_with_retries(sentry_init, capture_events):
sentry_init(integrations=[RqIntegration()])
events = capture_events()
queue = rq.Queue(connection=FakeStrictRedis())
worker = rq.SimpleWorker([queue], connection=queue.connection)
queue.enqueue(crashing_job, foo=42, retry=rq.Retry(max=1))
worker.work(burst=True)
assert len(events) == 1
# sentry-python-1.39.2/tests/integrations/sanic/__init__.py
import pytest
pytest.importorskip("sanic")
# sentry-python-1.39.2/tests/integrations/sanic/test_sanic.py
import asyncio
import contextlib
import os
import random
import sys
from unittest.mock import Mock
import pytest
from sentry_sdk import capture_message, configure_scope
from sentry_sdk.integrations.sanic import SanicIntegration
from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_URL
from sanic import Sanic, request, response, __version__ as SANIC_VERSION_RAW
from sanic.response import HTTPResponse
from sanic.exceptions import SanicException
try:
from sanic_testing import TestManager
except ImportError:
TestManager = None
try:
from sanic_testing.reusable import ReusableClient
except ImportError:
ReusableClient = None
from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
from collections.abc import Iterable, Container
from typing import Any, Optional
SANIC_VERSION = tuple(map(int, SANIC_VERSION_RAW.split(".")))
PERFORMANCE_SUPPORTED = SANIC_VERSION >= (21, 9)
@pytest.fixture
def app():
if SANIC_VERSION < (19,):
"""
Older Sanic versions 0.8 and 18 bind to the same fixed port which
creates problems when we run tests concurrently.
"""
old_test_client = Sanic.test_client.__get__
def new_test_client(self):
client = old_test_client(self, Sanic)
client.port += os.getpid() % 100
return client
Sanic.test_client = property(new_test_client)
if SANIC_VERSION >= (20, 12) and SANIC_VERSION < (22, 6):
# Some versions (introduced in 20.12.0, removed again in 22.6.0) store the app
# instance in an internal class registry for later retrieval, so pass
# register=False to disable that.
sanic_app = Sanic("Test", register=False)
else:
sanic_app = Sanic("Test")
if TestManager is not None:
TestManager(sanic_app)
@sanic_app.route("/message")
def hi(request):
capture_message("hi")
return response.text("ok")
@sanic_app.route("/message/")
def hi_with_id(request, message_id):
capture_message("hi with id")
return response.text("ok with id")
@sanic_app.route("/500")
def fivehundred(_):
1 / 0
return sanic_app
def get_client(app):
@contextlib.contextmanager
def simple_client(app):
yield app.test_client
if ReusableClient is not None:
return ReusableClient(app)
else:
return simple_client(app)
def test_request_data(sentry_init, app, capture_events):
sentry_init(integrations=[SanicIntegration()])
events = capture_events()
c = get_client(app)
with c as client:
_, response = client.get("/message?foo=bar")
assert response.status == 200
(event,) = events
assert event["transaction"] == "hi"
assert event["request"]["env"] == {"REMOTE_ADDR": ""}
assert set(event["request"]["headers"]) >= {
"accept",
"accept-encoding",
"host",
"user-agent",
}
assert event["request"]["query_string"] == "foo=bar"
assert event["request"]["url"].endswith("/message")
assert event["request"]["method"] == "GET"
# Assert that state is not leaked
events.clear()
capture_message("foo")
(event,) = events
assert "request" not in event
assert "transaction" not in event
@pytest.mark.parametrize(
"url,expected_transaction,expected_source",
[
("/message", "hi", "component"),
("/message/123456", "hi_with_id", "component"),
],
)
def test_transaction_name(
sentry_init, app, capture_events, url, expected_transaction, expected_source
):
sentry_init(integrations=[SanicIntegration()])
events = capture_events()
c = get_client(app)
with c as client:
_, response = client.get(url)
assert response.status == 200
(event,) = events
assert event["transaction"] == expected_transaction
assert event["transaction_info"] == {"source": expected_source}
def test_errors(sentry_init, app, capture_events):
sentry_init(integrations=[SanicIntegration()])
events = capture_events()
@app.route("/error")
def myerror(request):
raise ValueError("oh no")
c = get_client(app)
with c as client:
_, response = client.get("/error")
assert response.status == 500
(event,) = events
assert event["transaction"] == "myerror"
(exception,) = event["exception"]["values"]
assert exception["type"] == "ValueError"
assert exception["value"] == "oh no"
assert any(
frame["filename"].endswith("test_sanic.py")
for frame in exception["stacktrace"]["frames"]
)
def test_bad_request_not_captured(sentry_init, app, capture_events):
sentry_init(integrations=[SanicIntegration()])
events = capture_events()
@app.route("/")
def index(request):
raise SanicException("...", status_code=400)
c = get_client(app)
with c as client:
_, response = client.get("/")
assert response.status == 400
assert not events
def test_error_in_errorhandler(sentry_init, app, capture_events):
sentry_init(integrations=[SanicIntegration()])
events = capture_events()
@app.route("/error")
def myerror(request):
raise ValueError("oh no")
@app.exception(ValueError)
def myhandler(request, exception):
1 / 0
c = get_client(app)
with c as client:
_, response = client.get("/error")
assert response.status == 500
event1, event2 = events
(exception,) = event1["exception"]["values"]
assert exception["type"] == "ValueError"
assert any(
frame["filename"].endswith("test_sanic.py")
for frame in exception["stacktrace"]["frames"]
)
exception = event2["exception"]["values"][-1]
assert exception["type"] == "ZeroDivisionError"
assert any(
frame["filename"].endswith("test_sanic.py")
for frame in exception["stacktrace"]["frames"]
)
def test_concurrency(sentry_init, app):
"""
Make sure we instrument Sanic in a way where request data does not leak
between request handlers. This test also implicitly tests our concept of
how async code should be instrumented, so if it breaks it likely has
ramifications for other async integrations and async usercode.
We directly call the request handler instead of using Sanic's test client
because that's the only way we could reproduce leakage with such a low
amount of concurrent tasks.
"""
sentry_init(integrations=[SanicIntegration()])
@app.route("/context-check/")
async def context_check(request, i):
with configure_scope() as scope:
scope.set_tag("i", i)
await asyncio.sleep(random.random())
with configure_scope() as scope:
assert scope._tags["i"] == i
return response.text("ok")
async def task(i):
responses = []
kwargs = {
"url_bytes": "http://localhost/context-check/{i}".format(i=i).encode(
"ascii"
),
"headers": {},
"version": "1.1",
"method": "GET",
"transport": None,
}
if SANIC_VERSION >= (19,):
kwargs["app"] = app
if SANIC_VERSION >= (21, 3):
class MockAsyncStreamer:
def __init__(self, request_body):
self.request_body = request_body
self.iter = iter(self.request_body)
if SANIC_VERSION >= (21, 12):
self.response = None
self.stage = Mock()
else:
self.response = b"success"
def respond(self, response):
responses.append(response)
patched_response = HTTPResponse()
return patched_response
def __aiter__(self):
return self
async def __anext__(self):
try:
return next(self.iter)
except StopIteration:
raise StopAsyncIteration
patched_request = request.Request(**kwargs)
patched_request.stream = MockAsyncStreamer([b"hello", b"foo"])
if SANIC_VERSION >= (21, 9):
await app.dispatch(
"http.lifecycle.request",
context={"request": patched_request},
inline=True,
)
await app.handle_request(
patched_request,
)
else:
await app.handle_request(
request.Request(**kwargs),
write_callback=responses.append,
stream_callback=responses.append,
)
(r,) = responses
assert r.status == 200
async def runner():
if SANIC_VERSION >= (21, 3):
if SANIC_VERSION >= (21, 9):
await app._startup()
else:
try:
app.router.reset()
app.router.finalize()
except AttributeError:
...
await asyncio.gather(*(task(i) for i in range(1000)))
if sys.version_info < (3, 7):
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
loop.run_until_complete(runner())
else:
asyncio.run(runner())
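# asyncio.run() only exists on Python 3.7+, hence the manual event loop on 3.6.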
with configure_scope() as scope:
assert not scope._tags
class TransactionTestConfig:
"""
Data class to store configurations for each performance transaction test run, including
both the inputs and relevant expected results.
"""
def __init__(
self,
integration_args,
url,
expected_status,
expected_transaction_name,
expected_source=None,
):
# type: (Iterable[Optional[Container[int]]], str, int, Optional[str], Optional[str]) -> None
"""
An expected_transaction_name of None indicates that we expect not to receive a transaction.
"""
self.integration_args = integration_args
self.url = url
self.expected_status = expected_status
self.expected_transaction_name = expected_transaction_name
self.expected_source = expected_source
@pytest.mark.skipif(
not PERFORMANCE_SUPPORTED, reason="Performance not supported on this Sanic version"
)
@pytest.mark.parametrize(
"test_config",
[
TransactionTestConfig(
# Transaction for successful page load
integration_args=(),
url="/message",
expected_status=200,
expected_transaction_name="hi",
expected_source=TRANSACTION_SOURCE_COMPONENT,
),
TransactionTestConfig(
# Transaction still recorded when we have an internal server error
integration_args=(),
url="/500",
expected_status=500,
expected_transaction_name="fivehundred",
expected_source=TRANSACTION_SOURCE_COMPONENT,
),
TransactionTestConfig(
# By default, no transaction when we have a 404 error
integration_args=(),
url="/404",
expected_status=404,
expected_transaction_name=None,
),
TransactionTestConfig(
# With no ignored HTTP statuses, we should get transactions for 404 errors
integration_args=(None,),
url="/404",
expected_status=404,
expected_transaction_name="/404",
expected_source=TRANSACTION_SOURCE_URL,
),
TransactionTestConfig(
# Transaction can be suppressed for other HTTP statuses, too, by passing config to the integration
integration_args=({200},),
url="/message",
expected_status=200,
expected_transaction_name=None,
),
],
)
def test_transactions(test_config, sentry_init, app, capture_events):
# type: (TransactionTestConfig, Any, Any, Any) -> None
# Init the SanicIntegration with the desired arguments
sentry_init(
integrations=[SanicIntegration(*test_config.integration_args)],
traces_sample_rate=1.0,
)
events = capture_events()
# Make request to the desired URL
c = get_client(app)
with c as client:
_, response = client.get(test_config.url)
assert response.status == test_config.expected_status
# Extract the transaction events by inspecting the event types. We should at most have 1 transaction event.
transaction_events = [
e for e in events if "type" in e and e["type"] == "transaction"
]
assert len(transaction_events) <= 1
# Get the only transaction event, or set to None if there are no transaction events.
(transaction_event, *_) = [*transaction_events, None]
# We should have no transaction event if and only if we expect no transactions
assert (transaction_event is None) == (
test_config.expected_transaction_name is None
)
# If a transaction was expected, ensure it is correct
assert (
transaction_event is None
or transaction_event["transaction"] == test_config.expected_transaction_name
)
assert (
transaction_event is None
or transaction_event["transaction_info"]["source"]
== test_config.expected_source
)
# sentry-python-1.39.2/tests/integrations/serverless/test_serverless.py
import pytest
from sentry_sdk.integrations.serverless import serverless_function
def test_basic(sentry_init, capture_exceptions, monkeypatch):
sentry_init()
exceptions = capture_exceptions()
flush_calls = []
@serverless_function
def foo():
monkeypatch.setattr(
"sentry_sdk.Hub.current.flush", lambda: flush_calls.append(1)
)
1 / 0
with pytest.raises(ZeroDivisionError):
foo()
(exception,) = exceptions
assert isinstance(exception, ZeroDivisionError)
assert flush_calls == [1]
def test_flush_disabled(sentry_init, capture_exceptions, monkeypatch):
sentry_init()
exceptions = capture_exceptions()
flush_calls = []
monkeypatch.setattr("sentry_sdk.Hub.current.flush", lambda: flush_calls.append(1))
@serverless_function(flush=False)
def foo():
1 / 0
with pytest.raises(ZeroDivisionError):
foo()
(exception,) = exceptions
assert isinstance(exception, ZeroDivisionError)
assert flush_calls == []
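# Usage sketch (function name is hypothetical): serverless_function captures a
# crashing handler's exception, re-raises it, and, unless flush=False is
# passed, flushes the transport so the event leaves the process before the
# serverless runtime freezes it.
#
#     @serverless_function
#     def handler(event, context):
#         do_work()  # a crash here is captured, flushed, then re-raised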
# sentry-python-1.39.2/tests/integrations/socket/__init__.py
import pytest
pytest.importorskip("socket")
# sentry-python-1.39.2/tests/integrations/socket/test_socket.py
import socket
from sentry_sdk import start_transaction
from sentry_sdk.integrations.socket import SocketIntegration
def test_getaddrinfo_trace(sentry_init, capture_events):
sentry_init(integrations=[SocketIntegration()], traces_sample_rate=1.0)
events = capture_events()
with start_transaction():
socket.getaddrinfo("example.com", 443)
(event,) = events
(span,) = event["spans"]
assert span["op"] == "socket.dns"
assert span["description"] == "example.com:443"
assert span["data"] == {
"host": "example.com",
"port": 443,
}
def test_create_connection_trace(sentry_init, capture_events):
timeout = 10
sentry_init(integrations=[SocketIntegration()], traces_sample_rate=1.0)
events = capture_events()
with start_transaction():
socket.create_connection(("example.com", 443), timeout, None)
(event,) = events
(connect_span, dns_span) = event["spans"]
# since create_connection calls getaddrinfo internally, the event should also contain a dns span
assert connect_span["op"] == "socket.connection"
assert connect_span["description"] == "example.com:443"
assert connect_span["data"] == {
"address": ["example.com", 443],
"timeout": timeout,
"source_address": None,
}
assert dns_span["op"] == "socket.dns"
assert dns_span["description"] == "example.com:443"
assert dns_span["data"] == {
"host": "example.com",
"port": 443,
}
# sentry-python-1.39.2/tests/integrations/spark/__init__.py
import pytest
pytest.importorskip("pyspark")
pytest.importorskip("py4j")
# sentry-python-1.39.2/tests/integrations/spark/test_spark.py
import pytest
import sys
from sentry_sdk.integrations.spark.spark_driver import (
_set_app_properties,
_start_sentry_listener,
SentryListener,
)
from sentry_sdk.integrations.spark.spark_worker import SparkWorkerIntegration
from pyspark import SparkContext
from py4j.protocol import Py4JJavaError
################
# DRIVER TESTS #
################
def test_set_app_properties():
spark_context = SparkContext(appName="Testing123")
_set_app_properties()
assert spark_context.getLocalProperty("sentry_app_name") == "Testing123"
# applicationId generated by sparkContext init
assert (
spark_context.getLocalProperty("sentry_application_id")
== spark_context.applicationId
)
def test_start_sentry_listener():
spark_context = SparkContext.getOrCreate()
gateway = spark_context._gateway
assert gateway._callback_server is None
_start_sentry_listener(spark_context)
assert gateway._callback_server is not None
@pytest.fixture
def sentry_listener(monkeypatch):
class MockHub:
def __init__(self):
self.args = []
self.kwargs = {}
def add_breadcrumb(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
listener = SentryListener()
mock_hub = MockHub()
monkeypatch.setattr(listener, "hub", mock_hub)
return listener, mock_hub
def test_sentry_listener_on_job_start(sentry_listener):
listener, mock_hub = sentry_listener
class MockJobStart:
def jobId(self): # noqa: N802
return "sample-job-id-start"
mock_job_start = MockJobStart()
listener.onJobStart(mock_job_start)
assert mock_hub.kwargs["level"] == "info"
assert "sample-job-id-start" in mock_hub.kwargs["message"]
@pytest.mark.parametrize(
"job_result, level", [("JobSucceeded", "info"), ("JobFailed", "warning")]
)
def test_sentry_listener_on_job_end(sentry_listener, job_result, level):
listener, mock_hub = sentry_listener
class MockJobResult:
def toString(self): # noqa: N802
return job_result
class MockJobEnd:
def jobId(self): # noqa: N802
return "sample-job-id-end"
def jobResult(self): # noqa: N802
result = MockJobResult()
return result
mock_job_end = MockJobEnd()
listener.onJobEnd(mock_job_end)
assert mock_hub.kwargs["level"] == level
assert mock_hub.kwargs["data"]["result"] == job_result
assert "sample-job-id-end" in mock_hub.kwargs["message"]
def test_sentry_listener_on_stage_submitted(sentry_listener):
listener, mock_hub = sentry_listener
class StageInfo:
def stageId(self): # noqa: N802
return "sample-stage-id-submit"
def name(self):
return "run-job"
def attemptId(self): # noqa: N802
return 14
class MockStageSubmitted:
def stageInfo(self): # noqa: N802
stageinf = StageInfo()
return stageinf
mock_stage_submitted = MockStageSubmitted()
listener.onStageSubmitted(mock_stage_submitted)
assert mock_hub.kwargs["level"] == "info"
assert "sample-stage-id-submit" in mock_hub.kwargs["message"]
assert mock_hub.kwargs["data"]["attemptId"] == 14
assert mock_hub.kwargs["data"]["name"] == "run-job"
@pytest.fixture
def get_mock_stage_completed():
def _inner(failure_reason):
class JavaException:
def __init__(self):
self._target_id = "id"
class FailureReason:
def get(self):
if failure_reason:
return "failure-reason"
else:
raise Py4JJavaError("msg", JavaException())
class StageInfo:
def stageId(self): # noqa: N802
return "sample-stage-id-submit"
def name(self):
return "run-job"
def attemptId(self): # noqa: N802
return 14
def failureReason(self): # noqa: N802
return FailureReason()
class MockStageCompleted:
def stageInfo(self): # noqa: N802
return StageInfo()
return MockStageCompleted()
return _inner
def test_sentry_listener_on_stage_completed_success(
sentry_listener, get_mock_stage_completed
):
listener, mock_hub = sentry_listener
mock_stage_completed = get_mock_stage_completed(failure_reason=False)
listener.onStageCompleted(mock_stage_completed)
assert mock_hub.kwargs["level"] == "info"
assert "sample-stage-id-submit" in mock_hub.kwargs["message"]
assert mock_hub.kwargs["data"]["attemptId"] == 14
assert mock_hub.kwargs["data"]["name"] == "run-job"
assert "reason" not in mock_hub.kwargs["data"]
def test_sentry_listener_on_stage_completed_failure(
sentry_listener, get_mock_stage_completed
):
listener, mock_hub = sentry_listener
mock_stage_completed = get_mock_stage_completed(failure_reason=True)
listener.onStageCompleted(mock_stage_completed)
assert mock_hub.kwargs["level"] == "warning"
assert "sample-stage-id-submit" in mock_hub.kwargs["message"]
assert mock_hub.kwargs["data"]["attemptId"] == 14
assert mock_hub.kwargs["data"]["name"] == "run-job"
assert mock_hub.kwargs["data"]["reason"] == "failure-reason"
################
# WORKER TESTS #
################
def test_spark_worker(monkeypatch, sentry_init, capture_events, capture_exceptions):
import pyspark.worker as original_worker
import pyspark.daemon as original_daemon
from pyspark.taskcontext import TaskContext
task_context = TaskContext._getOrCreate()
def mock_main():
task_context._stageId = 0
task_context._attemptNumber = 1
task_context._partitionId = 2
task_context._taskAttemptId = 3
try:
raise ZeroDivisionError
except ZeroDivisionError:
sys.exit(-1)
monkeypatch.setattr(original_worker, "main", mock_main)
sentry_init(integrations=[SparkWorkerIntegration()])
events = capture_events()
exceptions = capture_exceptions()
original_daemon.worker_main()
# SystemExit is raised, but not recorded as part of the event
assert type(exceptions.pop()) == SystemExit
assert len(events[0]["exception"]["values"]) == 1
assert events[0]["exception"]["values"][0]["type"] == "ZeroDivisionError"
assert events[0]["tags"] == {
"stageId": "0",
"attemptNumber": "1",
"partitionId": "2",
"taskAttemptId": "3",
}
# sentry-python-1.39.2/tests/integrations/sqlalchemy/__init__.py
import pytest
pytest.importorskip("sqlalchemy")
# sentry-python-1.39.2/tests/integrations/sqlalchemy/test_sqlalchemy.py
import os
import pytest
import sys
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker
from sqlalchemy import text
from sentry_sdk import capture_message, start_transaction, configure_scope
from sentry_sdk.consts import DEFAULT_MAX_VALUE_LENGTH, SPANDATA
from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
from sentry_sdk.serializer import MAX_EVENT_BYTES
from sentry_sdk.utils import json_dumps
def test_orm_queries(sentry_init, capture_events):
sentry_init(
integrations=[SqlalchemyIntegration()], _experiments={"record_sql_params": True}
)
events = capture_events()
Base = declarative_base() # noqa: N806
class Person(Base):
__tablename__ = "person"
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
class Address(Base):
__tablename__ = "address"
id = Column(Integer, primary_key=True)
street_name = Column(String(250))
street_number = Column(String(250))
post_code = Column(String(250), nullable=False)
person_id = Column(Integer, ForeignKey("person.id"))
person = relationship(Person)
engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine) # noqa: N806
session = Session()
bob = Person(name="Bob")
session.add(bob)
assert session.query(Person).first() == bob
capture_message("hi")
(event,) = events
for crumb in event["breadcrumbs"]["values"]:
del crumb["timestamp"]
assert event["breadcrumbs"]["values"][-2:] == [
{
"category": "query",
"data": {"db.params": ["Bob"], "db.paramstyle": "qmark"},
"message": "INSERT INTO person (name) VALUES (?)",
"type": "default",
},
{
"category": "query",
"data": {"db.params": [1, 0], "db.paramstyle": "qmark"},
"message": "SELECT person.id AS person_id, person.name AS person_name \n"
"FROM person\n"
" LIMIT ? OFFSET ?",
"type": "default",
},
]
@pytest.mark.skipif(
sys.version_info < (3,), reason="This sqla usage seems to be broken on Py2"
)
def test_transactions(sentry_init, capture_events, render_span_tree):
sentry_init(
integrations=[SqlalchemyIntegration()],
_experiments={"record_sql_params": True},
traces_sample_rate=1.0,
)
events = capture_events()
Base = declarative_base() # noqa: N806
class Person(Base):
__tablename__ = "person"
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
class Address(Base):
__tablename__ = "address"
id = Column(Integer, primary_key=True)
street_name = Column(String(250))
street_number = Column(String(250))
post_code = Column(String(250), nullable=False)
person_id = Column(Integer, ForeignKey("person.id"))
person = relationship(Person)
engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine) # noqa: N806
session = Session()
with start_transaction(name="test_transaction", sampled=True):
with session.begin_nested():
session.query(Person).first()
for _ in range(2):
with pytest.raises(IntegrityError):
with session.begin_nested():
session.add(Person(id=1, name="bob"))
session.add(Person(id=1, name="bob"))
with session.begin_nested():
session.query(Person).first()
(event,) = events
for span in event["spans"]:
assert span["data"][SPANDATA.DB_SYSTEM] == "sqlite"
assert span["data"][SPANDATA.DB_NAME] == ":memory:"
assert SPANDATA.SERVER_ADDRESS not in span["data"]
assert SPANDATA.SERVER_PORT not in span["data"]
assert (
render_span_tree(event)
== """\
- op=null: description=null
- op="db": description="SAVEPOINT sa_savepoint_1"
- op="db": description="SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?"
- op="db": description="RELEASE SAVEPOINT sa_savepoint_1"
- op="db": description="SAVEPOINT sa_savepoint_2"
- op="db": description="INSERT INTO person (id, name) VALUES (?, ?)"
- op="db": description="ROLLBACK TO SAVEPOINT sa_savepoint_2"
- op="db": description="SAVEPOINT sa_savepoint_3"
- op="db": description="INSERT INTO person (id, name) VALUES (?, ?)"
- op="db": description="ROLLBACK TO SAVEPOINT sa_savepoint_3"
- op="db": description="SAVEPOINT sa_savepoint_4"
- op="db": description="SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?"
- op="db": description="RELEASE SAVEPOINT sa_savepoint_4"\
"""
)
def test_long_sql_query_preserved(sentry_init, capture_events):
sentry_init(
traces_sample_rate=1,
integrations=[SqlalchemyIntegration()],
)
events = capture_events()
engine = create_engine("sqlite:///:memory:")
with start_transaction(name="test"):
with engine.connect() as con:
con.execute(text(" UNION ".join("SELECT {}".format(i) for i in range(100))))
(event,) = events
description = event["spans"][0]["description"]
assert description.startswith("SELECT 0 UNION SELECT 1")
assert description.endswith("SELECT 98 UNION SELECT 99")
def test_large_event_not_truncated(sentry_init, capture_events):
sentry_init(
traces_sample_rate=1,
integrations=[SqlalchemyIntegration()],
)
events = capture_events()
long_str = "x" * (DEFAULT_MAX_VALUE_LENGTH + 10)
with configure_scope() as scope:
@scope.add_event_processor
def processor(event, hint):
event["message"] = long_str
return event
engine = create_engine("sqlite:///:memory:")
with start_transaction(name="test"):
with engine.connect() as con:
for _ in range(1500):
con.execute(
text(" UNION ".join("SELECT {}".format(i) for i in range(100)))
)
(event,) = events
assert len(json_dumps(event)) > MAX_EVENT_BYTES
# Some spans are discarded.
assert len(event["spans"]) == 1000
# Span descriptions are not truncated.
description = event["spans"][0]["description"]
assert len(description) == 1583
assert description.startswith("SELECT 0")
assert description.endswith("SELECT 98 UNION SELECT 99")
description = event["spans"][999]["description"]
assert len(description) == 1583
assert description.startswith("SELECT 0")
assert description.endswith("SELECT 98 UNION SELECT 99")
# Smoke check that truncation of other fields has not changed.
assert len(event["message"]) == DEFAULT_MAX_VALUE_LENGTH
# The _meta for other truncated fields should be there as well.
assert event["_meta"]["message"] == {
"": {"len": 1034, "rem": [["!limit", "x", 1021, 1024]]}
}
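# Reading the _meta entry above: the original message is
# DEFAULT_MAX_VALUE_LENGTH + 10 == 1034 characters, and the "!limit" rule keeps
# 1021 of them plus a three-character "..." to land exactly on the 1024 limit.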
def test_engine_name_not_string(sentry_init):
sentry_init(
integrations=[SqlalchemyIntegration()],
)
engine = create_engine("sqlite:///:memory:")
engine.dialect.name = b"sqlite"
with engine.connect() as con:
con.execute(text("SELECT 0"))
@pytest.mark.parametrize("enable_db_query_source", [None, False])
def test_query_source_disabled(sentry_init, capture_events, enable_db_query_source):
sentry_options = {
"integrations": [SqlalchemyIntegration()],
"enable_tracing": True,
}
if enable_db_query_source is not None:
sentry_options["enable_db_query_source"] = enable_db_query_source
sentry_options["db_query_source_threshold_ms"] = 0
sentry_init(**sentry_options)
events = capture_events()
with start_transaction(name="test_transaction", sampled=True):
Base = declarative_base() # noqa: N806
class Person(Base):
__tablename__ = "person"
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine) # noqa: N806
session = Session()
bob = Person(name="Bob")
session.add(bob)
assert session.query(Person).first() == bob
(event,) = events
for span in event["spans"]:
if span.get("op") == "db" and span.get("description").startswith(
"SELECT person"
):
data = span.get("data", {})
assert SPANDATA.CODE_LINENO not in data
assert SPANDATA.CODE_NAMESPACE not in data
assert SPANDATA.CODE_FILEPATH not in data
assert SPANDATA.CODE_FUNCTION not in data
break
else:
raise AssertionError("No db span found")
def test_query_source(sentry_init, capture_events):
sentry_init(
integrations=[SqlalchemyIntegration()],
enable_tracing=True,
enable_db_query_source=True,
db_query_source_threshold_ms=0,
)
events = capture_events()
with start_transaction(name="test_transaction", sampled=True):
Base = declarative_base() # noqa: N806
class Person(Base):
__tablename__ = "person"
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine) # noqa: N806
session = Session()
bob = Person(name="Bob")
session.add(bob)
assert session.query(Person).first() == bob
(event,) = events
for span in event["spans"]:
if span.get("op") == "db" and span.get("description").startswith(
"SELECT person"
):
data = span.get("data", {})
assert SPANDATA.CODE_LINENO in data
assert SPANDATA.CODE_NAMESPACE in data
assert SPANDATA.CODE_FILEPATH in data
assert SPANDATA.CODE_FUNCTION in data
assert type(data.get(SPANDATA.CODE_LINENO)) == int
assert data.get(SPANDATA.CODE_LINENO) > 0
assert (
data.get(SPANDATA.CODE_NAMESPACE)
== "tests.integrations.sqlalchemy.test_sqlalchemy"
)
assert data.get(SPANDATA.CODE_FILEPATH).endswith(
"tests/integrations/sqlalchemy/test_sqlalchemy.py"
)
is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
assert is_relative_path
assert data.get(SPANDATA.CODE_FUNCTION) == "test_query_source"
break
else:
raise AssertionError("No db span found")
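# Taken together, the two tests above show the intended configuration surface
# (inferred from the options used): enable_db_query_source opts into
# code-origin data on db spans, and db_query_source_threshold_ms=0 forces it
# onto every query; a larger threshold would presumably limit it to queries at
# least that slow.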
# sentry-python-1.39.2/tests/integrations/starlette/__init__.py
import pytest
pytest.importorskip("starlette")
# sentry-python-1.39.2/tests/integrations/starlette/photo.jpg (binary JPEG test fixture; contents omitted)