[
  {
    "path": ".github/CODEOWNERS",
    "content": "#####################################################\n#\n# List of approvers for this repository\n#\n#####################################################\n#\n# Learn about CODEOWNERS file format:\n#  https://help.github.com/en/articles/about-code-owners\n#\n\n* @aws/aws-x-ray\n"
  },
  {
    "path": ".github/PULL_REQUEST_TEMPLATE.md",
    "content": "*Issue #, if available:*\n\n*Description of changes:*\n\n\nBy submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.\n"
  },
  {
    "path": ".github/dependency-check-suppressions.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<suppressions xmlns=\"https://jeremylong.github.io/DependencyCheck/dependency-suppression.1.3.xsd\">\n</suppressions>\n"
  },
  {
    "path": ".github/stale.yml",
    "content": "# Number of days of inactivity before an issue becomes stale\ndaysUntilStale: 30\n# Number of days of inactivity before a stale issue is closed\ndaysUntilClose: 7\n# Limit to only `issues` or `pulls`\nonly: issues\n# Issues with these labels will never be considered stale\nexemptLabels:\n  - pinned\n  - bug\n  - enhancement\n  - feature-request\n  - help wanted\n  - work-in-progress\n  - pending release\n# Label to use when marking an issue as stale\nstaleLabel: stale\n# Comment to post when marking an issue as stale. Set to `false` to disable\nmarkComment: >\n  This issue has been automatically marked as stale because it has not had\n  recent activity. It will be closed if no further activity occurs in next 7 days. Thank you\n  for your contributions.\n# Comment to post when closing a stale issue. Set to `false` to disable\ncloseComment: false\n"
  },
  {
    "path": ".github/trivy/daily-scan.trivyignore.yaml",
    "content": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n# SPDX-License-Identifier: Apache-2.0\n\n# Trivy ignore file for daily scans.\n# This file is intentionally empty. Daily scans should flag all CVEs.\n# See: https://aquasecurity.github.io/trivy/latest/docs/configuration/filtering/\n\n# Format:\n# - id: <CVE-###>\n#   statement: \"<Why are we excluding?> <link to CVE where we can track status>\"\n#   expired_at: <required - YYYY-MM-DD>\n\nvulnerabilities: []\n"
  },
  {
    "path": ".github/workflows/IntegrationTesting.yaml",
    "content": "name: Integration Testing\non:\n  push:\n    branches:\n      - master\n\npermissions:\n  id-token: write\n  contents: read\n\njobs:\n  build_SDK:\n    name: Build X-Ray Python SDK\n    runs-on: ubuntu-latest\n\n    steps:\n      - name: Pull in source code from aws-xray-sdk-python Github repository\n        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0\n\n      - name: Setup python\n        uses: actions/setup-python@7f4fc3e22c37d6ff65e88745f38bd3157c663f7c #v4.9.1\n        with:\n          python-version: '3.8'\n\n      - name: Build X-Ray Python SDK\n        run: python setup.py sdist\n\n      - name: Upload SDK build artifact\n        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2\n        with:\n          name: sdk-build-artifact\n          path: .\n\n  build_WebApp:\n    name: Build Web Application\n    needs: build_SDK\n    runs-on: ubuntu-latest\n\n    steps:\n      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0\n\n      - name: Setup python\n        uses: actions/setup-python@7f4fc3e22c37d6ff65e88745f38bd3157c663f7c #v4.9.1\n        with:\n          python-version: '3.8'\n\n      - name: Download X-Ray SDK build artifact\n        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 #v4.3.0\n        with:\n          name: sdk-build-artifact\n          path: ./sample-apps/flask\n\n      - name: Build WebApp with X-Ray Python SDK\n        run: pip3 install . -t .\n        working-directory: ./sample-apps/flask\n\n      - name: Zip up the deployment package\n        run: zip -r deploy.zip . 
-x '*.git*'\n        working-directory: ./sample-apps/flask\n\n      - name: Upload WebApp with X-Ray SDK build artifact\n        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2\n        with:\n          name: sdk-flask-build-artifact\n          path: ./sample-apps/flask/deploy.zip\n\n  deploy_WebApp:\n    name: Deploy Web Application\n    needs: build_WebApp\n    runs-on: ubuntu-latest\n\n    steps:\n      - name: Checkout X-Ray SDK to get terraform source\n        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0\n\n      - name: Download WebApp with X-Ray SDK build artifact\n        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 #v4.3.0\n        with:\n          name: sdk-flask-build-artifact\n\n      - name: Copy deployment package to terraform directory\n        run: cp deploy.zip ./terraform\n\n      - name: Configure AWS Credentials\n        uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a #v4.3.1\n        with:\n          role-to-assume: ${{ secrets.AWS_INTEG_TEST_ROLE_ARN }}\n          aws-region: us-west-2\n\n      - name: Setup Terraform\n        uses: hashicorp/setup-terraform@633666f66e0061ca3b725c73b2ec20cd13a8fdd1 #v2.0.3\n\n      - name: Terraform Init\n        run: terraform init\n        working-directory: ./terraform\n\n      - name: Terraform Validate\n        run: terraform validate -no-color\n        working-directory: ./terraform\n\n      - name: Terraform Plan\n        run: terraform plan -var-file=\"fixtures.us-west-2.tfvars\" -no-color\n        env:\n          TF_VAR_resource_prefix: '${{ github.run_id }}-${{ github.run_number }}'\n        continue-on-error: true\n        working-directory: ./terraform\n\n      - name: Terraform Apply\n        run: terraform apply -var-file=\"fixtures.us-west-2.tfvars\" -auto-approve\n        env:\n          TF_VAR_resource_prefix: '${{ github.run_id }}-${{ github.run_number }}'\n        
working-directory: ./terraform\n\n      - name: Upload terraform state files for destorying resources\n        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2\n        with:\n          name: terraform-state-artifact\n          path: ./terraform\n\n  test_WebApp:\n    name: Test WebApp\n    needs: deploy_WebApp\n    runs-on: ubuntu-latest\n\n    steps:\n      - uses: actions/setup-java@17f84c3641ba7b8f6deff6309fc4c864478f5d62 #v3.14.1\n        with:\n          distribution: 'zulu'\n          java-version: 14\n\n      - name: Configure AWS Credentials\n        uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a #v4.3.1\n        with:\n          role-to-assume: ${{ secrets.AWS_INTEG_TEST_ROLE_ARN }}\n          aws-region: us-west-2\n\n      - name: Checkout test framework\n        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0\n        with:\n          repository: aws-observability/aws-otel-test-framework\n          ref: terraform\n\n      - name: Run testing suite\n        run: ./gradlew :validator:run --args='-c default-xray-trace-validation.yml --endpoint http://${{ github.run_id }}-${{ github.run_number }}-eb-app-env.us-west-2.elasticbeanstalk.com'\n\n  cleanup:\n    name: Resource tear down\n    needs: test_WebApp\n    if: always()\n    runs-on: ubuntu-latest\n\n    steps:\n      - name: Download terraform state artifact\n        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 #v4.3.0\n        with:\n          name: terraform-state-artifact\n\n      - name: Configure AWS Credentials\n        uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a #v4.3.1\n        with:\n          role-to-assume: ${{ secrets.AWS_INTEG_TEST_ROLE_ARN }}\n          aws-region: us-west-2\n\n      - name: Setup Terraform\n        uses: hashicorp/setup-terraform@633666f66e0061ca3b725c73b2ec20cd13a8fdd1 #v2.0.3\n\n      - name: Terraform Init\n    
    run: terraform init\n\n      - name: set permissions to terraform plugins\n        run: chmod -R a+x .terraform/*\n\n      - name: Destroy resources\n        run: terraform destroy -state=\"terraform.tfstate\" -var-file=\"fixtures.us-west-2.tfvars\" -auto-approve\n        env:\n          TF_VAR_resource_prefix: '${{ github.run_id }}-${{ github.run_number }}'\n"
  },
  {
    "path": ".github/workflows/Release.yaml",
    "content": "name: Release X-Ray Python SDK\n\non:\n  workflow_dispatch:\n    inputs:\n      version:\n        description: The version to tag the release with, e.g., 1.2.0, 1.3.0\n        required: true\n\njobs:\n  release:\n    permissions:\n      contents: write\n    runs-on: ubuntu-latest\n    steps:\n      - name: Checkout master branch\n        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0\n\n      - name: Create Release\n        id: create_release\n        uses: actions/create-release@0cb9c9b65d5d1901c1f53e5e66eaf4afd303e70e #v1.1.4\n        env:\n          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n        with:\n          tag_name: '${{ github.event.inputs.version }}'\n          release_name: '${{ github.event.inputs.version }} Release'\n          body: 'See details in [CHANGELOG](https://github.com/aws/aws-xray-sdk-python/blob/master/CHANGELOG.rst)'\n          draft: true\n          prerelease: false\n"
  },
  {
    "path": ".github/workflows/UnitTesting.yaml",
    "content": "name: Unit Testing\npermissions:\n  contents: read\non:\n  push:\n    branches:\n      - master\n  pull_request:\n    branches:\n      - master\n\njobs:\n  test:\n    runs-on: ubuntu-22.04\n    env:\n      py37: 3.7\n      py38: 3.8\n      py39: 3.9\n      py310: '3.10'\n      py311: '3.11'\n      py312: '3.12'\n      DB_DATABASE: test_db\n      DB_USER: root\n      DB_PASSWORD: root\n    strategy:\n      fail-fast: false\n      matrix:\n        python-version: [py37, py38, py39, py310, py311, py312]\n        testenv: [core, ext]\n    steps:\n      - name: Checkout repo\n        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0\n\n      - name: Start MySQL\n        if: ${{ matrix.testenv == 'ext' }}\n        run: |\n          sudo /etc/init.d/mysql start\n          mysql -e 'CREATE DATABASE ${{ env.DB_DATABASE }};' -u${{ env.DB_USER }} -p${{ env.DB_PASSWORD }}\n          mysql -e 'CREATE DATABASE test_dburl;' -u${{ env.DB_USER }} -p${{ env.DB_PASSWORD }}\n          mysql -e \"CREATE USER test_dburl_user@localhost IDENTIFIED BY 'test]password';\" -u${{ env.DB_USER }} -p${{ env.DB_PASSWORD }}\n          mysql -e \"GRANT ALL PRIVILEGES ON test_dburl.* TO test_dburl_user@localhost;\" -u${{ env.DB_USER }} -p${{ env.DB_PASSWORD }}\n          mysql -e \"FLUSH PRIVILEGES;\" -u${{ env.DB_USER }} -p${{ env.DB_PASSWORD }}\n      - name: Setup Python\n        uses: actions/setup-python@7f4fc3e22c37d6ff65e88745f38bd3157c663f7c #v4.9.1\n        with:\n          python-version: ${{ env[matrix.python-version] }}\n\n      - name: Install tox\n        run: pip install \"tox<=3.27.1\" -U tox-factor setuptools\n\n      - name: Cache tox environment\n        # Preserves .tox directory between runs for faster installs\n        uses: actions/cache@6f8efc29b200d32929f49075959781ed54ec270c #v3.5.0\n        with:\n          path: |\n            .tox\n            ~/.cache/pip\n          key: tox-cache-${{ matrix.python-version }}-${{ matrix.testenv 
}}-${{ hashFiles('tox.ini') }}\n\n      - name: Run tox\n        run: |\n          tox -f ${{ matrix.python-version }}-${{ matrix.testenv }}\n\n  static-code-checks:\n    runs-on: ubuntu-latest\n    steps:\n\n      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0\n        with:\n          fetch-depth: 0\n      - name: Check for versioned GitHub actions\n        if: always()\n        run: |\n          # Get changed GitHub workflow/action files\n          CHANGED_FILES=$(git diff --name-only origin/${{ github.base_ref }}..HEAD | grep -E \"^\\.github/(workflows|actions)/.*\\.ya?ml$\" || true)\n          \n          if [ -n \"$CHANGED_FILES\" ]; then\n            # Check for any versioned actions, excluding comments and this validation script\n            VIOLATIONS=$(grep -Hn \"uses:.*@v\" $CHANGED_FILES | grep -v \"grep.*uses:.*@v\" | grep -v \"#.*@v\" || true)\n            if [ -n \"$VIOLATIONS\" ]; then\n              echo \"Found versioned GitHub actions. Use commit SHAs instead:\"\n              echo \"$VIOLATIONS\"\n              exit 1\n            fi\n          fi\n          \n          echo \"No versioned actions found in changed files\""
  },
  {
    "path": ".github/workflows/continuous-monitoring.yml",
    "content": "name: Continuous monitoring of distribution channels\non:\n  workflow_dispatch:\n  schedule:\n    - cron:  '*/10 * * * *'\n\npermissions:\n  id-token: write\n  contents: read\n\njobs:\n  smoke-tests:\n    name: Run smoke tests\n    runs-on: ubuntu-latest\n    steps:\n      - name: Checkout Repository\n        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0\n      \n      - name: Configure AWS Credentials\n        uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a #v4.3.1\n        with:\n          role-to-assume: ${{ secrets.AWS_INTEG_TEST_ROLE_ARN }}\n          aws-region: us-east-1\n      \n      - uses: actions/setup-python@7f4fc3e22c37d6ff65e88745f38bd3157c663f7c #v4.9.1\n        with:\n          python-version: '3.x'\n      \n      - run: pip install tox\n      \n      - name: Run smoke tests\n        id: distribution-availability\n        run: tox -c tox-distributioncheck.ini\n        \n      - name: Publish metric on X-Ray Python SDK distribution availability\n        if: ${{ always() }}\n        run: |\n          if [[ \"${{ steps.distribution-availability.outcome }}\" == \"failure\" ]]; then\n            aws cloudwatch put-metric-data --metric-name XRayPythonSDKDistributionUnavailability --dimensions failure=rate --namespace MonitorSDK --value 1 --timestamp $(date +%s)\n          else\n            aws cloudwatch put-metric-data --metric-name XRayPythonSDKDistributionUnavailability --dimensions failure=rate --namespace MonitorSDK --value 0 --timestamp $(date +%s)\n          fi\n"
  },
  {
    "path": ".github/workflows/daily-scan.yml",
    "content": "## Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n## SPDX-License-Identifier: Apache-2.0\n# Performs a daily scan of:\n# * The X-Ray Python SDK published artifact dependencies, using Trivy\n# * Project dependencies, using DependencyCheck\n#\n#  Publishes results to CloudWatch Metrics.\nname: Daily scan\n\non:\n  schedule: # scheduled to run every 6 hours\n    - cron: '20 */6 * * *' #  \"At minute 20 past every 6th hour.\"\n  workflow_dispatch: # be able to run the workflow on demand\n\nenv:\n  AWS_DEFAULT_REGION: us-east-1\n\npermissions:\n  id-token: write\n  contents: read\n\njobs:\n  scan_and_report:\n    runs-on: ubuntu-latest\n    steps:\n      - name: Checkout repo for dependency scan\n        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0\n        with:\n          fetch-depth: 0\n\n      - name: Setup Python for dependency scan\n        uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b #v5.3.0\n        with:\n          python-version: '3.x'\n\n      - name: Install published package for scanning\n        run: |\n          mkdir -p scan-target\n          python -m venv scan-venv\n          source scan-venv/bin/activate\n          pip install aws-xray-sdk\n          pip freeze > scan-target/requirements.txt\n \n      - name: Install Java for dependency scan\n        uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0\n        with:\n          java-version: 17\n          distribution: 'temurin'\n\n      - name: Configure AWS credentials for dependency scan\n        uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0\n        with:\n          role-to-assume: ${{ secrets.SECRET_MANAGER_ROLE_ARN }}\n          aws-region: ${{ env.AWS_DEFAULT_REGION }}\n\n      - name: Get secrets for dependency scan\n        uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 #v2.0.10\n        id: 
nvd_api_key\n        with:\n          secret-ids: |\n            ${{ secrets.NVD_API_KEY_SECRET_ARN }}\n            OSS_INDEX, ${{ secrets.OSS_INDEX_SECRET_ARN }}\n          parse-json-secrets: true\n\n      # See http://jeremylong.github.io/DependencyCheck/dependency-check-cli/ for installation explanation\n      - name: Install and run dependency scan\n        id: dep_scan\n        if: always()\n        run: |\n          gpg --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 259A55407DD6C00299E6607EFFDE55BE73A2D1ED\n          VERSION=$(curl -s https://jeremylong.github.io/DependencyCheck/current.txt | head -n1 | cut -d\" \" -f1)\n          curl -Ls \"https://github.com/dependency-check/DependencyCheck/releases/download/v$VERSION/dependency-check-$VERSION-release.zip\" --output dependency-check.zip\n          curl -Ls \"https://github.com/dependency-check/DependencyCheck/releases/download/v$VERSION/dependency-check-$VERSION-release.zip.asc\" --output dependency-check.zip.asc\n          gpg --verify dependency-check.zip.asc\n          unzip dependency-check.zip\n          ./dependency-check/bin/dependency-check.sh --enableExperimental --failOnCVSS 0 --nvdApiKey ${{ env.NVD_API_KEY_NVD_API_KEY }} --ossIndexUsername ${{ env.OSS_INDEX_USERNAME }} --ossIndexPassword ${{ env.OSS_INDEX_PASSWORD }} --suppression .github/dependency-check-suppressions.xml -s \"scan-target/\"\n\n      - name: Print dependency scan results on failure\n        if: ${{ steps.dep_scan.outcome != 'success' }}\n        run: less dependency-check-report.html\n\n      - name: Perform high severity scan on published artifact dependencies\n        if: always()\n        id: high_scan_latest\n        uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # v0.34.2\n        with:\n          scan-type: 'fs'\n          scan-ref: 'scan-target/'\n          severity: 'CRITICAL,HIGH'\n          exit-code: '1'\n          scanners: 'vuln'\n        env:\n          TRIVY_IGNOREFILE: 
.github/trivy/daily-scan.trivyignore.yaml\n\n      - name: Perform low severity scan on published artifact dependencies\n        if: always()\n        id: low_scan_latest\n        uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # v0.34.2\n        with:\n          scan-type: 'fs'\n          scan-ref: 'scan-target/'\n          severity: 'MEDIUM,LOW,UNKNOWN'\n          exit-code: '1'\n          scanners: 'vuln'\n        env:\n          TRIVY_IGNOREFILE: .github/trivy/daily-scan.trivyignore.yaml\n\n      - name: Configure AWS Credentials for emitting metrics\n        if: always()\n        uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0\n        with:\n          role-to-assume: ${{ secrets.AWS_INTEG_TEST_ROLE_ARN }}\n          aws-region: ${{ env.AWS_DEFAULT_REGION }}\n\n      - name: Publish high scan status\n        if: always()\n        run: |\n          value=\"${{ steps.high_scan_latest.outcome == 'success' && '1.0' || '0.0' }}\"\n          aws cloudwatch put-metric-data --namespace 'MonitorSDK' \\\n            --metric-name Success \\\n            --dimensions repository=${{ github.repository }},branch=${{ github.ref_name }},workflow=daily_scan_high \\\n            --value $value\n\n      - name: Publish low scan status\n        if: always()\n        run: |\n          value=\"${{ steps.low_scan_latest.outcome == 'success' && steps.dep_scan.outcome == 'success' && '1.0' || '0.0' }}\"\n          aws cloudwatch put-metric-data --namespace 'MonitorSDK' \\\n            --metric-name Success \\\n            --dimensions repository=${{ github.repository }},branch=${{ github.ref_name }},workflow=daily_scan_low \\\n            --value $value\n"
  },
  {
    "path": ".gitignore",
    "content": ".DS_Store\n*.pyc\n.Python\n.cache\n.pytest_cache\nman\n\nbuild\nbin\ninclude\nlib\ndist\n*.egg\n*.egg-info\n.tox\n.python-version\n.pytest_cache\n\npip-selfcheck.json\n\n.coverage*\nhtmlcov\n\nvenv\n.idea\n"
  },
  {
    "path": "CHANGELOG.rst",
    "content": "=========\nCHANGELOG\n=========\n\nUnreleased\n==========\n\n2.15.0\n==========\n* bugfix: Fix log stack overflow if metadata contains circular reference `https://github.com/aws/aws-xray-sdk-python/pull/464`\n\n2.14.0\n==========\n* bugfix: Fix warning message condition for subsegment ending `https://github.com/aws/aws-xray-sdk-python/pull/434`\n\n2.13.1\n==========\n* improvement: Bump idna from 3.6 to 3.7 in /sample-apps/flask `https://github.com/aws/aws-xray-sdk-python/pull/425`\n* bugfix: Fix end_time param type docstring from int to float `https://github.com/aws/aws-xray-sdk-python/pull/426`\n* improvement:  Bump werkzeug from 3.0.1 to 3.0.3 in /sample-apps/flask `https://github.com/aws/aws-xray-sdk-python/pull/428`\n* improvement: [LambdaContext] Create dummy segment when trace header is incomplete `https://github.com/aws/aws-xray-sdk-python/pull/429`\n* bugfix: [LambdaContext] Fix logging to only happen inside lambda function `https://github.com/aws/aws-xray-sdk-python/pull/431`\n\n2.13.0\n==========\n* bugfix: Fix passing multiple values in testenv.passenv in tox.ini `https://github.com/aws/aws-xray-sdk-python/pull/399`\n* improvement: Pin flask < 3.x for flask sqlalchemy tests `https://github.com/aws/aws-xray-sdk-python/pull/412`\n* improvement: Bump werkzeug from 2.2.3 to 3.0.1 in /sample-apps/flask `https://github.com/aws/aws-xray-sdk-python/pull/413`\n* improvement: Fix typo in docs `https://github.com/aws/aws-xray-sdk-python/pull/419`\n* bugfix: Fix sqlalchemy_core patch errors for unencoded special characters in db url `https://github.com/aws/aws-xray-sdk-python/pull/418`\n* bugfix: Fix EB platform version for integration test `https://github.com/aws/aws-xray-sdk-python/pull/420`\n\n2.12.1\n==========\n* bugfix: set_trace_entity() in lambda adds segment to thread `PR409 https://github.com/aws/aws-xray-sdk-python/pull/409`\n* bugfix: Cleanup after drop of support for Python `PR387 
https://github.com/aws/aws-xray-sdk-python/pull/387`\n\n2.12.0\n==========\n* improvement: Default Context Missing Strategy set to Log Error `PR372 https://github.com/aws/aws-xray-sdk-python/pull/372`\n* bugfix: Pin tox version to <=3.27.1 to fix CI tests `PR374 https://github.com/aws/aws-xray-sdk-python/pull/374`\n* improvement: Sample app dependency update `PR373 https://github.com/aws/aws-xray-sdk-python/pull/373`\n* bugfix: Fix pynamodb tests for Python < 3.6 `PR375 https://github.com/aws/aws-xray-sdk-python/pull/375`\n* improvement: Use latest GH Actions versions in CI tests `PR365 https://github.com/aws/aws-xray-sdk-python/pull/365`\n* improvement: Simplify setup script `PR363 https://github.com/aws/aws-xray-sdk-python/pull/363`\n* bugfix: Fix deprecation warnings related to asyncio `PR364 https://github.com/aws/aws-xray-sdk-python/pull/364`\n* improvement: Run tests against Python 3.10 and 3.11 `PR376 https://github.com/aws/aws-xray-sdk-python/pull/376`\n* improvement: Sample app dependency update `PR380 https://github.com/aws/aws-xray-sdk-python/pull/380`\n* bugfix: Pin sqlalchemy version to 1.x to fix tests `PR381 https://github.com/aws/aws-xray-sdk-python/pull/381`\n* bugfix: Fix sample app dependencies incompatibility with XRay SDK `PR382 https://github.com/aws/aws-xray-sdk-python/pull/382`\n* bugfix: Start MySQL from GH Actions, upgrade Ubuntu, and remove Python versions for unit tests `PR384 https://github.com/aws/aws-xray-sdk-python/pull/384`\n\n2.11.0\n==========\n* bugfix: Fix TypeError by patching register_default_jsonb from psycopg2 `PR350 https://github.com/aws/aws-xray-sdk-python/pull/350`\n* improvement: Add annotations `PR348 https://github.com/aws/aws-xray-sdk-python/pull/348`\n* bugfix: Use service parameter to match centralized sampling rules `PR 353 https://github.com/aws/aws-xray-sdk-python/pull/353`\n* bugfix: Implement PEP3134 to discover underlying problems with python3 `PR355 https://github.com/aws/aws-xray-sdk-python/pull/355`\n* 
improvement: Allow list TopicArn for SNS PublishBatch request `PR358 https://github.com/aws/aws-xray-sdk-python/pull/358`\n* bugfix: Version pinning flask-sqlalchemy version to 2.5.1 or less `PR360 https://github.com/aws/aws-xray-sdk-python/pull/360`\n* bugfix: Fix UnboundLocalError when aiohttp server raises a CancelledError `PR356 https://github.com/aws/aws-xray-sdk-python/pull/356`\n* improvement: Instrument httpx >= 0.20 `PR357 https://github.com/aws/aws-xray-sdk-python/pull/357`\n* improvement: [LambdaContext] persist original trace header `PR362 https://github.com/aws/aws-xray-sdk-python/pull/362`\n* bugfix: Run tests against Django 4.x `PR361 https://github.com/aws/aws-xray-sdk-python/pull/361`\n* improvement: Oversampling Mitigation `PR366 https://github.com/aws/aws-xray-sdk-python/pull/366`\n\n2.10.0\n==========\n* bugfix: Only import future for py2. `PR343 <https://github.com/aws/aws-xray-sdk-python/pull/343>`_.\n* bugfix: Defensively copy context entities to async thread. `PR340 <https://github.com/aws/aws-xray-sdk-python/pull/340>`_.\n* improvement: Added support for IGNORE_ERROR option when context is missing. `PR338 <https://github.com/aws/aws-xray-sdk-python/pull/338>`_.\n\n2.9.0\n==========\n* bugfix: Change logging behavior to avoid overflow. `PR302 <https://github.com/aws/aws-xray-sdk-python/pull/302>`_.\n* improvement: Lazy load samplers to speed up cold start in lambda. `PR312 <https://github.com/aws/aws-xray-sdk-python/pull/312>`_.\n* improvement: Replace slow json file name resolver. `PR 306 <https://github.com/aws/aws-xray-sdk-python/pull/306>`_.  \n\n2.8.0\n==========\n* improvement: feat(sqla-core): Add support for rendering Database Specific queries. `PR291 <https://github.com/aws/aws-xray-sdk-python/pull/291>`_.\n* bugfix: Fixing broken instrumentation for sqlalchemy >= 1.4.0. `PR289 <https://github.com/aws/aws-xray-sdk-python/pull/289>`_.\n* feature: no op trace id generation. 
`PR293 <https://github.com/aws/aws-xray-sdk-python/pull/293>`_.\n* bugfix: Handle exception when sending entity to Daemon. `PR292 <https://github.com/aws/aws-xray-sdk-python/pull/292>`_.\n* bugfix: Fixed serialization issue when cause is a string. `PR284 <https://github.com/aws/aws-xray-sdk-python/pull/284>`_.\n* improvement: Publish metric on distribution availability. `PR279 <https://github.com/aws/aws-xray-sdk-python/pull/279>`_.\n\n2.7.0\n==========\n* improvement: Only run integration tests on master. `PR277 <https://github.com/aws/aws-xray-sdk-python/pull/277>`_.\n* improvement: Add distribution channel smoke test. `PR276 <https://github.com/aws/aws-xray-sdk-python/pull/276>`_.\n* improvement: Replace jsonpickle with json to serialize entity. `PR275 <https://github.com/aws/aws-xray-sdk-python/pull/275>`_.\n* bugfix: Always close segment in teardown_request handler. `PR272 <https://github.com/aws/aws-xray-sdk-python/pull/272>`_.\n* improvement: Close segment in only _handle_exception in case of Internal Server Error. `PR271 <https://github.com/aws/aws-xray-sdk-python/pull/271>`_.\n* bugfix: Handling condition where Entity.cause is not a dict. `PR267 <https://github.com/aws/aws-xray-sdk-python/pull/267>`_.\n* improvement: Add ability to ignore some requests from httplib. `PR263 <https://github.com/aws/aws-xray-sdk-python/pull/263>`_.\n* feature: Add support for SQLAlchemy Core. `PR264 <https://github.com/aws/aws-xray-sdk-python/pull/264>`_.\n* improvement: Added always() to run clean up workflow. `PR259 <https://github.com/aws/aws-xray-sdk-python/pull/259>`_.\n* improvement: Allow configuring different Sampler in Django App. `PR252 <https://github.com/aws/aws-xray-sdk-python/pull/252>`_.\n* bugfix: Restore python2 compatibility of EC2 plugin. `PR249 <https://github.com/aws/aws-xray-sdk-python/pull/249>`_.\n* bugfix: eb solution stack name. `PR251 <https://github.com/aws/aws-xray-sdk-python/pull/251>`_.\n* improvement: Integration Test Workflow. 
`PR246 <https://github.com/aws/aws-xray-sdk-python/pull/246>`_.\n* improvement: Include unicode type for annotation value. `PR235 <https://github.com/aws/aws-xray-sdk-python/pull/235>`_.\n* improvement: Run tests against Django 3.1 instead of 1.11. `PR240 <https://github.com/aws/aws-xray-sdk-python/pull/240>`_.\n* bugfix: Generalize error check for pymysql error type. `PR239 <https://github.com/aws/aws-xray-sdk-python/pull/239>`_.\n* bugfix: SqlAlchemy: Close segment even if error was raised. `PR234 <https://github.com/aws/aws-xray-sdk-python/pull/234>`_.\n\n2.6.0\n==========\n* bugfix: asyncio.Task.current_task PendingDeprecation fix. `PR217 <https://github.com/aws/aws-xray-sdk-python/pull/217>`_.\n* bugfix: Added proper TraceID in dummy segments. `PR223 <https://github.com/aws/aws-xray-sdk-python/pull/223>`_.\n* improvement: Add testing for current Django versions. `PR200 <https://github.com/aws/aws-xray-sdk-python/pull/200>`_.\n* improvement: IMDSv2 support for EC2 plugin. `PR226 <https://github.com/aws/aws-xray-sdk-python/pull/226>`_.\n* improvement: Using instance doc to fetch EC2 metadata. Added 2 additional fields. `PR227 <https://github.com/aws/aws-xray-sdk-python/pull/227>`_.\n* improvement: Added StaleBot. `PR228 <https://github.com/aws/aws-xray-sdk-python/pull/228>`_.\n\n2.5.0\n==========\n* bugfix: Downgrade Coverage to 4.5.4. `PR197 <https://github.com/aws/aws-xray-sdk-python/pull/197>`_.\n* bugfix: Unwrap context provided to psycopg2.extensions.quote_ident. `PR198 <https://github.com/aws/aws-xray-sdk-python/pull/198>`_.\n* feature: extension support as Bottle plugin. `PR204 <https://github.com/aws/aws-xray-sdk-python/pull/204>`_.\n* bugfix: streaming_threshold not None check. `PR205 <https://github.com/aws/aws-xray-sdk-python/pull/205>`_.\n* bugfix: Add support for Django 2.0 to 3.0. `PR206 <https://github.com/aws/aws-xray-sdk-python/pull/206>`_.\n* bugfix: add puttracesegments to boto whitelist avoid a catch 22. 
`PR210 <https://github.com/aws/aws-xray-sdk-python/pull/210>`_.\n* feature: Add patch support for pymysql. `PR215 <https://github.com/aws/aws-xray-sdk-python/pull/215>`_.\n\n2.4.3\n==========\n* bugfix: Downstream Http Calls should use hostname rather than full URL as subsegment name. `PR192 <https://github.com/aws/aws-xray-sdk-python/pull/192>`_.\n* improvement: Whitelist SageMakerRuntime InvokeEndpoint operation. `PR183 <https://github.com/aws/aws-xray-sdk-python/pull/183>`_.\n* bugfix: Fix patching for PynamoDB4 with botocore 1.13. `PR181 <https://github.com/aws/aws-xray-sdk-python/pull/181>`_.\n* bugfix: Add X-Ray client with default empty credentials. `PR180 <https://github.com/aws/aws-xray-sdk-python/pull/180>`_.\n* improvement: Faster implementation of Wildcard Matching. `PR178 <https://github.com/aws/aws-xray-sdk-python/pull/178>`_.\n* bugfix: Make patch compatible with PynamoDB4. `PR177 <https://github.com/aws/aws-xray-sdk-python/pull/177>`_.\n* bugfix: Fix unit tests for newer versions of psycopg2. `PR163 <https://github.com/aws/aws-xray-sdk-python/pull/163>`_.\n* improvement: Enable tests with python 3.7. `PR157 <https://github.com/aws/aws-xray-sdk-python/pull/157>`_.\n\n2.4.2\n==========\n* bugfix: Fix exception processing in Django running in Lambda. `PR145 <https://github.com/aws/aws-xray-sdk-python/pull/145>`_.\n* bugfix: Poller threads block main thread from exiting bug. `PR144 <https://github.com/aws/aws-xray-sdk-python/pull/144>`_.\n\n2.4.1\n==========\n* bugfix: Middlewares should create subsegments only when in the Lambda context running under a Lambda environment. `PR139 <https://github.com/aws/aws-xray-sdk-python/pull/139>`_.\n\n2.4.0\n==========\n* feature: Add ability to enable/disable the SDK. `PR119 <https://github.com/aws/aws-xray-sdk-python/pull/119>`_.\n* feature: Add Serverless Framework Support `PR127 <https://github.com/aws/aws-xray-sdk-python/pull/127>`_.\n* feature: Bring aiobotocore support back. 
`PR125 <https://github.com/aws/aws-xray-sdk-python/pull/125>`_.\n* bugfix: Fix httplib invalid scheme detection for HTTPS. `PR122 <https://github.com/aws/aws-xray-sdk-python/pull/122>`_.\n* bugfix: Max_trace_back = 0 returns full exception stack trace bug fix. `PR123 <https://github.com/aws/aws-xray-sdk-python/pull/123>`_.\n* bugfix: Rename incorrect config module name to the correct global name. `PR130 <https://github.com/aws/aws-xray-sdk-python/pull/130>`_.\n* bugfix: Correctly remove password component from SQLAlchemy URLs, preventing... `PR132 <https://github.com/aws/aws-xray-sdk-python/pull/132>`_.\n\n2.3.0\n==========\n* feature: Stream Django ORM SQL queries and add flag to toggle their streaming. `PR111 <https://github.com/aws/aws-xray-sdk-python/pull/111>`_.\n* feature: Recursively patch any given module functions with capture. `PR113 <https://github.com/aws/aws-xray-sdk-python/pull/113>`_.\n* feature: Add patch support for pg8000 (Pure Python Driver). `PR115 <https://github.com/aws/aws-xray-sdk-python/pull/115>`_.\n* improvement: Remove the dependency on Requests. `PR112 <https://github.com/aws/aws-xray-sdk-python/pull/112>`_.\n* bugfix: Fix psycopg2 register type. `PR95 <https://github.com/aws/aws-xray-sdk-python/pull/95>`_.\n\n2.2.0\n=====\n* feature: Added context managers on segment/subsegment capture. `PR97 <https://github.com/aws/aws-xray-sdk-python/pull/97>`_.\n* feature: Added AWS SNS topic ARN to the default whitelist file. `PR93 <https://github.com/aws/aws-xray-sdk-python/pull/93>`_.\n* bugfix: Fixed an issue on `psycopg2` to support all keywords. `PR91 <https://github.com/aws/aws-xray-sdk-python/pull/91>`_.\n* bugfix: Fixed an issue on `endSegment` when there is context missing. `ISSUE98 <https://github.com/aws/aws-xray-sdk-python/issues/98>`_.\n* bugfix: Fixed the package description rendered on PyPI. 
`PR101 <https://github.com/aws/aws-xray-sdk-python/pull/101>`_.\n* bugfix: Fixed an issue where `patch_all` could patch the same module multiple times. `ISSUE99 <https://github.com/aws/aws-xray-sdk-python/issues/99>`_.\n* bugfix: Fixed the `datetime` to `epoch` conversion on Windows OS. `ISSUE103 <https://github.com/aws/aws-xray-sdk-python/issues/103>`_.\n* bugfix: Fixed a wrong segment json key where it should be `sampling_rule_name` rather than `rule_name`.\n\n2.1.0\n=====\n* feature: Added support for `psycopg2`. `PR83 <https://github.com/aws/aws-xray-sdk-python/pull/83>`_.\n* feature: Added support for `pynamodb` >= 3.3.1. `PR88 <https://github.com/aws/aws-xray-sdk-python/pull/88>`_.\n* improvement: Improved stack trace recording when exception is thrown in decorators. `PR70 <https://github.com/aws/aws-xray-sdk-python/pull/70>`_.\n* bugfix: Argument `sampling_req` in LocalSampler `should_trace` method now becomes optional. `PR89 <https://github.com/aws/aws-xray-sdk-python/pull/89>`_.\n* bugfix: Fixed a wrong test setup and leftover poller threads in recorder unit test.\n\n2.0.1\n=====\n* bugfix: Fixed an issue where manually `begin_segment` might break when making sampling decisions. `PR82 <https://github.com/aws/aws-xray-sdk-python/pull/82>`_.\n\n2.0.0\n=====\n* **Breaking**: The default sampler now launches background tasks to poll sampling rules from X-Ray backend. See the new default sampling strategy in more details here: https://docs.aws.amazon.com/xray/latest/devguide/xray-sdk-python-configuration.html#xray-sdk-python-configuration-sampling.\n* **Breaking**: The `should_trace` function in the sampler now takes a dictionary for sampling rule matching.\n* **Breaking**: The original sampling modules for local defined rules are moved from `models.sampling` to `models.sampling.local`.\n* **Breaking**: The default behavior of `patch_all` changed to selectively patch libraries to avoid double patching. 
You can use `patch_all(double_patch=True)` to force it to patch ALL supported libraries. See more details on `ISSUE63 <https://github.com/aws/aws-xray-sdk-python/issues/63>`_\n* **Breaking**: The latest `botocore` that has new X-Ray service API `GetSamplingRules` and `GetSamplingTargets` are required.\n* **Breaking**: Version 2.x doesn't support pynamodb and aiobotocore as it requires botocore >= 1.11.3 which isn’t currently supported by the pynamodb and aiobotocore libraries. Please continue to use version 1.x if you’re using pynamodb or aiobotocore until those have been updated to use botocore >= 1.11.3.\n* feature: Environment variable `AWS_XRAY_DAEMON_ADDRESS` now takes an additional notation in `tcp:127.0.0.1:2000 udp:127.0.0.2:2001` to set TCP and UDP destination separately. By default it assumes an X-Ray daemon listening to both UDP and TCP traffic on `127.0.0.1:2000`.\n* feature: Added MongoDB python client support. `PR65 <https://github.com/aws/aws-xray-sdk-python/pull/65>`_.\n* bugfix: Support binding connection in sqlalchemy as well as engine. `PR78 <https://github.com/aws/aws-xray-sdk-python/pull/78>`_.\n* bugfix: Flask middleware safe request teardown. `ISSUE75 <https://github.com/aws/aws-xray-sdk-python/issues/75>`_.\n\n\n1.1.2\n=====\n* bugfix: Fixed an issue on PynamoDB patcher where the capture didn't handle client timeout.\n\n1.1.1\n=====\n* bugfix: Handle Aiohttp Exceptions as valid responses `PR59 <https://github.com/aws/aws-xray-sdk-python/pull/59>`_.\n\n1.1\n===\n* feature: Added Sqlalchemy parameterized query capture. `PR34 <https://github.com/aws/aws-xray-sdk-python/pull/34>`_\n* bugfix: Allow standalone sqlalchemy integrations without flask_sqlalchemy. `PR53 <https://github.com/aws/aws-xray-sdk-python/pull/53>`_\n* bugfix: Give up aiohttp client tracing when there is no open segment and LOG_ERROR is configured. 
`PR58 <https://github.com/aws/aws-xray-sdk-python/pull/58>`_\n* bugfix: Handle missing subsegment when rendering a Django template. `PR54 <https://github.com/aws/aws-xray-sdk-python/pull/54>`_\n* Typo fixes on comments and docs.\n\n1.0\n===\n* Changed development status to `5 - Production/Stable` and removed beta tag.\n* feature: Added S3 API parameters to the default whitelist.\n* feature: Added new recorder APIs to add annotations/metadata.\n* feature: The recorder now adds more runtime and version information to sampled segments.\n* feature: Django, Flask and Aiohttp middleware now inject trace header to response headers.\n* feature: Added a new API to configure maximum captured stack trace.\n* feature: Modularized subsegments streaming logic and now it can be overridden with custom implementation.\n* bugfix(**Breaking**): Subsegment `set_user` API is removed since this attribute is not supported by X-Ray back-end.\n* bugfix: Fixed an issue where arbitrary fields in trace header being dropped when calling downstream.\n* bugfix: Fixed a compatibility issue between botocore and httplib patcher. `ISSUE48 <https://github.com/aws/aws-xray-sdk-python/issues/48>`_.\n* bugfix: Fixed a typo in sqlalchemy decorators. `PR50 <https://github.com/aws/aws-xray-sdk-python/pull/50>`_.\n* Updated `README` with more usage examples.\n\n0.97\n====\n* feature: Support aiohttp client tracing for aiohttp 3.x. `PR42 <https://github.com/aws/aws-xray-sdk-python/pull/42>`_.\n* feature: Use the official middleware pattern for Aiohttp ext. `PR29 <https://github.com/aws/aws-xray-sdk-python/pull/29>`_.\n* bugfix: Aiohttp middleware serialized URL values incorrectly. `PR37 <https://github.com/aws/aws-xray-sdk-python/pull/37>`_\n* bugfix: Don't overwrite plugins list on each `.configure` call. `PR38 <https://github.com/aws/aws-xray-sdk-python/pull/38>`_\n* bugfix: Do not swallow `return_value` when context is missing and `LOG_ERROR` is set. 
`PR44 <https://github.com/aws/aws-xray-sdk-python/pull/44>`_\n* bugfix: Loose entity name validation. `ISSUE36 <https://github.com/aws/aws-xray-sdk-python/issues/36>`_\n* bugfix: Fix PyPI project page being rendered incorrectly. `ISSUE30 <https://github.com/aws/aws-xray-sdk-python/issues/30>`_\n\n0.96\n====\n* feature: Add support for SQLAlchemy and Flask-SQLAlchemy. `PR14 <https://github.com/aws/aws-xray-sdk-python/pull/14>`_.\n* feature: Add support for PynamoDB calls to DynamoDB. `PR13 <https://github.com/aws/aws-xray-sdk-python/pull/13>`_.\n* feature: Add support for httplib calls. `PR19 <https://github.com/aws/aws-xray-sdk-python/pull/19>`_.\n* feature: Make streaming threshold configurable through public interface. `ISSUE21 <https://github.com/aws/aws-xray-sdk-python/issues/21>`_.\n* bugfix:  Drop invalid annotation keys and log a warning. `PR22 <https://github.com/aws/aws-xray-sdk-python/pull/22>`_.\n* bugfix:  Respect `with` statement on cursor objects in dbapi2 patcher. `PR17 <https://github.com/aws/aws-xray-sdk-python/pull/17>`_.\n* bugfix:  Don't throw error from built in subsegment capture when `LOG_ERROR` is set. `ISSUE4 <https://github.com/aws/aws-xray-sdk-python/issues/4>`_.\n\n0.95\n====\n* **Breaking**: AWS API parameter whitelist json file is moved to path `aws_xray_sdk/ext/resources/aws_para_whitelist.json` in `PR6 <https://github.com/aws/aws-xray-sdk-python/pull/6>`_.\n* Added aiobotocore/aioboto3 support and async function capture. `PR6 <https://github.com/aws/aws-xray-sdk-python/pull/6>`_\n* Added logic to removing segment/subsegment name invalid characters. `PR9 <https://github.com/aws/aws-xray-sdk-python/pull/9>`_\n* Temporarily disabled tests run on Django2.0. `PR10 <https://github.com/aws/aws-xray-sdk-python/pull/10>`_\n* Code cleanup. `PR11 <https://github.com/aws/aws-xray-sdk-python/pull/11>`_\n\n0.94\n====\n* Added aiohttp support. 
`PR3 <https://github.com/aws/aws-xray-sdk-python/pull/3>`_\n\n0.93\n====\n* The X-Ray SDK for Python is now an open source project. You can follow the project and submit issues and pull requests on GitHub: https://github.com/aws/aws-xray-sdk-python\n\n0.92.2\n======\n* bugfix: Fixed an issue that caused the X-Ray recorder to omit the origin when recording segments with a service plugin. This caused the service's type to not appear on the service map in the X-Ray console.\n\n0.92.1\n======\n* bugfix: Fixed an issue that caused all calls to Amazon DynamoDB tables to be grouped under a single node in the service map. With this update, each table gets a separate node.\n\n0.92\n====\n\n* feature: Add Flask support\n* feature: Add dynamic naming on segment name\n\n0.91.1\n======\n\n* bugfix: The SDK has been released as a universal wheel\n"
  },
  {
    "path": "CODE_OF_CONDUCT.md",
    "content": "## Code of Conduct\nThis project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). \nFor more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact \nopensource-codeofconduct@amazon.com with any additional questions or comments.\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "# Contributing Guidelines\n\nThank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional \ndocumentation, we greatly value feedback and contributions from our community.\n\nPlease read through this document before submitting any issues or pull requests to ensure we have all the necessary \ninformation to effectively respond to your bug report or contribution.\n\n\n## Reporting Bugs/Feature Requests\n\nWe welcome you to use the GitHub issue tracker to report bugs or suggest features.\n\nWhen filing an issue, please check [existing open](https://github.com/aws/aws-xray-sdk-python/issues), or [recently closed](https://github.com/aws/aws-xray-sdk-python/issues?utf8=%E2%9C%93&q=is%3Aissue%20is%3Aclosed%20), issues to make sure somebody else hasn't already \nreported the issue. Please try to include as much information as you can. Details like these are incredibly useful:\n\n* A reproducible test case or series of steps\n* The version of our code being used\n* Any modifications you've made relevant to the bug\n* Anything unusual about your environment or deployment\n\n\n## Contributing via Pull Requests\nContributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:\n\n1. You are working against the latest source on the *master* branch.\n2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.\n3. You open an issue to discuss any significant work - we would hate for your time to be wasted.\n\nTo send us a pull request, please:\n\n1. Fork the repository.\n2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.\n3. Ensure local tests pass.\n4. Commit to your fork using clear commit messages.\n5. 
Send us a pull request, answering any default questions in the pull request interface.\n6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.\n\nGitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and \n[creating a pull request](https://help.github.com/articles/creating-a-pull-request/).\n\n\n## Finding contributions to work on\nLooking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](https://github.com/aws/aws-xray-sdk-python/labels/help%20wanted) issues is a great place to start. \n\n\n## Code of Conduct\nThis project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). \nFor more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact \nopensource-codeofconduct@amazon.com with any additional questions or comments.\n\n\n## Security issue notifications\nIf you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue.\n\n\n## Licensing\n\nSee the [LICENSE](https://github.com/aws/aws-xray-sdk-python/blob/master/LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.\n\nWe may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes.\n"
  },
  {
    "path": "LICENSE",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"{}\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright {yyyy} {name of copyright owner}\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "MANIFEST.in",
    "content": "include aws_xray_sdk/ext/resources/*.json\ninclude aws_xray_sdk/core/sampling/local/*.json\ninclude README.md\ninclude LICENSE\ninclude NOTICE\n"
  },
  {
    "path": "NOTICE",
    "content": "Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n"
  },
  {
    "path": "README.md",
    "content": "![Build Status](https://github.com/aws/aws-xray-sdk-python/actions/workflows/IntegrationTesting.yaml/badge.svg)\n[![codecov](https://codecov.io/gh/aws/aws-xray-sdk-python/branch/master/graph/badge.svg)](https://codecov.io/gh/aws/aws-xray-sdk-python)\n\n# AWS X-Ray SDK for Python\n\n## :mega: Upcoming Maintenance Mode on February 25, 2026\n\n[The AWS X-Ray SDKs will enter maintenance mode on **`February 25, 2026`**][xray-sdk-daemon-timeline]. During maintenance mode, the X-Ray SDKs and Daemon will only receive critical bug fixes and security updates, and will not be updated to support new features.\n\nWe recommend that you migrate to [AWS Distro for OpenTelemetry (ADOT) or OpenTelemetry Instrumentation][xray-otel-migration-docs] to generate traces (through manual or zero-code instrumentation) from your application and send them to AWS X-Ray. OpenTelemetry is the industry-wide standard for tracing instrumentation and observability. It has a large open-source community for support and provides more instrumentations and updates. By adopting an OpenTelemetry solution, developers can leverage the latest services and innovations from AWS CloudWatch.\n\n[xray-otel-migration-docs]: https://docs.aws.amazon.com/xray/latest/devguide/xray-sdk-migration.html\n[xray-sdk-daemon-timeline]: https://docs.aws.amazon.com/xray/latest/devguide/xray-daemon-eos.html\n\n-------------------------------------\n\n### OpenTelemetry Python with AWS X-Ray\n\nAWS X-Ray supports using OpenTelemetry Python and the AWS Distro for OpenTelemetry (ADOT) Collector to instrument your application and send trace data to X-Ray. The OpenTelemetry SDKs are an industry-wide standard for tracing instrumentation. They provide more instrumentations and have a larger community for support, but may not have complete feature parity with the X-Ray SDKs. 
See [choosing between the ADOT and X-Ray SDKs](https://docs.aws.amazon.com/xray/latest/devguide/xray-instrumenting-your-app.html#xray-instrumenting-choosing) for more help with choosing between the two.\n\nIf you want additional features when tracing your Python applications, please [open an issue on the OpenTelemetry Python Instrumentation repository](https://github.com/open-telemetry/opentelemetry-python-contrib/issues/new?labels=feature-request&template=feature_request.md&title=X-Ray%20Compatible%20Feature%20Request).\n\n### Python Versions End-of-Support Notice\n\nAWS X-Ray SDK for Python versions `>2.11.0` has dropped support for Python 2.7, 3.4, 3.5, and 3.6.\n\n-------------------------------------\n\n![Screenshot of the AWS X-Ray console](/images/example_servicemap.png?raw=true)\n\n## Installing\n\nThe AWS X-Ray SDK for Python is compatible with Python 3.7, 3.8, 3.9, 3.10, and 3.11.\n\nInstall the SDK using the following command (the SDK's non-testing dependencies will be installed).\n\n```\npip install aws-xray-sdk\n```\n\nTo install the SDK's testing dependencies, use the following command.\n\n```\npip install tox\n```\n\n## Getting Help\n\nUse the following community resources for getting help with the SDK. We use the GitHub\nissues for tracking bugs and feature requests.\n\n* Ask a question in the [AWS X-Ray Forum](https://forums.aws.amazon.com/forum.jspa?forumID=241&start=0).\n* Open a support ticket with [AWS Support](http://docs.aws.amazon.com/awssupport/latest/user/getting-started.html).\n* If you think you may have found a bug, open an [issue](https://github.com/aws/aws-xray-sdk-python/issues/new).\n\n## Opening Issues\n\nIf you encounter a bug with the AWS X-Ray SDK for Python, we want to hear about\nit. Before opening a new issue, search the [existing issues](https://github.com/aws/aws-xray-sdk-python/issues)\nto see if others are also experiencing the issue. 
Include the version of the AWS X-Ray\nSDK for Python, Python language, and botocore/boto3 if applicable. In addition, \ninclude the repro case when appropriate.\n\nThe GitHub issues are intended for bug reports and feature requests. For help and\nquestions about using the AWS SDK for Python, use the resources listed\nin the [Getting Help](https://github.com/aws/aws-xray-sdk-python#getting-help) section. Keeping the list of open issues lean helps us respond in a timely manner.\n\n## Documentation\n\nThe [developer guide](https://docs.aws.amazon.com/xray/latest/devguide) provides in-depth\nguidance about using the AWS X-Ray service.\nThe [API Reference](http://docs.aws.amazon.com/xray-sdk-for-python/latest/reference/)\nprovides guidance for using the SDK and module-level documentation.\n\n## Quick Start\n\n### Configuration\n\n```python\nfrom aws_xray_sdk.core import xray_recorder\n\nxray_recorder.configure(\n    sampling=False,\n    context_missing='LOG_ERROR',\n    plugins=('EC2Plugin', 'ECSPlugin', 'ElasticBeanstalkPlugin'),\n    daemon_address='127.0.0.1:3000',\n    dynamic_naming='*mysite.com*'\n)\n```\n\n### Start a custom segment/subsegment\n\nUsing context managers for implicit exceptions recording:\n\n```python\nfrom aws_xray_sdk.core import xray_recorder\n\nwith xray_recorder.in_segment('segment_name') as segment:\n    # Add metadata or annotation here if necessary\n    segment.put_metadata('key', dict, 'namespace')\n    with xray_recorder.in_subsegment('subsegment_name') as subsegment:\n        subsegment.put_annotation('key', 'value')\n        # Do something here\n    with xray_recorder.in_subsegment('subsegment2') as subsegment:\n        subsegment.put_annotation('key2', 'value2')\n        # Do something else \n```\n\nasync versions of context managers:\n\n```python\nfrom aws_xray_sdk.core import xray_recorder\n\nasync with xray_recorder.in_segment_async('segment_name') as segment:\n    # Add metadata or annotation here if necessary\n    
segment.put_metadata('key', dict, 'namespace')\n    async with xray_recorder.in_subsegment_async('subsegment_name') as subsegment:\n        subsegment.put_annotation('key', 'value')\n        # Do something here\n    async with xray_recorder.in_subsegment_async('subsegment2') as subsegment:\n        subsegment.put_annotation('key2', 'value2')\n        # Do something else \n```\n\nDefault begin/end functions:\n\n```python\nfrom aws_xray_sdk.core import xray_recorder\n\n# Start a segment\nsegment = xray_recorder.begin_segment('segment_name')\n# Start a subsegment\nsubsegment = xray_recorder.begin_subsegment('subsegment_name')\n\n# Add metadata or annotation here if necessary\nsegment.put_metadata('key', dict, 'namespace')\nsubsegment.put_annotation('key', 'value')\nxray_recorder.end_subsegment()\n\n# Close the segment\nxray_recorder.end_segment()\n```\n\n### Oversampling Mitigation\nTo modify the sampling decision at the subsegment level, subsegments that inherit the decision of their direct parent (segment or subsegment) can be created using `xray_recorder.begin_subsegment()` and unsampled subsegments can be created using\n`xray_recorder.begin_subsegment_without_sampling()`.\n\nThe code snippet below demonstrates creating a sampled or unsampled subsegment based on the sampling decision of each SQS message processed by Lambda.\n\n```python\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.models.subsegment import Subsegment\nfrom aws_xray_sdk.core.utils.sqs_message_helper import SqsMessageHelper\n\ndef lambda_handler(event, context):\n\n    for message in event['Records']:\n        if SqsMessageHelper.isSampled(message):\n            subsegment = xray_recorder.begin_subsegment('sampled_subsegment')\n            print('sampled - processing SQS message')\n\n        else:\n            subsegment = xray_recorder.begin_subsegment_without_sampling('unsampled_subsegment')\n            print('unsampled - processing SQS message')\n    \n    
xray_recorder.end_subsegment()   \n```\n\nThe code snippet below demonstrates wrapping a downstream AWS SDK request with an unsampled subsegment.\n```python\nfrom aws_xray_sdk.core import xray_recorder, patch_all\nimport boto3\n\npatch_all()\n\ndef lambda_handler(event, context):\n    subsegment = xray_recorder.begin_subsegment_without_sampling('unsampled_subsegment')\n    client = boto3.client('sqs')\n    print(client.list_queues())\n    \n    xray_recorder.end_subsegment()\n```\n\n### Capture\n\nAs a decorator:\n\n```python\nfrom aws_xray_sdk.core import xray_recorder\n\n@xray_recorder.capture('subsegment_name')\ndef myfunc():\n    # Do something here\n\nmyfunc()\n```\n\nor as a context manager:\n\n```python\nfrom aws_xray_sdk.core import xray_recorder\n\nwith xray_recorder.capture('subsegment_name') as subsegment:\n    # Do something here\n    subsegment.put_annotation('mykey', val)\n    # Do something more\n```\n\nAsync capture as decorator:\n\n```python\nfrom aws_xray_sdk.core import xray_recorder\n\n@xray_recorder.capture_async('subsegment_name')\nasync def myfunc():\n    # Do something here\n\nasync def main():\n    await myfunc()\n```\n\nor as context manager:\n\n```python\nfrom aws_xray_sdk.core import xray_recorder\n\nasync with xray_recorder.capture_async('subsegment_name') as subsegment:\n    # Do something here\n    subsegment.put_annotation('mykey', val)\n    # Do something more\n```\n\n### Adding annotations/metadata using recorder\n\n```python\nfrom aws_xray_sdk.core import xray_recorder\n\n# Start a segment if no segment exist\nsegment1 = xray_recorder.begin_segment('segment_name')\n\n# This will add the key value pair to segment1 as it is active\nxray_recorder.put_annotation('key', 'value')\n\n# Start a subsegment so it becomes the active trace entity\nsubsegment1 = xray_recorder.begin_subsegment('subsegment_name')\n\n# This will add the key value pair to subsegment1 as it is active\nxray_recorder.put_metadata('key', 'value')\n\nif 
xray_recorder.is_sampled():\n    # some expensitve annotations/metadata generation code here\n    val = compute_annotation_val()\n    metadata = compute_metadata_body()\n    xray_recorder.put_annotation('mykey', val)\n    xray_recorder.put_metadata('mykey', metadata)\n```\n\n### Generate NoOp Trace and Entity Id\nX-Ray Python SDK will by default generate no-op trace and entity id for unsampled requests and secure random trace and entity id for sampled requests. If customer wants to enable generating secure random trace and entity id for all the (sampled/unsampled) requests (this is applicable for trace id injection into logs use case) then they should set the `AWS_XRAY_NOOP_ID` environment variable as False.\n\n### Disabling X-Ray\nOften times, it may be useful to be able to disable X-Ray for specific use cases, whether to stop X-Ray from sending traces at any moment, or to test code functionality that originally depended on X-Ray instrumented packages to begin segments prior to the code call. For example, if your application relied on an XRayMiddleware to instrument incoming web requests, and you have a method which begins subsegments based on the segment generated by that middleware, it would be useful to be able to disable X-Ray for your unit tests so that `SegmentNotFound` exceptions are not thrown when you need to test your method.\n\nThere are two ways to disable X-Ray, one is through environment variables, and the other is through the SDKConfig module.\n\n**Disabling through the environment variable:**\n\nPrior to running your application, make sure to have the environment variable `AWS_XRAY_SDK_ENABLED` set to `false`. \n\n**Disabling through the SDKConfig module:**\n```\nfrom aws_xray_sdk import global_sdk_config\n\nglobal_sdk_config.set_sdk_enabled(False)\n```\n\n**Important Notes:**\n* Environment Variables always take precedence over the SDKConfig module when disabling/enabling. 
If your environment variable is set to `false` while your code calls `global_sdk_config.set_sdk_enabled(True)`, X-Ray will still be disabled.\n\n* If you need to re-enable X-Ray again during runtime and acknowledge disabling/enabling through the SDKConfig module, you may run the following in your application:\n```\nimport os\nfrom aws_xray_sdk import global_sdk_config\n\ndel os.environ['AWS_XRAY_SDK_ENABLED']\nglobal_sdk_config.set_sdk_enabled(True)\n```\n\n### Trace AWS Lambda functions\n\n```python\nfrom aws_xray_sdk.core import xray_recorder\n\ndef lambda_handler(event, context):\n    # ... some code\n\n    subsegment = xray_recorder.begin_subsegment('subsegment_name')\n    # Code to record\n    # Add metadata or annotation here, if necessary\n    subsegment.put_metadata('key', dict, 'namespace')\n    subsegment.put_annotation('key', 'value')\n\n    xray_recorder.end_subsegment()\n\n    # ... some other code\n```\n\n### Trace ThreadPoolExecutor\n\n```python\nimport concurrent.futures\n\nimport requests\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core import patch\n\npatch(('requests',))\n\nURLS = ['http://www.amazon.com/',\n        'http://aws.amazon.com/',\n        'http://example.com/',\n        'http://www.bilibili.com/',\n        'http://invalid-domain.com/']\n\ndef load_url(url, trace_entity):\n    # Set the parent X-Ray entity for the worker thread.\n    xray_recorder.set_trace_entity(trace_entity)\n    # Subsegment captured from the following HTTP GET will be\n    # a child of parent entity passed from the main thread.\n    resp = requests.get(url)\n    # prevent thread pollution\n    xray_recorder.clear_trace_entities()\n    return resp\n\n# Get the current active segment or subsegment from the main thread.\ncurrent_entity = xray_recorder.get_trace_entity()\nwith concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:\n    # Pass the active entity from main thread to worker threads.\n    future_to_url = 
{executor.submit(load_url, url, current_entity): url for url in URLS}\n    for future in concurrent.futures.as_completed(future_to_url):\n        url = future_to_url[future]\n        try:\n            data = future.result()\n        except Exception:\n            pass\n```\n\n### Trace SQL queries\nBy default, if no other value is provided to `.configure()`, SQL trace streaming is enabled\nfor all the supported DB engines. Those currently are:\n- Any engine attached to the Django ORM.\n- Any engine attached to SQLAlchemy.\n\nThe behaviour can be toggled by sending the appropriate `stream_sql` value, for example:\n```python\nfrom aws_xray_sdk.core import xray_recorder\n\nxray_recorder.configure(service='fallback_name', stream_sql=True)\n```\n\n### Patch third-party libraries\n\n```python\nfrom aws_xray_sdk.core import patch\n\nlibs_to_patch = ('boto3', 'mysql', 'requests')\npatch(libs_to_patch)\n```\n\n#### Automatic module patching\n\nFull modules in the local codebase can be recursively patched by providing the module references\nto the patch function.\n```python\nfrom aws_xray_sdk.core import patch\n\nlibs_to_patch = ('boto3', 'requests', 'local.module.ref', 'other_module')\npatch(libs_to_patch)\n```\nAn `xray_recorder.capture()` decorator will be applied to all functions and class methods in the\ngiven module and all the modules inside them recursively. Some files/modules can be excluded by\nproviding to the `patch` function a regex that matches them.\n```python\nfrom aws_xray_sdk.core import patch\n\nlibs_to_patch = ('boto3', 'requests', 'local.module.ref', 'other_module')\nignore = ('local.module.ref.some_file', 'other_module.some_module\\.*')\npatch(libs_to_patch, ignore_module_patterns=ignore)\n```\n\n### Django\n#### Add Django middleware\n\nIn django settings.py, use the following.\n\n```python\nINSTALLED_APPS = [\n    # ... other apps\n    'aws_xray_sdk.ext.django',\n]\n\nMIDDLEWARE = [\n    'aws_xray_sdk.ext.django.middleware.XRayMiddleware',\n    # ... 
other middlewares\n]\n```\n\nYou can configure the X-Ray recorder in a Django app under the ‘XRAY_RECORDER’ namespace. For a minimal configuration, the 'AWS_XRAY_TRACING_NAME' is required unless it is specified in an environment variable.\n```\nXRAY_RECORDER = {\n    'AWS_XRAY_TRACING_NAME': 'My application', # Required - the segment name for segments generated from incoming requests\n}\n```\nFor more information about configuring Django with X-Ray read more about it in the [API reference](https://docs.aws.amazon.com/xray-sdk-for-python/latest/reference/frameworks.html)\n\n#### SQL tracing\nIf Django's ORM is patched - either using the `AUTO_INSTRUMENT = True` in your settings file\nor explicitly calling `patch_db()` - the SQL query trace streaming can then be enabled or \ndisabled updating the `STREAM_SQL` variable in your settings file. It is enabled by default.\n\n#### Automatic patching\nThe automatic module patching can also be configured through Django settings.\n```python\nXRAY_RECORDER = {\n    'PATCH_MODULES': [\n        'boto3',\n        'requests',\n        'local.module.ref',\n        'other_module',\n    ],\n    'IGNORE_MODULE_PATTERNS': [\n        'local.module.ref.some_file',\n        'other_module.some_module\\.*',\n    ],\n    ...\n}\n```\nIf `AUTO_PATCH_PARENT_SEGMENT_NAME` is also specified, then a segment parent will be created \nwith the supplied name, wrapping the automatic patching so that it captures any dangling\nsubsegments created on the import patching.\n\n### Django in Lambda\nX-Ray can't search on http annotations in subsegments.   
To enable searching the middleware adds the http values as annotations\nThis allows searching in the X-Ray console like so\n\nThis is configurable in settings with `URLS_AS_ANNOTATION` that has 3 valid values\n`LAMBDA` - the default, which uses URLs as annotations by default if running in a lambda context\n`ALL` - do this for every request (useful if running in a mixed lambda/other deployment)\n`NONE` - don't do this for any (avoiding hitting the 50 annotation limit)\n\n```\nannotation.url BEGINSWITH \"https://your.url.com/here\"\n```\n\n### Add Flask middleware\n\n```python\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.ext.flask.middleware import XRayMiddleware\n\napp = Flask(__name__)\n\nxray_recorder.configure(service='fallback_name', dynamic_naming='*mysite.com*')\nXRayMiddleware(app, xray_recorder)\n```\n\n### Add Bottle middleware(plugin)\n\n```python\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.ext.bottle.middleware import XRayMiddleware\n\napp = Bottle()\n\nxray_recorder.configure(service='fallback_name', dynamic_naming='*mysite.com*')\napp.install(XRayMiddleware(xray_recorder))\n```\n\n### Serverless Support for Flask & Django & Bottle Using X-Ray\nServerless is an application model that enables you to shift more of your operational responsibilities to AWS. As a result, you can focus only on your applications and services, instead of the infrastructure management tasks such as server provisioning, patching, operating system maintenance, and capacity provisioning. With serverless, you can deploy your web application to [AWS Lambda](https://aws.amazon.com/lambda/) and have customers interact with it through a Lambda-invoking endpoint, such as [Amazon API Gateway](https://aws.amazon.com/api-gateway/). \n\nX-Ray supports the Serverless model out of the box and requires no extra configuration. The middlewares in Lambda generate `Subsegments` instead of `Segments` when an endpoint is reached. 
This is because `Segments` cannot be generated inside the Lambda function, but it is generated automatically by the Lambda container. Therefore, when using the middlewares with this model, it is important to make sure that your methods only generate `Subsegments`.\n\nThe following guide shows an example of setting up a Serverless application that utilizes API Gateway and Lambda:\n\n[Instrumenting Web Frameworks in a Serverless Environment](https://docs.aws.amazon.com/xray/latest/devguide/xray-sdk-python-serverless.html)\n\n### Working with aiohttp\n\nAdding aiohttp middleware. Support aiohttp >= 2.3.\n\n```python\nfrom aiohttp import web\n\nfrom aws_xray_sdk.ext.aiohttp.middleware import middleware\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.async_context import AsyncContext\n\nxray_recorder.configure(service='fallback_name', context=AsyncContext())\n\napp = web.Application(middlewares=[middleware])\napp.router.add_get(\"/\", handler)\n\nweb.run_app(app)\n```\n\nTracing aiohttp client. 
Support aiohttp >=3.\n\n```python\nfrom aws_xray_sdk.ext.aiohttp.client import aws_xray_trace_config\n\nasync def foo():\n    trace_config = aws_xray_trace_config()\n    async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:\n        async with session.get(url) as resp\n            await resp.read()\n```\n\n### Use SQLAlchemy ORM\nThe SQLAlchemy integration requires you to override the Session and Query Classes for SQL Alchemy\n\nSQLAlchemy integration uses subsegments so you need to have a segment started before you make a query.\n\n```python\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.ext.sqlalchemy.query import XRaySessionMaker\n\nxray_recorder.begin_segment('SQLAlchemyTest')\n\nSession = XRaySessionMaker(bind=engine)\nsession = Session()\n\nxray_recorder.end_segment()\napp = Flask(__name__)\n\nxray_recorder.configure(service='fallback_name', dynamic_naming='*mysite.com*')\nXRayMiddleware(app, xray_recorder)\n```\n\n### Add Flask-SQLAlchemy\n\n```python\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.ext.flask.middleware import XRayMiddleware\nfrom aws_xray_sdk.ext.flask_sqlalchemy.query import XRayFlaskSqlAlchemy\n\napp = Flask(__name__)\napp.config[\"SQLALCHEMY_DATABASE_URI\"] = \"sqlite:///:memory:\"\n\nXRayMiddleware(app, xray_recorder)\ndb = XRayFlaskSqlAlchemy(app)\n\n```\n\n### Ignoring httplib requests\n\nIf you want to ignore certain httplib requests you can do so based on the hostname or URL that is being requsted. 
The hostname is matched using the Python [fnmatch library](https://docs.python.org/3/library/fnmatch.html) which does Unix glob style matching.\n\n```python\nfrom aws_xray_sdk.ext.httplib import add_ignored as xray_add_ignored\n\n# ignore requests to test.myapp.com\nxray_add_ignored(hostname='test.myapp.com')\n\n# ignore requests to a subdomain of myapp.com with a glob pattern\nxray_add_ignored(hostname='*.myapp.com')\n\n# ignore requests to /test-url and /other-test-url\nxray_add_ignored(urls=['/test-path', '/other-test-path'])\n\n# ignore requests to myapp.com for /test-url\nxray_add_ignored(hostname='myapp.com', urls=['/test-url'])\n```\n\nIf you use a subclass of httplib to make your requests, you can also filter on the class name that initiates the request. This must use the complete package name to do the match.\n\n```python\nfrom aws_xray_sdk.ext.httplib import add_ignored as xray_add_ignored\n\n# ignore all requests made by botocore\nxray_add_ignored(subclass='botocore.awsrequest.AWSHTTPConnection')\n```\n\n## License\n\nThe AWS X-Ray SDK for Python is licensed under the Apache 2.0 License. See LICENSE and NOTICE.txt for more information.\n"
  },
  {
    "path": "__init__.py",
    "content": ""
  },
  {
    "path": "aws_xray_sdk/__init__.py",
    "content": "from .sdk_config import SDKConfig\n\nglobal_sdk_config = SDKConfig()\n"
  },
  {
    "path": "aws_xray_sdk/core/__init__.py",
    "content": "from .async_recorder import AsyncAWSXRayRecorder\nfrom .patcher import patch, patch_all\nfrom .recorder import AWSXRayRecorder\n\nxray_recorder = AsyncAWSXRayRecorder()\n\n__all__ = [\n    'patch',\n    'patch_all',\n    'xray_recorder',\n    'AWSXRayRecorder',\n]\n"
  },
  {
    "path": "aws_xray_sdk/core/async_context.py",
    "content": "import asyncio\nimport copy\n\nfrom .context import Context as _Context\n\n\nclass AsyncContext(_Context):\n    \"\"\"\n    Async Context for storing segments.\n\n    Inherits nearly everything from the main Context class.\n    Replaces threading.local with a task based local storage class,\n    Also overrides clear_trace_entities\n    \"\"\"\n    def __init__(self, *args, loop=None, use_task_factory=True, **kwargs):\n        super().__init__(*args, **kwargs)\n\n        self._loop = loop\n        if loop is None:\n            self._loop = asyncio.get_event_loop()\n\n        if use_task_factory:\n            self._loop.set_task_factory(task_factory)\n\n        self._local = TaskLocalStorage(loop=loop)\n\n    def clear_trace_entities(self):\n        \"\"\"\n        Clear all trace_entities stored in the task local context.\n        \"\"\"\n        if self._local is not None:\n            self._local.clear()\n\n\nclass TaskLocalStorage:\n    \"\"\"\n    Simple task local storage\n    \"\"\"\n    def __init__(self, loop=None):\n        if loop is None:\n            loop = asyncio.get_event_loop()\n        self._loop = loop\n\n    def __setattr__(self, name, value):\n        if name in ('_loop',):\n            # Set normal attributes\n            object.__setattr__(self, name, value)\n\n        else:\n            # Set task local attributes\n            task = asyncio.current_task(loop=self._loop)\n            if task is None:\n                return None\n\n            if not hasattr(task, 'context'):\n                task.context = {}\n\n            task.context[name] = value\n\n    def __getattribute__(self, item):\n        if item in ('_loop', 'clear'):\n            # Return references to local objects\n            return object.__getattribute__(self, item)\n\n        task = asyncio.current_task(loop=self._loop)\n        if task is None:\n            return None\n\n        if hasattr(task, 'context') and item in task.context:\n            return 
task.context[item]\n\n        raise AttributeError('Task context does not have attribute {0}'.format(item))\n\n    def clear(self):\n        # If were in a task, clear the context dictionary\n        task = asyncio.current_task(loop=self._loop)\n        if task is not None and hasattr(task, 'context'):\n            task.context.clear()\n\n\ndef task_factory(loop, coro):\n    \"\"\"\n    Task factory function\n\n    Fuction closely mirrors the logic inside of\n    asyncio.BaseEventLoop.create_task. Then if there is a current\n    task and the current task has a context then share that context\n    with the new task\n    \"\"\"\n    task = asyncio.Task(coro, loop=loop)\n    if task._source_traceback:  # flake8: noqa\n        del task._source_traceback[-1]  # flake8: noqa\n\n    # Share context with new task if possible\n    current_task = asyncio.current_task(loop=loop)\n    if current_task is not None and hasattr(current_task, 'context'):\n        if current_task.context.get('entities'):\n            # NOTE: (enowell) Because the `AWSXRayRecorder`'s `Context` decides\n            # the parent by looking at its `_local.entities`, we must copy the entities\n            # for concurrent subsegments. Otherwise, the subsegments would be\n            # modifying the same `entities` list and sugsegments would take other\n            # subsegments as parents instead of the original `segment`.\n            #\n            # See more: https://github.com/aws/aws-xray-sdk-python/blob/0f13101e4dba7b5c735371cb922f727b1d9f46d8/aws_xray_sdk/core/context.py#L90-L101\n            new_context = copy.copy(current_task.context)\n            new_context['entities'] = [item for item in current_task.context['entities']]\n        else:\n            new_context = current_task.context\n        setattr(task, 'context', new_context)\n\n    return task\n"
  },
  {
    "path": "aws_xray_sdk/core/async_recorder.py",
    "content": "import time\n\nfrom aws_xray_sdk.core.recorder import AWSXRayRecorder\nfrom aws_xray_sdk.core.utils import stacktrace\nfrom aws_xray_sdk.core.models.subsegment import SubsegmentContextManager, is_already_recording, subsegment_decorator\nfrom aws_xray_sdk.core.models.segment import SegmentContextManager\n\n\nclass AsyncSegmentContextManager(SegmentContextManager):\n    async def __aenter__(self):\n        return self.__enter__()\n\n    async def __aexit__(self, exc_type, exc_val, exc_tb):\n        return self.__exit__(exc_type, exc_val, exc_tb)\n\nclass AsyncSubsegmentContextManager(SubsegmentContextManager):\n\n    @subsegment_decorator\n    async def __call__(self, wrapped, instance, args, kwargs):\n        if is_already_recording(wrapped):\n            # The wrapped function is already decorated, the subsegment will be created later,\n            # just return the result\n            return await wrapped(*args, **kwargs)\n\n        func_name = self.name\n        if not func_name:\n            func_name = wrapped.__name__\n\n        return await self.recorder.record_subsegment_async(\n            wrapped, instance, args, kwargs,\n            name=func_name,\n            namespace='local',\n            meta_processor=None,\n        )\n\n    async def __aenter__(self):\n        return self.__enter__()\n\n    async def __aexit__(self, exc_type, exc_val, exc_tb):\n        return self.__exit__(exc_type, exc_val, exc_tb)\n\n\nclass AsyncAWSXRayRecorder(AWSXRayRecorder):\n    def capture_async(self, name=None):\n        \"\"\"\n        A decorator that records enclosed function in a subsegment.\n        It only works with asynchronous functions.\n\n        params str name: The name of the subsegment. 
If not specified\n        the function name will be used.\n        \"\"\"\n        return self.in_subsegment_async(name=name)\n\n    def in_segment_async(self, name=None, **segment_kwargs):\n        \"\"\"\n        Return a segment async context manager.\n\n        :param str name: the name of the segment\n        :param dict segment_kwargs: remaining arguments passed directly to `begin_segment`\n        \"\"\"\n        return AsyncSegmentContextManager(self, name=name, **segment_kwargs)\n\n    def in_subsegment_async(self, name=None, **subsegment_kwargs):\n        \"\"\"\n        Return a subsegment async context manager.\n\n        :param str name: the name of the segment\n        :param dict segment_kwargs: remaining arguments passed directly to `begin_segment`\n        \"\"\"\n        return AsyncSubsegmentContextManager(self, name=name, **subsegment_kwargs)\n\n    async def record_subsegment_async(self, wrapped, instance, args, kwargs, name,\n                                      namespace, meta_processor):\n\n        subsegment = self.begin_subsegment(name, namespace)\n\n        exception = None\n        stack = None\n        return_value = None\n\n        try:\n            return_value = await wrapped(*args, **kwargs)\n            return return_value\n        except Exception as e:\n            exception = e\n            stack = stacktrace.get_stacktrace(limit=self._max_trace_back)\n            raise\n        finally:\n            # No-op if subsegment is `None` due to `LOG_ERROR`.\n            if subsegment is not None:\n                end_time = time.time()\n                if callable(meta_processor):\n                    meta_processor(\n                        wrapped=wrapped,\n                        instance=instance,\n                        args=args,\n                        kwargs=kwargs,\n                        return_value=return_value,\n                        exception=exception,\n                        subsegment=subsegment,\n              
          stack=stack,\n                    )\n                elif exception:\n                    if subsegment:\n                        subsegment.add_exception(exception, stack)\n\n                self.end_subsegment(end_time)\n"
  },
  {
    "path": "aws_xray_sdk/core/context.py",
    "content": "import threading\nimport logging\nimport os\n\nfrom .exceptions.exceptions import SegmentNotFoundException\nfrom .models.dummy_entities import DummySegment\nfrom aws_xray_sdk import global_sdk_config\n\n\nlog = logging.getLogger(__name__)\n\nMISSING_SEGMENT_MSG = 'cannot find the current segment/subsegment, please make sure you have a segment open'\nSUPPORTED_CONTEXT_MISSING = ('RUNTIME_ERROR', 'LOG_ERROR', 'IGNORE_ERROR')\nCXT_MISSING_STRATEGY_KEY = 'AWS_XRAY_CONTEXT_MISSING'\n\n\nclass Context:\n    \"\"\"\n    The context storage class to store trace entities(segments/subsegments).\n    The default implementation uses threadlocal to store these entities.\n    It also provides interfaces to manually inject trace entities which will\n    replace the current stored entities and to clean up the storage.\n\n    For any data access or data mutation, if there is no active segment present\n    it will use user-defined behavior to handle such case. By default it throws\n    an runtime error.\n\n    This data structure is thread-safe.\n    \"\"\"\n    def __init__(self, context_missing='LOG_ERROR'):\n\n        self._local = threading.local()\n        strategy = os.getenv(CXT_MISSING_STRATEGY_KEY, context_missing)\n        self._context_missing = strategy\n\n    def put_segment(self, segment):\n        \"\"\"\n        Store the segment created by ``xray_recorder`` to the context.\n        It overrides the current segment if there is already one.\n        \"\"\"\n        setattr(self._local, 'entities', [segment])\n\n    def end_segment(self, end_time=None):\n        \"\"\"\n        End the current active segment.\n\n        :param float end_time: epoch in seconds. 
If not specified the current\n            system time will be used.\n        \"\"\"\n        entity = self.get_trace_entity()\n        if not entity:\n            log.warning(\"No segment to end\")\n            return\n        if self._is_subsegment(entity):\n            entity.parent_segment.close(end_time)\n        else:\n            entity.close(end_time)\n\n    def put_subsegment(self, subsegment):\n        \"\"\"\n        Store the subsegment created by ``xray_recorder`` to the context.\n        If you put a new subsegment while there is already an open subsegment,\n        the new subsegment becomes the child of the existing subsegment.\n        \"\"\"\n        entity = self.get_trace_entity()\n        if not entity:\n            log.warning(\"Active segment or subsegment not found. Discarded %s.\" % subsegment.name)\n            return\n\n        entity.add_subsegment(subsegment)\n        self._local.entities.append(subsegment)\n\n    def end_subsegment(self, end_time=None):\n        \"\"\"\n        End the current active segment. Return False if there is no\n        subsegment to end.\n\n        :param float end_time: epoch in seconds. If not specified the current\n            system time will be used.\n        \"\"\"\n        entity = self.get_trace_entity()\n        if self._is_subsegment(entity):\n            entity.close(end_time)\n            self._local.entities.pop()\n            return True\n        elif isinstance(entity, DummySegment):\n            return False\n        else:\n            log.warning(\"No subsegment to end.\")\n            return False\n\n    def get_trace_entity(self):\n        \"\"\"\n        Return the current trace entity(segment/subsegment). 
If there is none,\n        it behaves based on pre-defined ``context_missing`` strategy.\n        If the SDK is disabled, returns a DummySegment\n        \"\"\"\n        if not getattr(self._local, 'entities', None):\n            if not global_sdk_config.sdk_enabled():\n                return DummySegment()\n            return self.handle_context_missing()\n\n        return self._local.entities[-1]\n\n    def set_trace_entity(self, trace_entity):\n        \"\"\"\n        Store the input trace_entity to local context. It will overwrite all\n        existing ones if there is any.\n        \"\"\"\n        setattr(self._local, 'entities', [trace_entity])\n\n    def clear_trace_entities(self):\n        \"\"\"\n        clear all trace_entities stored in the local context.\n        In case of using threadlocal to store trace entites, it will\n        clean up all trace entities created by the current thread.\n        \"\"\"\n        self._local.__dict__.clear()\n\n    def handle_context_missing(self):\n        \"\"\"\n        Called whenever there is no trace entity to access or mutate.\n        \"\"\"\n        if self.context_missing == 'RUNTIME_ERROR':\n            raise SegmentNotFoundException(MISSING_SEGMENT_MSG)\n        elif self.context_missing == 'LOG_ERROR':\n            log.error(MISSING_SEGMENT_MSG)\n\n    def _is_subsegment(self, entity):\n\n        return hasattr(entity, 'type') and entity.type == 'subsegment'\n\n    @property\n    def context_missing(self):\n        return self._context_missing\n\n    @context_missing.setter\n    def context_missing(self, value):\n        if value not in SUPPORTED_CONTEXT_MISSING:\n            log.warning('specified context_missing not supported, using default.')\n            return\n\n        self._context_missing = value\n"
  },
  {
    "path": "aws_xray_sdk/core/daemon_config.py",
    "content": "import os\n\nfrom .exceptions.exceptions import InvalidDaemonAddressException\n\nDAEMON_ADDRESS_KEY = \"AWS_XRAY_DAEMON_ADDRESS\"\nDEFAULT_ADDRESS = '127.0.0.1:2000'\n\n\nclass DaemonConfig:\n    \"\"\"The class that stores X-Ray daemon configuration about\n    the ip address and port for UDP and TCP port. It gets the address\n    string from ``AWS_TRACING_DAEMON_ADDRESS`` and then from recorder's\n    configuration for ``daemon_address``.\n    A notation of '127.0.0.1:2000' or 'tcp:127.0.0.1:2000 udp:127.0.0.2:2001'\n    are both acceptable. The former one means UDP and TCP are running at\n    the same address.\n    By default it assumes a X-Ray daemon running at 127.0.0.1:2000\n    listening to both UDP and TCP traffic.\n    \"\"\"\n    def __init__(self, daemon_address=DEFAULT_ADDRESS):\n        if daemon_address is None:\n            daemon_address = DEFAULT_ADDRESS\n\n        val = os.getenv(DAEMON_ADDRESS_KEY, daemon_address)\n        configs = val.split(' ')\n        if len(configs) == 1:\n            self._parse_single_form(configs[0])\n        elif len(configs) == 2:\n            self._parse_double_form(configs[0], configs[1], val)\n        else:\n            raise InvalidDaemonAddressException('Invalid daemon address %s specified.' % val)\n\n    def _parse_single_form(self, val):\n        try:\n            configs = val.split(':')\n            self._udp_ip = configs[0]\n            self._udp_port = int(configs[1])\n            self._tcp_ip = configs[0]\n            self._tcp_port = int(configs[1])\n        except Exception:\n            raise InvalidDaemonAddressException('Invalid daemon address %s specified.' 
% val)\n\n    def _parse_double_form(self, val1, val2, origin):\n        try:\n            configs1 = val1.split(':')\n            configs2 = val2.split(':')\n            mapping = {\n                configs1[0]: configs1,\n                configs2[0]: configs2,\n            }\n\n            tcp_info = mapping.get('tcp')\n            udp_info = mapping.get('udp')\n\n            self._tcp_ip = tcp_info[1]\n            self._tcp_port = int(tcp_info[2])\n            self._udp_ip = udp_info[1]\n            self._udp_port = int(udp_info[2])\n        except Exception:\n            raise InvalidDaemonAddressException('Invalid daemon address %s specified.' % origin)\n\n    @property\n    def udp_ip(self):\n        return self._udp_ip\n\n    @property\n    def udp_port(self):\n        return self._udp_port\n\n    @property\n    def tcp_ip(self):\n        return self._tcp_ip\n\n    @property\n    def tcp_port(self):\n        return self._tcp_port\n"
  },
  {
    "path": "aws_xray_sdk/core/emitters/__init__.py",
    "content": ""
  },
  {
    "path": "aws_xray_sdk/core/emitters/udp_emitter.py",
    "content": "import logging\nimport socket\n\nfrom aws_xray_sdk.core.daemon_config import DaemonConfig\nfrom ..exceptions.exceptions import InvalidDaemonAddressException\n\nlog = logging.getLogger(__name__)\n\n\nPROTOCOL_HEADER = \"{\\\"format\\\":\\\"json\\\",\\\"version\\\":1}\"\nPROTOCOL_DELIMITER = '\\n'\nDEFAULT_DAEMON_ADDRESS = '127.0.0.1:2000'\n\n\nclass UDPEmitter:\n    \"\"\"\n    The default emitter the X-Ray recorder uses to send segments/subsegments\n    to the X-Ray daemon over UDP using a non-blocking socket. If there is an\n    exception on the actual data transfer between the socket and the daemon,\n    it logs the exception and continue.\n    \"\"\"\n    def __init__(self, daemon_address=DEFAULT_DAEMON_ADDRESS):\n\n        self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n        self._socket.setblocking(0)\n        self.set_daemon_address(daemon_address)\n\n    def send_entity(self, entity):\n        \"\"\"\n        Serializes a segment/subsegment and sends it to the X-Ray daemon\n        over UDP. 
By default it doesn't retry on failures.\n\n        :param entity: a trace entity to send to the X-Ray daemon\n        \"\"\"\n        try:\n            message = \"%s%s%s\" % (PROTOCOL_HEADER,\n                                  PROTOCOL_DELIMITER,\n                                  entity.serialize())\n\n            log.debug(\"sending: %s to %s:%s.\" % (message, self._ip, self._port))\n            self._send_data(message)\n        except Exception:\n            log.exception(\"Failed to send entity to Daemon.\")\n\n    def set_daemon_address(self, address):\n        \"\"\"\n        Set up UDP ip and port from the raw daemon address\n        string using ``DaemonConfig`` class utlities.\n        \"\"\"\n        if address:\n            daemon_config = DaemonConfig(address)\n            self._ip, self._port = daemon_config.udp_ip, daemon_config.udp_port\n\n    @property\n    def ip(self):\n        return self._ip\n\n    @property\n    def port(self):\n        return self._port\n\n    def _send_data(self, data):\n        self._socket.sendto(data.encode('utf-8'), (self._ip, self._port))\n\n    def _parse_address(self, daemon_address):\n        try:\n            val = daemon_address.split(':')\n            return val[0], int(val[1])\n        except Exception:\n            raise InvalidDaemonAddressException('Invalid daemon address %s specified.' % daemon_address)\n"
  },
  {
    "path": "aws_xray_sdk/core/exceptions/__init__.py",
    "content": ""
  },
  {
    "path": "aws_xray_sdk/core/exceptions/exceptions.py",
    "content": "class InvalidSamplingManifestError(Exception):\n    pass\n\n\nclass SegmentNotFoundException(Exception):\n    pass\n\n\nclass InvalidDaemonAddressException(Exception):\n    pass\n\n\nclass SegmentNameMissingException(Exception):\n    pass\n\n\nclass SubsegmentNameMissingException(Exception):\n    pass\n\n\nclass FacadeSegmentMutationException(Exception):\n    pass\n\n\nclass MissingPluginNames(Exception):\n    pass\n\n\nclass AlreadyEndedException(Exception):\n    pass\n"
  },
  {
    "path": "aws_xray_sdk/core/lambda_launcher.py",
    "content": "import os\nimport logging\nimport threading\n\nfrom aws_xray_sdk import global_sdk_config\nfrom .models.dummy_entities import DummySegment\nfrom .models.facade_segment import FacadeSegment\nfrom .models.trace_header import TraceHeader\nfrom .context import Context\n\nlog = logging.getLogger(__name__)\n\n\nLAMBDA_TRACE_HEADER_KEY = '_X_AMZN_TRACE_ID'\nLAMBDA_TASK_ROOT_KEY = 'LAMBDA_TASK_ROOT'\nTOUCH_FILE_DIR = '/tmp/.aws-xray/'\nTOUCH_FILE_PATH = '/tmp/.aws-xray/initialized'\n\n\ndef check_in_lambda():\n    \"\"\"\n    Return None if SDK is not loaded in AWS Lambda worker.\n    Otherwise drop a touch file and return a lambda context.\n    \"\"\"\n    if not os.getenv(LAMBDA_TASK_ROOT_KEY):\n        return None\n\n    try:\n        os.mkdir(TOUCH_FILE_DIR)\n    except OSError:\n        log.debug('directory %s already exists', TOUCH_FILE_DIR)\n\n    try:\n        f = open(TOUCH_FILE_PATH, 'w+')\n        f.close()\n        # utime force second parameter in python2.7\n        os.utime(TOUCH_FILE_PATH, None)\n    except (IOError, OSError):\n        log.warning(\"Unable to write to %s. Failed to signal SDK initialization.\" % TOUCH_FILE_PATH)\n\n    return LambdaContext()\n\n\nclass LambdaContext(Context):\n    \"\"\"\n    Lambda service will generate a segment for each function invocation which\n    cannot be mutated. The context doesn't keep any manually created segment\n    but instead every time ``get_trace_entity()`` gets called it refresh the\n    segment based on environment variables set by Lambda worker.\n    \"\"\"\n    def __init__(self):\n\n        self._local = threading.local()\n\n    def put_segment(self, segment):\n        \"\"\"\n        No-op.\n        \"\"\"\n        log.warning('Cannot create segments inside Lambda function. Discarded.')\n\n    def end_segment(self, end_time=None):\n        \"\"\"\n        No-op.\n        \"\"\"\n        log.warning('Cannot end segment inside Lambda function. 
Ignored.')\n\n    def put_subsegment(self, subsegment):\n        \"\"\"\n        Refresh the segment every time this function is invoked to prevent\n        a new subsegment from being attached to a leaked segment/subsegment.\n        \"\"\"\n        current_entity = self.get_trace_entity()\n\n        if not self._is_subsegment(current_entity) and (getattr(current_entity, 'initializing', None) or isinstance(current_entity, DummySegment)):\n            if global_sdk_config.sdk_enabled() and not os.getenv(LAMBDA_TRACE_HEADER_KEY):\n                log.warning(\"Subsegment %s discarded due to Lambda worker still initializing\" % subsegment.name)\n            return\n\n        current_entity.add_subsegment(subsegment)\n        self._local.entities.append(subsegment)\n\n    def set_trace_entity(self, trace_entity):\n        \"\"\"\n        For Lambda context, we additionally store the segment in the thread local.\n        \"\"\"\n        if self._is_subsegment(trace_entity):\n            segment = trace_entity.parent_segment\n        else:\n            segment = trace_entity\n\n        setattr(self._local, 'segment', segment)\n        setattr(self._local, 'entities', [trace_entity])\n\n    def get_trace_entity(self):\n        self._refresh_context()\n        if getattr(self._local, 'entities', None):\n            return self._local.entities[-1]\n        else:\n            return self._local.segment\n\n    def _refresh_context(self):\n        \"\"\"\n        Get current segment. To prevent resource leaking in Lambda worker,\n        every time there is segment present, we compare its trace id to current\n        environment variables. 
If it is different we create a new segment\n        and clean up subsegments stored.\n        \"\"\"\n        header_str = os.getenv(LAMBDA_TRACE_HEADER_KEY)\n        trace_header = TraceHeader.from_header_str(header_str)\n        if not global_sdk_config.sdk_enabled():\n            trace_header._sampled = False\n\n        segment = getattr(self._local, 'segment', None)\n\n        if segment:\n            # Ensure customers don't have leaked subsegments across invocations\n            if not trace_header.root or trace_header.root == segment.trace_id:\n                return\n            else:\n                self._initialize_context(trace_header)\n        else:\n            self._initialize_context(trace_header)\n\n    @property\n    def context_missing(self):\n        return None\n\n    @context_missing.setter\n    def context_missing(self, value):\n        pass\n\n    def handle_context_missing(self):\n        \"\"\"\n        No-op.\n        \"\"\"\n        pass\n\n    def _initialize_context(self, trace_header):\n        \"\"\"\n        Create a segment based on environment variables set by\n        AWS Lambda and initialize storage for subsegments.\n        \"\"\"\n        sampled = None\n        if not global_sdk_config.sdk_enabled():\n            # Force subsequent subsegments to be disabled and turned into DummySegments.\n            sampled = False\n        elif trace_header.sampled == 0:\n            sampled = False\n        elif trace_header.sampled == 1:\n            sampled = True\n\n        segment = None\n        if not trace_header.root or not trace_header.parent or trace_header.sampled is None:\n            segment = DummySegment()\n            log.debug(\"Creating NoOp/Dummy parent segment\")\n        else:\n            segment = FacadeSegment(\n                name='facade',\n                traceid=trace_header.root,\n                entityid=trace_header.parent,\n                sampled=sampled,\n            )\n        
segment.save_origin_trace_header(trace_header)\n        setattr(self._local, 'segment', segment)\n        setattr(self._local, 'entities', [])\n"
  },
  {
    "path": "aws_xray_sdk/core/models/__init__.py",
    "content": ""
  },
  {
    "path": "aws_xray_sdk/core/models/default_dynamic_naming.py",
    "content": "from ..utils.search_pattern import wildcard_match\n\n\nclass DefaultDynamicNaming:\n    \"\"\"\n    Decides what name to use on a segment generated from an incoming request.\n    By default it takes the host name and compares it to a pre-defined pattern.\n    If the host name matches that pattern, it returns the host name, otherwise\n    it returns the fallback name. The host name usually comes from the incoming\n    request's headers.\n    \"\"\"\n    def __init__(self, pattern, fallback):\n        \"\"\"\n        :param str pattern: the regex-like pattern to be compared against.\n            Right now only ? and * are supported. An asterisk (*) represents\n            any combination of characters. A question mark (?) represents\n            any single character.\n        :param str fallback: the fallback name to be used if the candidate name\n            doesn't match the provided pattern.\n        \"\"\"\n        self._pattern = pattern\n        self._fallback = fallback\n\n    def get_name(self, host_name):\n        \"\"\"\n        Returns the segment name based on the input host name.\n        \"\"\"\n        if wildcard_match(self._pattern, host_name):\n            return host_name\n        else:\n            return self._fallback\n"
  },
  {
    "path": "aws_xray_sdk/core/models/dummy_entities.py",
    "content": "import os\nfrom .noop_traceid import NoOpTraceId\nfrom .traceid import TraceId\nfrom .segment import Segment\nfrom .subsegment import Subsegment\n\n\nclass DummySegment(Segment):\n    \"\"\"\n    A dummy segment is created when ``xray_recorder`` decide to not sample\n    the segment based on sampling rules.\n    Adding data to a dummy segment becomes a no-op except for\n    subsegments. This is to reduce the memory footprint of the SDK.\n    A dummy segment will not be sent to the X-Ray daemon. Manually creating\n    dummy segments is not recommended.\n    \"\"\"\n\n    def __init__(self, name='dummy'):\n        no_op_id = os.getenv('AWS_XRAY_NOOP_ID')\n        if no_op_id and no_op_id.lower() == 'false':\n            super().__init__(name=name, traceid=TraceId().to_id())\n        else:\n            super().__init__(name=name, traceid=NoOpTraceId().to_id(), entityid='0000000000000000')\n        self.sampled = False\n\n    def set_aws(self, aws_meta):\n        \"\"\"\n        No-op\n        \"\"\"\n        pass\n\n    def put_http_meta(self, key, value):\n        \"\"\"\n        No-op\n        \"\"\"\n        pass\n\n    def put_annotation(self, key, value):\n        \"\"\"\n        No-op\n        \"\"\"\n        pass\n\n    def put_metadata(self, key, value, namespace='default'):\n        \"\"\"\n        No-op\n        \"\"\"\n        pass\n\n    def set_user(self, user):\n        \"\"\"\n        No-op\n        \"\"\"\n        pass\n\n    def set_service(self, service_info):\n        \"\"\"\n        No-op\n        \"\"\"\n        pass\n\n    def apply_status_code(self, status_code):\n        \"\"\"\n        No-op\n        \"\"\"\n        pass\n\n    def add_exception(self, exception, stack, remote=False):\n        \"\"\"\n        No-op\n        \"\"\"\n        pass\n\n    def serialize(self):\n        \"\"\"\n        No-op\n        \"\"\"\n        pass\n\n\nclass DummySubsegment(Subsegment):\n    \"\"\"\n    A dummy subsegment will be created when 
``xray_recorder`` tries\n    to create a subsegment under a not sampled segment. Adding data\n    to a dummy subsegment becomes no-op. Dummy subsegment will not\n    be sent to the X-Ray daemon.\n    \"\"\"\n\n    def __init__(self, segment, name='dummy'):\n        super().__init__(name, 'dummy', segment)\n        no_op_id = os.getenv('AWS_XRAY_NOOP_ID')\n        if no_op_id and no_op_id.lower() == 'false':\n            super(Subsegment, self).__init__(name)\n        else:\n            super(Subsegment, self).__init__(name, entity_id='0000000000000000')\n        self.sampled = False\n\n    def set_aws(self, aws_meta):\n        \"\"\"\n        No-op\n        \"\"\"\n        pass\n\n    def put_http_meta(self, key, value):\n        \"\"\"\n        No-op\n        \"\"\"\n        pass\n\n    def put_annotation(self, key, value):\n        \"\"\"\n        No-op\n        \"\"\"\n        pass\n\n    def put_metadata(self, key, value, namespace='default'):\n        \"\"\"\n        No-op\n        \"\"\"\n        pass\n\n    def set_sql(self, sql):\n        \"\"\"\n        No-op\n        \"\"\"\n        pass\n\n    def apply_status_code(self, status_code):\n        \"\"\"\n        No-op\n        \"\"\"\n        pass\n\n    def add_exception(self, exception, stack, remote=False):\n        \"\"\"\n        No-op\n        \"\"\"\n        pass\n\n    def serialize(self):\n        \"\"\"\n        No-op\n        \"\"\"\n        pass\n"
  },
  {
    "path": "aws_xray_sdk/core/models/entity.py",
    "content": "import logging\nimport os\nimport binascii\nimport time\nimport string\n\nimport json\n\nfrom ..utils.compat import annotation_value_types\nfrom ..utils.conversion import metadata_to_dict\nfrom .throwable import Throwable\nfrom . import http\nfrom ..exceptions.exceptions import AlreadyEndedException\n\nlog = logging.getLogger(__name__)\n\n# Valid characters can be found at http://docs.aws.amazon.com/xray/latest/devguide/xray-api-segmentdocuments.html\n_common_invalid_name_characters = '?;*()!$~^<>'\n_valid_annotation_key_characters = string.ascii_letters + string.digits + '_'\n\nORIGIN_TRACE_HEADER_ATTR_KEY = '_origin_trace_header'\n\n\nclass Entity:\n    \"\"\"\n    The parent class for segment/subsegment. It holds common properties\n    and methods on segment and subsegment.\n    \"\"\"\n\n    def __init__(self, name, entity_id=None):\n        if not entity_id:\n            self.id = self._generate_random_id()\n        else:\n            self.id = entity_id\n\n        # required attributes\n        self.name = name\n        self.name = ''.join([c for c in name if c not in _common_invalid_name_characters])\n        self.start_time = time.time()\n        self.parent_id = None\n\n        if self.name != name:\n            log.warning(\"Removing Segment/Subsugment Name invalid characters from {}.\".format(name))\n\n        # sampling\n        self.sampled = True\n\n        # state\n        self.in_progress = True\n\n        # meta fields\n        self.http = {}\n        self.annotations = {}\n        self.metadata = {}\n        self.aws = {}\n        self.cause = {}\n\n        # child subsegments\n        # list is thread-safe\n        self.subsegments = []\n\n    def close(self, end_time=None):\n        \"\"\"\n        Close the trace entity by setting `end_time`\n        and flip the in progress flag to False.\n\n        :param float end_time: Epoch in seconds. 
If not specified\n            current time will be used.\n        \"\"\"\n        self._check_ended()\n\n        if end_time:\n            self.end_time = end_time\n        else:\n            self.end_time = time.time()\n        self.in_progress = False\n\n    def add_subsegment(self, subsegment):\n        \"\"\"\n        Add input subsegment as a child subsegment.\n        \"\"\"\n        self._check_ended()\n        subsegment.parent_id = self.id\n\n        if not self.sampled and subsegment.sampled:\n            log.warning(\"This sampled subsegment is being added to an unsampled parent segment/subsegment and will be orphaned.\")\n\n        self.subsegments.append(subsegment)\n\n    def remove_subsegment(self, subsegment):\n        \"\"\"\n        Remove input subsegment from child subsegments.\n        \"\"\"\n        self.subsegments.remove(subsegment)\n\n    def put_http_meta(self, key, value):\n        \"\"\"\n        Add http related metadata.\n\n        :param str key: Currently supported keys are:\n            * url\n            * method\n            * user_agent\n            * client_ip\n            * status\n            * content_length\n        :param value: status and content_length are int and for other\n            supported keys string should be used.\n        \"\"\"\n        self._check_ended()\n\n        if value is None:\n            return\n\n        if key == http.STATUS:\n            if isinstance(value, str):\n                value = int(value)\n            self.apply_status_code(value)\n\n        if key in http.request_keys:\n            if 'request' not in self.http:\n                self.http['request'] = {}\n            self.http['request'][key] = value\n        elif key in http.response_keys:\n            if 'response' not in self.http:\n                self.http['response'] = {}\n            self.http['response'][key] = value\n        else:\n            log.warning(\"ignoring unsupported key %s in http meta.\", key)\n\n    def 
put_annotation(self, key, value):\n        \"\"\"\n        Annotate segment or subsegment with a key-value pair.\n        Annotations will be indexed for later search query.\n\n        :param str key: annotation key\n        :param object value: annotation value. Any type other than\n            string/number/bool will be dropped\n        \"\"\"\n        self._check_ended()\n\n        if not isinstance(key, str):\n            log.warning(\"ignoring non string type annotation key with type %s.\", type(key))\n            return\n\n        if not isinstance(value, annotation_value_types):\n            log.warning(\"ignoring unsupported annotation value type %s.\", type(value))\n            return\n\n        if any(character not in _valid_annotation_key_characters for character in key):\n            log.warning(\"ignoring annnotation with unsupported characters in key: '%s'.\", key)\n            return\n\n        self.annotations[key] = value\n\n    def put_metadata(self, key, value, namespace='default'):\n        \"\"\"\n        Add metadata to segment or subsegment. Metadata is not indexed\n        but can be later retrieved by BatchGetTraces API.\n\n        :param str namespace: optional. Default namespace is `default`.\n            It must be a string and prefix `AWS.` is reserved.\n        :param str key: metadata key under specified namespace\n        :param object value: any object that can be serialized into JSON string\n        \"\"\"\n        self._check_ended()\n\n        if not isinstance(namespace, str):\n            log.warning(\"ignoring non string type metadata namespace\")\n            return\n\n        if namespace.startswith('AWS.'):\n            log.warning(\"Prefix 'AWS.' 
is reserved, drop metadata with namespace %s\", namespace)\n            return\n\n        if self.metadata.get(namespace, None):\n            self.metadata[namespace][key] = value\n        else:\n            self.metadata[namespace] = {key: value}\n\n    def set_aws(self, aws_meta):\n        \"\"\"\n        set aws section of the entity.\n        This method is called by global recorder and botocore patcher\n        to provide additonal information about AWS runtime.\n        It is not recommended to manually set aws section.\n        \"\"\"\n        self._check_ended()\n        self.aws = aws_meta\n\n    def add_throttle_flag(self):\n        self.throttle = True\n\n    def add_fault_flag(self):\n        self.fault = True\n\n    def add_error_flag(self):\n        self.error = True\n\n    def apply_status_code(self, status_code):\n        \"\"\"\n        When a trace entity is generated under the http context,\n        the status code will affect this entity's fault/error/throttle flags.\n        Flip these flags based on status code.\n        \"\"\"\n        self._check_ended()\n        if not status_code:\n            return\n\n        if status_code >= 500:\n            self.add_fault_flag()\n        elif status_code == 429:\n            self.add_throttle_flag()\n            self.add_error_flag()\n        elif status_code >= 400:\n            self.add_error_flag()\n\n    def add_exception(self, exception, stack, remote=False):\n        \"\"\"\n        Add an exception to trace entities.\n\n        :param Exception exception: the caught exception.\n        :param list stack: the output from python built-in\n            `traceback.extract_stack()`.\n        :param bool remote: If False it means it's a client error\n            instead of a downstream service.\n        \"\"\"\n        self._check_ended()\n        self.add_fault_flag()\n\n        if hasattr(exception, '_recorded'):\n            setattr(self, 'cause', getattr(exception, '_cause_id'))\n            
return\n\n        if not isinstance(self.cause, dict):\n            log.warning(\"The current cause object is not a dict but an id: {}. Resetting the cause and recording the \"\n                        \"current exception\".format(self.cause))\n            self.cause = {}\n\n        if 'exceptions' in self.cause:\n            exceptions = self.cause['exceptions']\n        else:\n            exceptions = []\n\n        exceptions.append(Throwable(exception, stack, remote))\n\n        self.cause['exceptions'] = exceptions\n        self.cause['working_directory'] = os.getcwd()\n\n    def save_origin_trace_header(self, trace_header):\n        \"\"\"\n        Temporarily store additional data fields in trace header\n        to the entity for later propagation. The data will be\n        cleaned up upon serialization.\n        \"\"\"\n        setattr(self, ORIGIN_TRACE_HEADER_ATTR_KEY, trace_header)\n\n    def get_origin_trace_header(self):\n        \"\"\"\n        Retrieve saved trace header data.\n        \"\"\"\n        return getattr(self, ORIGIN_TRACE_HEADER_ATTR_KEY, None)\n\n    def serialize(self):\n        \"\"\"\n        Serialize to JSON document that can be accepted by the\n        X-Ray backend service. 
It uses json to perform serialization.\n        \"\"\"\n        return json.dumps(self.to_dict(), default=str)\n\n    def to_dict(self):\n        \"\"\"\n        Convert Entity(Segment/Subsegment) object to dict\n        with required properties that have non-empty values.\n        \"\"\"\n        entity_dict = {}\n\n        for key, value in vars(self).items():\n            if isinstance(value, bool) or value:\n                if key == 'subsegments':\n                    # child subsegments are stored as List\n                    subsegments = []\n                    for subsegment in value:\n                        subsegments.append(subsegment.to_dict())\n                    entity_dict[key] = subsegments\n                elif key == 'cause':\n                    if isinstance(self.cause, dict):\n                        entity_dict[key] = {}\n                        entity_dict[key]['working_directory'] = self.cause['working_directory']\n                        # exceptions are stored as List\n                        throwables = []\n                        for throwable in value['exceptions']:\n                            throwables.append(throwable.to_dict())\n                        entity_dict[key]['exceptions'] = throwables\n                    else:\n                        entity_dict[key] = self.cause\n                elif key == 'metadata':\n                    entity_dict[key] = metadata_to_dict(value)\n                elif key != 'sampled' and key != ORIGIN_TRACE_HEADER_ATTR_KEY:\n                    entity_dict[key] = value\n\n        return entity_dict\n\n    def _check_ended(self):\n        if not self.in_progress:\n            raise AlreadyEndedException(\"Already ended segment and subsegment cannot be modified.\")\n\n    def _generate_random_id(self):\n        \"\"\"\n        Generate a random 16-digit hex str.\n        This is used for generating segment/subsegment id.\n        \"\"\"\n        return 
binascii.b2a_hex(os.urandom(8)).decode('utf-8')\n"
  },
  {
    "path": "aws_xray_sdk/core/models/facade_segment.py",
    "content": "from .segment import Segment\nfrom ..exceptions.exceptions import FacadeSegmentMutationException\n\n\nMUTATION_UNSUPPORTED_MESSAGE = 'FacadeSegments cannot be mutated.'\n\n\nclass FacadeSegment(Segment):\n    \"\"\"\n    This type of segment should only be used in an AWS Lambda environment.\n    It holds the same id, traceid and sampling decision as\n    the segment generated by Lambda service but its properties cannot\n    be mutated except for its subsegments. If this segment is created\n    before Lambda worker finishes initializatioin, all the child\n    subsegments will be discarded.\n    \"\"\"\n    def __init__(self, name, entityid, traceid, sampled):\n\n        self.initializing = self._is_initializing(\n            entityid=entityid,\n            traceid=traceid,\n            sampled=sampled,\n        )\n\n        super().__init__(\n            name=name,\n            entityid=entityid,\n            traceid=traceid,\n            sampled=sampled,\n        )\n\n    def close(self, end_time=None):\n        \"\"\"\n        Unsupported operation. Will raise an exception.\n        \"\"\"\n        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)\n\n    def put_http_meta(self, key, value):\n        \"\"\"\n        Unsupported operation. Will raise an exception.\n        \"\"\"\n        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)\n\n    def put_annotation(self, key, value):\n        \"\"\"\n        Unsupported operation. Will raise an exception.\n        \"\"\"\n        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)\n\n    def put_metadata(self, key, value, namespace='default'):\n        \"\"\"\n        Unsupported operation. Will raise an exception.\n        \"\"\"\n        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)\n\n    def set_aws(self, aws_meta):\n        \"\"\"\n        Unsupported operation. 
Will raise an exception.\n        \"\"\"\n        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)\n\n    def set_user(self, user):\n        \"\"\"\n        Unsupported operation. Will raise an exception.\n        \"\"\"\n        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)\n\n    def add_throttle_flag(self):\n        \"\"\"\n        Unsupported operation. Will raise an exception.\n        \"\"\"\n        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)\n\n    def add_fault_flag(self):\n        \"\"\"\n        Unsupported operation. Will raise an exception.\n        \"\"\"\n        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)\n\n    def add_error_flag(self):\n        \"\"\"\n        Unsupported operation. Will raise an exception.\n        \"\"\"\n        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)\n\n    def add_exception(self, exception, stack, remote=False):\n        \"\"\"\n        Unsupported operation. Will raise an exception.\n        \"\"\"\n        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)\n\n    def apply_status_code(self, status_code):\n        \"\"\"\n        Unsupported operation. Will raise an exception.\n        \"\"\"\n        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)\n\n    def serialize(self):\n        \"\"\"\n        Unsupported operation. Will raise an exception.\n        \"\"\"\n        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)\n\n    def ready_to_send(self):\n        \"\"\"\n        Facade segment should never be sent out. 
This always\n        return False.\n        \"\"\"\n        return False\n\n    def increment(self):\n        \"\"\"\n        Increment total subsegments counter by 1.\n        \"\"\"\n        self._subsegments_counter.increment()\n\n    def decrement_ref_counter(self):\n        \"\"\"\n        No-op\n        \"\"\"\n        pass\n\n    def _is_initializing(self, entityid, traceid, sampled):\n        return not entityid or not traceid or sampled is None\n"
  },
  {
    "path": "aws_xray_sdk/core/models/http.py",
    "content": "URL = \"url\"\nMETHOD = \"method\"\nUSER_AGENT = \"user_agent\"\nCLIENT_IP = \"client_ip\"\nX_FORWARDED_FOR = \"x_forwarded_for\"\n\nSTATUS = \"status\"\nCONTENT_LENGTH = \"content_length\"\n\nXRAY_HEADER = \"X-Amzn-Trace-Id\"\n# for proxy header re-write\nALT_XRAY_HEADER = \"HTTP_X_AMZN_TRACE_ID\"\n\nrequest_keys = (URL, METHOD, USER_AGENT, CLIENT_IP, X_FORWARDED_FOR)\nresponse_keys = (STATUS, CONTENT_LENGTH)\n"
  },
  {
    "path": "aws_xray_sdk/core/models/noop_traceid.py",
    "content": "class NoOpTraceId:\n    \"\"\"\n    A trace ID tracks the path of a request through your application.\n    A trace collects all the segments generated by a single request.\n    A trace ID is required for a segment.\n    \"\"\"\n    VERSION = '1'\n    DELIMITER = '-'\n\n    def __init__(self):\n        \"\"\"\n        Generate a no-op trace id.\n        \"\"\"\n        self.start_time = '00000000'\n        self.__number = '000000000000000000000000'\n\n    def to_id(self):\n        \"\"\"\n        Convert TraceId object to a string.\n        \"\"\"\n        return \"%s%s%s%s%s\" % (NoOpTraceId.VERSION, NoOpTraceId.DELIMITER,\n                               self.start_time,\n                               NoOpTraceId.DELIMITER, self.__number)\n"
  },
  {
    "path": "aws_xray_sdk/core/models/segment.py",
    "content": "import copy\nimport traceback\n\nfrom .entity import Entity\nfrom .traceid import TraceId\nfrom ..utils.atomic_counter import AtomicCounter\nfrom ..exceptions.exceptions import SegmentNameMissingException\n\nORIGIN_TRACE_HEADER_ATTR_KEY = '_origin_trace_header'\n\n\nclass SegmentContextManager:\n    \"\"\"\n    Wrapper for segment and recorder to provide segment context manager.\n    \"\"\"\n\n    def __init__(self, recorder, name=None, **segment_kwargs):\n        self.name = name\n        self.segment_kwargs = segment_kwargs\n        self.recorder = recorder\n        self.segment = None\n\n    def __enter__(self):\n        self.segment = self.recorder.begin_segment(\n            name=self.name, **self.segment_kwargs)\n        return self.segment\n\n    def __exit__(self, exc_type, exc_val, exc_tb):\n        if self.segment is None:\n            return\n\n        if exc_type is not None:\n            self.segment.add_exception(\n                exc_val,\n                traceback.extract_tb(\n                    exc_tb,\n                    limit=self.recorder.max_trace_back,\n                )\n            )\n        self.recorder.end_segment()\n\n\nclass Segment(Entity):\n    \"\"\"\n    The compute resources running your application logic send data\n    about their work as segments. A segment provides the resource's name,\n    details about the request, and details about the work done.\n    \"\"\"\n    def __init__(self, name, entityid=None, traceid=None,\n                 parent_id=None, sampled=True):\n        \"\"\"\n        Create a segment object.\n\n        :param str name: segment name. If not specified a\n            SegmentNameMissingException will be thrown.\n        :param str entityid: hexdigits segment id.\n        :param str traceid: The trace id of the segment.\n        :param str parent_id: The parent id of the segment. 
It comes\n            from id of an upstream segment or subsegment.\n        :param bool sampled: If False this segment will not be sent\n            to the X-Ray daemon.\n        \"\"\"\n        if not name:\n            raise SegmentNameMissingException(\"Segment name is required.\")\n\n        super().__init__(name)\n\n        if not traceid:\n            traceid = TraceId().to_id()\n        self.trace_id = traceid\n        if entityid:\n            self.id = entityid\n\n        self.in_progress = True\n        self.sampled = sampled\n        self.user = None\n        self.ref_counter = AtomicCounter()\n        self._subsegments_counter = AtomicCounter()\n\n        if parent_id:\n            self.parent_id = parent_id\n\n    def add_subsegment(self, subsegment):\n        \"\"\"\n        Add input subsegment as a child subsegment and increment\n        reference counter and total subsegments counter.\n        \"\"\"\n        super().add_subsegment(subsegment)\n        self.increment()\n\n    def increment(self):\n        \"\"\"\n        Increment reference counter to track on open subsegments\n        and total subsegments counter to track total size of subsegments\n        it currently hold.\n        \"\"\"\n        self.ref_counter.increment()\n        self._subsegments_counter.increment()\n\n    def decrement_ref_counter(self):\n        \"\"\"\n        Decrement reference counter by 1 when a subsegment is closed.\n        \"\"\"\n        self.ref_counter.decrement()\n\n    def ready_to_send(self):\n        \"\"\"\n        Return True if the segment doesn't have any open subsegments\n        and itself is not in progress.\n        \"\"\"\n        return self.ref_counter.get_current() <= 0 and not self.in_progress\n\n    def get_total_subsegments_size(self):\n        \"\"\"\n        Return the number of total subsegments regardless of open or closed.\n        \"\"\"\n        return self._subsegments_counter.get_current()\n\n    def 
decrement_subsegments_size(self):\n        \"\"\"\n        Decrement total subsegments by 1. This usually happens when\n        a subsegment is streamed out.\n        \"\"\"\n        return self._subsegments_counter.decrement()\n\n    def remove_subsegment(self, subsegment):\n        \"\"\"\n        Remove the reference of input subsegment.\n        \"\"\"\n        super().remove_subsegment(subsegment)\n        self.decrement_subsegments_size()\n\n    def set_user(self, user):\n        \"\"\"\n        set user of a segment. One segment can only have one user.\n        User is indexed and can be later queried.\n        \"\"\"\n        super()._check_ended()\n        self.user = user\n\n    def set_service(self, service_info):\n        \"\"\"\n        Add python runtime and version info.\n        This method should be only used by the recorder.\n        \"\"\"\n        self.service = service_info\n\n    def set_rule_name(self, rule_name):\n        \"\"\"\n        Add the matched centralized sampling rule name\n        if a segment is sampled because of that rule.\n        This method should be only used by the recorder.\n        \"\"\"\n        if not self.aws.get('xray', None):\n            self.aws['xray'] = {}\n        self.aws['xray']['sampling_rule_name'] = rule_name\n\n    def to_dict(self):   \n        \"\"\"\n        Convert Segment object to dict with required properties\n        that have non-empty values. \n        \"\"\" \n        segment_dict = super().to_dict()\n          \n        del segment_dict['ref_counter']\n        del segment_dict['_subsegments_counter']\n        \n        return segment_dict\n"
  },
  {
    "path": "aws_xray_sdk/core/models/subsegment.py",
    "content": "import copy\nimport traceback\n\nimport wrapt\n\nfrom .entity import Entity\nfrom ..exceptions.exceptions import SegmentNotFoundException\n\n\n# Attribute starts with _self_ to prevent wrapt proxying to underlying function\nSUBSEGMENT_RECORDING_ATTRIBUTE = '_self___SUBSEGMENT_RECORDING_ATTRIBUTE__'\n\n\ndef set_as_recording(decorated_func, wrapped):\n    # If the wrapped function has the attribute, then it has already been patched\n    setattr(decorated_func, SUBSEGMENT_RECORDING_ATTRIBUTE, hasattr(wrapped, SUBSEGMENT_RECORDING_ATTRIBUTE))\n\n\ndef is_already_recording(func):\n    # The function might have the attribute, but its value might still be false\n    # as it might be the first decorator\n    return getattr(func, SUBSEGMENT_RECORDING_ATTRIBUTE, False)\n\n\n@wrapt.decorator\ndef subsegment_decorator(wrapped, instance, args, kwargs):\n    decorated_func = wrapt.decorator(wrapped)(*args, **kwargs)\n    set_as_recording(decorated_func, wrapped)\n    return decorated_func\n\n\nclass SubsegmentContextManager:\n    \"\"\"\n    Wrapper for segment and recorder to provide segment context manager.\n    \"\"\"\n\n    def __init__(self, recorder, name=None, **subsegment_kwargs):\n        self.name = name\n        self.subsegment_kwargs = subsegment_kwargs\n        self.recorder = recorder\n        self.subsegment = None\n\n    @subsegment_decorator\n    def __call__(self, wrapped, instance, args, kwargs):\n        if is_already_recording(wrapped):\n            # The wrapped function is already decorated, the subsegment will be created later,\n            # just return the result\n            return wrapped(*args, **kwargs)\n\n        func_name = self.name\n        if not func_name:\n            func_name = wrapped.__name__\n\n        return self.recorder.record_subsegment(\n            wrapped, instance, args, kwargs,\n            name=func_name,\n            namespace='local',\n            meta_processor=None,\n        )\n\n    def __enter__(self):\n  
      self.subsegment = self.recorder.begin_subsegment(\n            name=self.name, **self.subsegment_kwargs)\n        return self.subsegment\n\n    def __exit__(self, exc_type, exc_val, exc_tb):\n        if self.subsegment is None:\n            return\n\n        if exc_type is not None:\n            self.subsegment.add_exception(\n                exc_val,\n                traceback.extract_tb(\n                    exc_tb,\n                    limit=self.recorder.max_trace_back,\n                )\n            )\n        self.recorder.end_subsegment()\n\n\nclass Subsegment(Entity):\n    \"\"\"\n    The work done in a single segment can be broke down into subsegments.\n    Subsegments provide more granular timing information and details about\n    downstream calls that your application made to fulfill the original request.\n    A subsegment can contain additional details about a call to an AWS service,\n    an external HTTP API, or an SQL database.\n    \"\"\"\n    def __init__(self, name, namespace, segment):\n        \"\"\"\n        Create a new subsegment.\n\n        :param str name: Subsegment name is required.\n        :param str namespace: The namespace of the subsegment. 
Currently\n            support `aws`, `remote` and `local`.\n        :param Segment segment: The parent segment\n        \"\"\"\n        super().__init__(name)\n\n        if not segment:\n            raise SegmentNotFoundException(\"A parent segment is required for creating subsegments.\")\n\n        self.parent_segment = segment\n        self.trace_id = segment.trace_id\n\n        self.type = 'subsegment'\n        self.namespace = namespace\n\n        self.sql = {}\n\n    def add_subsegment(self, subsegment):\n        \"\"\"\n        Add input subsegment as a child subsegment and increment\n        reference counter and total subsegments counter of the\n        parent segment.\n        \"\"\"\n        super().add_subsegment(subsegment)\n        self.parent_segment.increment()\n\n    def remove_subsegment(self, subsegment):\n        \"\"\"\n        Remove input subsegment from child subsegemnts and\n        decrement parent segment total subsegments count.\n\n        :param Subsegment: subsegment to remove.\n        \"\"\"\n        super().remove_subsegment(subsegment)\n        self.parent_segment.decrement_subsegments_size()\n\n    def close(self, end_time=None):\n        \"\"\"\n        Close the trace entity by setting `end_time`\n        and flip the in progress flag to False. Also decrement\n        parent segment's ref counter by 1.\n\n        :param float end_time: Epoch in seconds. If not specified\n            current time will be used.\n        \"\"\"\n        super().close(end_time)\n        self.parent_segment.decrement_ref_counter()\n\n    def set_sql(self, sql):\n        \"\"\"\n        Set sql related metadata. 
This function is used by patchers\n        for database connectors and is not recommended to\n        invoke manually.\n\n        :param dict sql: sql related metadata\n        \"\"\"\n        self.sql = sql\n\n    def to_dict(self): \n        \"\"\"\n        Convert Subsegment object to dict with required properties\n        that have non-empty values. \n        \"\"\"    \n        subsegment_dict = super().to_dict()\n        \n        del subsegment_dict['parent_segment']\n\n        return subsegment_dict\n"
  },
  {
    "path": "aws_xray_sdk/core/models/throwable.py",
    "content": "import copy\nimport os\nimport binascii\nimport logging\n\nlog = logging.getLogger(__name__)\n\n\nclass Throwable:\n    \"\"\"\n    An object recording exception infomation under trace entity\n    `cause` section. The information includes the stack trace,\n    working directory and message from the original exception.\n    \"\"\"\n    def __init__(self, exception, stack, remote=False):\n        \"\"\"\n        :param Exception exception: the catched exception.\n        :param list stack: the formatted stack trace gathered\n            through `traceback` module.\n        :param bool remote: If False it means it's a client error\n            instead of a downstream service.\n        \"\"\"\n        self.id = binascii.b2a_hex(os.urandom(8)).decode('utf-8')\n\n        try:\n            message = str(exception)\n            # in case there is an exception cannot be converted to str\n        except Exception:\n            message = None\n\n        # do not record non-string exception message\n        if isinstance(message, str):\n            self.message = message\n\n        self.type = type(exception).__name__\n        self.remote = remote\n\n        try:\n            self._normalize_stack_trace(stack)\n        except Exception:\n            self.stack = None\n            log.warning(\"can not parse stack trace string, ignore stack field.\")\n\n        if exception:\n            setattr(exception, '_recorded', True)\n            setattr(exception, '_cause_id', self.id)\n\t\t\t\n    def to_dict(self):  \n        \"\"\"\n        Convert Throwable object to dict with required properties that\n        have non-empty values. 
\n        \"\"\"  \n        throwable_dict = {}\n        \n        for key, value in vars(self).items():  \n            if isinstance(value, bool) or value:\n                throwable_dict[key] = value       \n        \n        return throwable_dict\n\n    def _normalize_stack_trace(self, stack):\n        if stack is None:\n            return None\n\n        self.stack = []\n\n        for entry in stack:\n            path = entry[0]\n            line = entry[1]\n            label = entry[2]\n            if 'aws_xray_sdk/' in path:\n                continue\n\n            normalized = {}\n            normalized['path'] = os.path.basename(path).replace('\\\"', ' ').strip()\n            normalized['line'] = line\n            normalized['label'] = label.strip()\n\n            self.stack.append(normalized)\n"
  },
  {
    "path": "aws_xray_sdk/core/models/trace_header.py",
    "content": "import logging\n\nlog = logging.getLogger(__name__)\n\nROOT = 'Root'\nPARENT = 'Parent'\nSAMPLE = 'Sampled'\nSELF = 'Self'\n\nHEADER_DELIMITER = \";\"\n\n\nclass TraceHeader:\n    \"\"\"\n    The sampling decision and trace ID are added to HTTP requests in\n    tracing headers named ``X-Amzn-Trace-Id``. The first X-Ray-integrated\n    service that the request hits adds a tracing header, which is read\n    by the X-Ray SDK and included in the response. Learn more about\n    `Tracing Header <http://docs.aws.amazon.com/xray/latest/devguide/xray-concepts.html#xray-concepts-tracingheader>`_.\n    \"\"\"\n    def __init__(self, root=None, parent=None, sampled=None, data=None):\n        \"\"\"\n        :param str root: trace id\n        :param str parent: parent id\n        :param int sampled: 0 means not sampled, 1 means sampled\n        :param dict data: arbitrary data fields\n        \"\"\"\n        self._root = root\n        self._parent = parent\n        self._sampled = None\n        self._data = data\n\n        if sampled is not None:\n            if sampled == '?':\n                self._sampled = sampled\n            if sampled is True or sampled == '1' or sampled == 1:\n                self._sampled = 1\n            if sampled is False or sampled == '0' or sampled == 0:\n                self._sampled = 0\n\n    @classmethod\n    def from_header_str(cls, header):\n        \"\"\"\n        Create a TraceHeader object from a tracing header string\n        extracted from a http request headers.\n        \"\"\"\n        if not header:\n            return cls()\n\n        try:\n            params = header.strip().split(HEADER_DELIMITER)\n            header_dict = {}\n            data = {}\n\n            for param in params:\n                entry = param.split('=')\n                key = entry[0]\n                if key in (ROOT, PARENT, SAMPLE):\n                    header_dict[key] = entry[1]\n                # Ignore any \"Self=\" trace ids injected 
from ALB.\n                elif key != SELF:\n                    data[key] = entry[1]\n\n            return cls(\n                root=header_dict.get(ROOT, None),\n                parent=header_dict.get(PARENT, None),\n                sampled=header_dict.get(SAMPLE, None),\n                data=data,\n            )\n\n        except Exception:\n            log.warning(\"malformed tracing header %s, ignore.\", header)\n            return cls()\n\n    def to_header_str(self):\n        \"\"\"\n        Convert to a tracing header string that can be injected to\n        outgoing http request headers.\n        \"\"\"\n        h_parts = []\n        if self.root:\n            h_parts.append(ROOT + '=' + self.root)\n        if self.parent:\n            h_parts.append(PARENT + '=' + self.parent)\n        if self.sampled is not None:\n            h_parts.append(SAMPLE + '=' + str(self.sampled))\n        if self.data:\n            for key in self.data:\n                h_parts.append(key + '=' + self.data[key])\n\n        return HEADER_DELIMITER.join(h_parts)\n\n    @property\n    def root(self):\n        \"\"\"\n        Return trace id of the header\n        \"\"\"\n        return self._root\n\n    @property\n    def parent(self):\n        \"\"\"\n        Return the parent segment id in the header\n        \"\"\"\n        return self._parent\n\n    @property\n    def sampled(self):\n        \"\"\"\n        Return the sampling decision in the header.\n        It's 0 or 1 or '?'.\n        \"\"\"\n        return self._sampled\n\n    @property\n    def data(self):\n        \"\"\"\n        Return the arbitrary fields in the trace header.\n        \"\"\"\n        return self._data\n"
  },
  {
    "path": "aws_xray_sdk/core/models/traceid.py",
    "content": "import os\nimport time\nimport binascii\n\n\nclass TraceId:\n    \"\"\"\n    A trace ID tracks the path of a request through your application.\n    A trace collects all the segments generated by a single request.\n    A trace ID is required for a segment.\n    \"\"\"\n    VERSION = '1'\n    DELIMITER = '-'\n\n    def __init__(self):\n        \"\"\"\n        Generate a random trace id.\n        \"\"\"\n        self.start_time = int(time.time())\n        self.__number = binascii.b2a_hex(os.urandom(12)).decode('utf-8')\n\n    def to_id(self):\n        \"\"\"\n        Convert TraceId object to a string.\n        \"\"\"\n        return \"%s%s%s%s%s\" % (TraceId.VERSION, TraceId.DELIMITER,\n                               format(self.start_time, 'x'),\n                               TraceId.DELIMITER, self.__number)\n"
  },
  {
    "path": "aws_xray_sdk/core/patcher.py",
    "content": "import importlib\nimport inspect\nimport logging\nimport os\nimport pkgutil\nimport re\nimport sys\nimport wrapt\n\nfrom aws_xray_sdk import global_sdk_config\nfrom .utils.compat import is_classmethod, is_instance_method\n\nlog = logging.getLogger(__name__)\n\nSUPPORTED_MODULES = (\n    'aiobotocore',\n    'botocore',\n    'pynamodb',\n    'requests',\n    'sqlite3',\n    'mysql',\n    'httplib',\n    'pymongo',\n    'pymysql',\n    'psycopg2',\n    'psycopg',\n    'pg8000',\n    'sqlalchemy_core',\n    'httpx',\n)\n\nNO_DOUBLE_PATCH = (\n    'aiobotocore',\n    'botocore',\n    'pynamodb',\n    'requests',\n    'sqlite3',\n    'mysql',\n    'pymongo',\n    'pymysql',\n    'psycopg2',\n    'psycopg',\n    'pg8000',\n    'sqlalchemy_core',\n    'httpx',\n)\n\n_PATCHED_MODULES = set()\n\n\ndef patch_all(double_patch=False):\n    \"\"\"\n    The X-Ray Python SDK supports patching aioboto3, aiobotocore, boto3, botocore, pynamodb, requests, \n    sqlite3, mysql, httplib, pymongo, pymysql, psycopg2, pg8000, sqlalchemy_core, httpx, and mysql-connector.\n\n    To patch all supported libraries::\n\n        from aws_xray_sdk.core import patch_all\n\n        patch_all()\n\n    :param bool double_patch: enable or disable patching of indirect dependencies.\n    \"\"\"\n    if double_patch:\n        patch(SUPPORTED_MODULES, raise_errors=False)\n    else:\n        patch(NO_DOUBLE_PATCH, raise_errors=False)\n\n\ndef _is_valid_import(module):\n    module = module.replace('.', '/')\n    realpath = os.path.realpath(module)\n    is_module = os.path.isdir(realpath) and (\n        os.path.isfile('{}/__init__.py'.format(module)) or os.path.isfile('{}/__init__.pyc'.format(module))\n    )\n    is_file = not is_module and (\n            os.path.isfile('{}.py'.format(module)) or os.path.isfile('{}.pyc'.format(module))\n    )\n    return is_module or is_file\n\n\ndef patch(modules_to_patch, raise_errors=True, ignore_module_patterns=None):\n    \"\"\"\n    To patch specific 
modules::\n\n        from aws_xray_sdk.core import patch\n\n        i_want_to_patch = ('botocore') # a tuple that contains the libs you want to patch\n        patch(i_want_to_patch)\n\n    :param tuple modules_to_patch: a tuple containing the list of libraries to be patched\n    \"\"\"\n    enabled = global_sdk_config.sdk_enabled()\n    if not enabled:\n        log.debug(\"Skipped patching modules %s because the SDK is currently disabled.\" % ', '.join(modules_to_patch))\n        return  # Disable module patching if the SDK is disabled.\n    modules = set()\n    for module_to_patch in modules_to_patch:\n        # boto3 depends on botocore and patching botocore is sufficient\n        if module_to_patch == 'boto3':\n            modules.add('botocore')\n        # aioboto3 depends on aiobotocore and patching aiobotocore is sufficient\n        elif module_to_patch == 'aioboto3':\n            modules.add('aiobotocore')\n        # pynamodb requires botocore to be patched as well\n        elif module_to_patch == 'pynamodb':\n            modules.add('botocore')\n            modules.add(module_to_patch)\n        else:\n            modules.add(module_to_patch)\n\n    unsupported_modules = set(module for module in modules if module not in SUPPORTED_MODULES)\n    native_modules = modules - unsupported_modules\n\n    external_modules = set(module for module in unsupported_modules if _is_valid_import(module))\n    unsupported_modules = unsupported_modules - external_modules\n\n    if unsupported_modules:\n        raise Exception('modules %s are currently not supported for patching'\n                        % ', '.join(unsupported_modules))\n\n    for m in native_modules:\n        _patch_module(m, raise_errors)\n\n    ignore_module_patterns = [re.compile(pattern) for pattern in ignore_module_patterns or []]\n    for m in external_modules:\n        _external_module_patch(m, ignore_module_patterns)\n\n\ndef _patch_module(module_to_patch, raise_errors=True):\n    try:\n        
_patch(module_to_patch)\n    except Exception:\n        if raise_errors:\n            raise\n        log.debug('failed to patch module %s', module_to_patch)\n\n\ndef _patch(module_to_patch):\n\n    path = 'aws_xray_sdk.ext.%s' % module_to_patch\n\n    if module_to_patch in _PATCHED_MODULES:\n        log.debug('%s already patched', module_to_patch)\n        return\n\n    imported_module = importlib.import_module(path)\n    imported_module.patch()\n\n    _PATCHED_MODULES.add(module_to_patch)\n    log.info('successfully patched module %s', module_to_patch)\n\n\ndef _patch_func(parent, func_name, func, modifier=lambda x: x):\n    if func_name not in parent.__dict__:\n        # Ignore functions not directly defined in parent, i.e. exclude inherited ones\n        return\n\n    from aws_xray_sdk.core import xray_recorder\n\n    capture_name = func_name\n    if func_name.startswith('__') and func_name.endswith('__'):\n        capture_name = '{}.{}'.format(parent.__name__, capture_name)\n    setattr(parent, func_name, modifier(xray_recorder.capture(name=capture_name)(func)))\n\n\ndef _patch_class(module, cls):\n    for member_name, member in inspect.getmembers(cls, inspect.isclass):\n        if member.__module__ == module.__name__:\n            # Only patch classes of the module, ignore imports\n            _patch_class(module, member)\n\n    for member_name, member in inspect.getmembers(cls, inspect.ismethod):\n        if member.__module__ == module.__name__:\n            # Only patch methods of the class defined in the module, ignore other modules\n            if is_classmethod(member):\n                # classmethods are internally generated through descriptors. 
The classmethod\n                # decorator must be the last applied, so we cannot apply another one on top\n                log.warning('Cannot automatically patch classmethod %s.%s, '\n                            'please apply decorator manually', cls.__name__, member_name)\n            else:\n                _patch_func(cls, member_name, member)\n\n    for member_name, member in inspect.getmembers(cls, inspect.isfunction):\n        if member.__module__ == module.__name__:\n            # Only patch static methods of the class defined in the module, ignore other modules\n            if is_instance_method(cls, member_name, member):\n                _patch_func(cls, member_name, member)\n            else:\n                _patch_func(cls, member_name, member, modifier=staticmethod)\n\n\ndef _on_import(module):\n    for member_name, member in inspect.getmembers(module, inspect.isfunction):\n        if member.__module__ == module.__name__:\n            # Only patch functions of the module, ignore imports\n            _patch_func(module, member_name, member)\n\n    for member_name, member in inspect.getmembers(module, inspect.isclass):\n        if member.__module__ == module.__name__:\n            # Only patch classes of the module, ignore imports\n            _patch_class(module, member)\n\n\ndef _external_module_patch(module, ignore_module_patterns):\n    if module.startswith('.'):\n        raise Exception('relative packages not supported for patching: {}'.format(module))\n\n    if module in _PATCHED_MODULES:\n        log.debug('%s already patched', module)\n    elif any(pattern.match(module) for pattern in ignore_module_patterns):\n        log.debug('%s ignored due to rules: %s', module, ignore_module_patterns)\n    else:\n        if module in sys.modules:\n            _on_import(sys.modules[module])\n        else:\n            wrapt.importer.when_imported(module)(_on_import)\n\n    for loader, submodule_name, is_module in pkgutil.iter_modules([module.replace('.', 
'/')]):\n        submodule = '.'.join([module, submodule_name])\n        if is_module:\n            _external_module_patch(submodule, ignore_module_patterns)\n        else:\n            if submodule in _PATCHED_MODULES:\n                log.debug('%s already patched', submodule)\n                continue\n            elif any(pattern.match(submodule) for pattern in ignore_module_patterns):\n                log.debug('%s ignored due to rules: %s', submodule, ignore_module_patterns)\n                continue\n\n            if submodule in sys.modules:\n                _on_import(sys.modules[submodule])\n            else:\n                wrapt.importer.when_imported(submodule)(_on_import)\n\n            _PATCHED_MODULES.add(submodule)\n            log.info('successfully patched module %s', submodule)\n\n    if module not in _PATCHED_MODULES:\n        _PATCHED_MODULES.add(module)\n        log.info('successfully patched module %s', module)\n"
  },
  {
    "path": "aws_xray_sdk/core/plugins/__init__.py",
    "content": ""
  },
  {
    "path": "aws_xray_sdk/core/plugins/ec2_plugin.py",
    "content": "import json\nimport logging\nfrom urllib.request import Request, urlopen\n\nlog = logging.getLogger(__name__)\n\nSERVICE_NAME = 'ec2'\nORIGIN = 'AWS::EC2::Instance'\nIMDS_URL = 'http://169.254.169.254/latest/'\n\n\ndef initialize():\n    \"\"\"\n    Try to get EC2 instance-id and AZ if running on EC2\n    by querying http://169.254.169.254/latest/meta-data/.\n    If not continue.\n    \"\"\"\n    global runtime_context\n\n    # get session token with 60 seconds TTL to not have the token lying around for a long time\n    token = get_token()\n\n    # get instance metadata\n    runtime_context = get_metadata(token)\n\n\ndef get_token():\n    \"\"\"\n    Get the session token for IMDSv2 endpoint valid for 60 seconds\n    by specifying the X-aws-ec2-metadata-token-ttl-seconds header.\n    \"\"\"\n    token = None\n    try:\n        headers = {\"X-aws-ec2-metadata-token-ttl-seconds\": \"60\"}\n        token = do_request(url=IMDS_URL + \"api/token\",\n                           headers=headers,\n                           method=\"PUT\")\n    except Exception:\n        log.warning(\"Failed to get token for IMDSv2\")\n    return token\n\n\ndef get_metadata(token=None):\n    try:\n        header = None\n        if token:\n            header = {\"X-aws-ec2-metadata-token\": token}\n\n        metadata_json = do_request(url=IMDS_URL + \"dynamic/instance-identity/document\",\n                                   headers=header,\n                                   method=\"GET\")\n\n        return parse_metadata_json(metadata_json)\n    except Exception:\n        log.warning(\"Failed to get EC2 metadata\")\n        return {}\n\n\ndef parse_metadata_json(json_str):\n    data = json.loads(json_str)\n    dict = {\n        'instance_id': data['instanceId'],\n        'availability_zone': data['availabilityZone'],\n        'instance_type': data['instanceType'],\n        'ami_id': data['imageId']\n    }\n\n    return dict\n\n\ndef do_request(url, headers=None, 
method=\"GET\"):\n    if headers is None:\n        headers = {}\n\n    if url is None:\n        return None\n\n    req = Request(url=url)\n    req.headers = headers\n    req.method = method\n    res = urlopen(req, timeout=1)\n    return res.read().decode('utf-8')\n"
  },
  {
    "path": "aws_xray_sdk/core/plugins/ecs_plugin.py",
    "content": "import socket\nimport logging\n\nlog = logging.getLogger(__name__)\n\nSERVICE_NAME = 'ecs'\nORIGIN = 'AWS::ECS::Container'\n\n\ndef initialize():\n    global runtime_context\n    try:\n        runtime_context = {}\n        host_name = socket.gethostname()\n        if host_name:\n            runtime_context['container'] = host_name\n\n    except Exception:\n        runtime_context = None\n        log.warning(\"failed to get ecs container metadata\")\n"
  },
  {
    "path": "aws_xray_sdk/core/plugins/elasticbeanstalk_plugin.py",
    "content": "import logging\nimport json\n\nlog = logging.getLogger(__name__)\n\nCONF_PATH = '/var/elasticbeanstalk/xray/environment.conf'\nSERVICE_NAME = 'elastic_beanstalk'\nORIGIN = 'AWS::ElasticBeanstalk::Environment'\n\n\ndef initialize():\n    global runtime_context\n    try:\n        with open(CONF_PATH) as f:\n            runtime_context = json.load(f)\n    except Exception:\n        runtime_context = None\n        log.warning(\"failed to load Elastic Beanstalk environment config file\")\n"
  },
  {
    "path": "aws_xray_sdk/core/plugins/utils.py",
    "content": "import importlib\nfrom ..exceptions.exceptions import MissingPluginNames\n\nmodule_prefix = 'aws_xray_sdk.core.plugins.'\n\nPLUGIN_MAPPING = {\n    'elasticbeanstalkplugin': 'elasticbeanstalk_plugin',\n    'ec2plugin': 'ec2_plugin',\n    'ecsplugin': 'ecs_plugin'\n}\n\n\ndef get_plugin_modules(plugins):\n    \"\"\"\n    Get plugin modules from input strings\n    :param tuple plugins: a tuple of plugin names in str\n    \"\"\"\n    if not plugins:\n        raise MissingPluginNames(\"input plugin names are required\")\n\n    modules = []\n\n    for plugin in plugins:\n        short_name = PLUGIN_MAPPING.get(plugin.lower(), plugin.lower())\n        full_path = '%s%s' % (module_prefix, short_name)\n        modules.append(importlib.import_module(full_path))\n\n    return tuple(modules)\n"
  },
  {
    "path": "aws_xray_sdk/core/recorder.py",
    "content": "import copy\nimport json\nimport logging\nimport os\nimport platform\nimport time\n\nfrom aws_xray_sdk import global_sdk_config\nfrom aws_xray_sdk.version import VERSION\nfrom .models.segment import Segment, SegmentContextManager\nfrom .models.subsegment import Subsegment, SubsegmentContextManager\nfrom .models.default_dynamic_naming import DefaultDynamicNaming\nfrom .models.dummy_entities import DummySegment, DummySubsegment\nfrom .emitters.udp_emitter import UDPEmitter\nfrom .streaming.default_streaming import DefaultStreaming\nfrom .context import Context\nfrom .daemon_config import DaemonConfig\nfrom .plugins.utils import get_plugin_modules\nfrom .lambda_launcher import check_in_lambda\nfrom .exceptions.exceptions import SegmentNameMissingException, SegmentNotFoundException\nfrom .utils import stacktrace\n\nlog = logging.getLogger(__name__)\n\nTRACING_NAME_KEY = 'AWS_XRAY_TRACING_NAME'\nDAEMON_ADDR_KEY = 'AWS_XRAY_DAEMON_ADDRESS'\nCONTEXT_MISSING_KEY = 'AWS_XRAY_CONTEXT_MISSING'\n\nXRAY_META = {\n    'xray': {\n        'sdk': 'X-Ray for Python',\n        'sdk_version': VERSION\n    }\n}\n\nSERVICE_INFO = {\n    'runtime': platform.python_implementation(),\n    'runtime_version': platform.python_version()\n}\n\n\nclass AWSXRayRecorder:\n    \"\"\"\n    A global AWS X-Ray recorder that will begin/end segments/subsegments\n    and send them to the X-Ray daemon. 
This recorder is initialized during\n    loading time so you can use::\n\n        from aws_xray_sdk.core import xray_recorder\n\n    in your module to access it\n    \"\"\"\n    def __init__(self):\n\n        self._streaming = DefaultStreaming()\n        context = check_in_lambda()\n        if context:\n            # Special handling when running on AWS Lambda.\n            from .sampling.local.sampler import LocalSampler\n            self._context = context\n            self.streaming_threshold = 0\n            self._sampler = LocalSampler()\n        else:\n            from .sampling.sampler import DefaultSampler\n            self._context = Context()\n            self._sampler = DefaultSampler()\n\n        self._emitter = UDPEmitter()\n        self._sampling = True\n        self._max_trace_back = 10\n        self._plugins = None\n        self._service = os.getenv(TRACING_NAME_KEY)\n        self._dynamic_naming = None\n        self._aws_metadata = copy.deepcopy(XRAY_META)\n        self._origin = None\n        self._stream_sql = True\n\n        if type(self.sampler).__name__ == 'DefaultSampler':\n            self.sampler.load_settings(DaemonConfig(), self.context)\n\n    def configure(self, sampling=None, plugins=None,\n                  context_missing=None, sampling_rules=None,\n                  daemon_address=None, service=None,\n                  context=None, emitter=None, streaming=None,\n                  dynamic_naming=None, streaming_threshold=None,\n                  max_trace_back=None, sampler=None,\n                  stream_sql=True):\n        \"\"\"Configure global X-Ray recorder.\n\n        Configure needs to run before patching thrid party libraries\n        to avoid creating dangling subsegment.\n\n        :param bool sampling: If sampling is enabled, every time the recorder\n            creates a segment it decides whether to send this segment to\n            the X-Ray daemon. 
This setting is not used if the recorder\n            is running in AWS Lambda. The recorder always respect the incoming\n            sampling decisions regardless of this setting.\n        :param sampling_rules: Pass a set of local custom sampling rules.\n            Can be an absolute path of the sampling rule config json file\n            or a dictionary that defines those rules. This will also be the\n            fallback rules in case of centralized sampling opted-in while\n            the cetralized sampling rules are not available.\n        :param sampler: The sampler used to make sampling decisions. The SDK\n            provides two built-in samplers. One is centralized rules based and\n            the other is local rules based. The former is the default.\n        :param tuple plugins: plugins that add extra metadata to each segment.\n            Currently available plugins are EC2Plugin, ECS plugin and\n            ElasticBeanstalkPlugin.\n            If you want to disable all previously enabled plugins,\n            pass an empty tuple ``()``.\n        :param str context_missing: recorder behavior when it tries to mutate\n            a segment or add a subsegment but there is no active segment.\n            RUNTIME_ERROR means the recorder will raise an exception.\n            LOG_ERROR means the recorder will only log the error and\n            do nothing.\n            IGNORE_ERROR means the recorder will do nothing\n        :param str daemon_address: The X-Ray daemon address where the recorder\n            sends data to.\n        :param str service: default segment name if creating a segment without\n            providing a name.\n        :param context: You can pass your own implementation of context storage\n            for active segment/subsegment by overriding the default\n            ``Context`` class.\n        :param emitter: The emitter that sends a segment/subsegment to\n            the X-Ray daemon. 
You can override ``UDPEmitter`` class.\n        :param dynamic_naming: a string that defines a pattern that host names\n            should match. Alternatively you can pass a module which\n            overrides ``DefaultDynamicNaming`` module.\n        :param streaming: The streaming module to stream out trace documents\n            when they grow too large. You can override ``DefaultStreaming``\n            class to have your own implementation of the streaming process.\n        :param streaming_threshold: If breaks within a single segment it will\n            start streaming out children subsegments. By default it is the\n            maximum number of subsegments within a segment.\n        :param int max_trace_back: The maxinum number of stack traces recorded\n            by auto-capture. Lower this if a single document becomes too large.\n        :param bool stream_sql: Whether SQL query texts should be streamed.\n\n        Environment variables AWS_XRAY_DAEMON_ADDRESS, AWS_XRAY_CONTEXT_MISSING\n        and AWS_XRAY_TRACING_NAME respectively overrides arguments\n        daemon_address, context_missing and service.\n        \"\"\"\n\n        if sampling is not None:\n            self.sampling = sampling\n        if sampler:\n            self.sampler = sampler\n        if service:\n            self.service = os.getenv(TRACING_NAME_KEY, service)\n        if sampling_rules:\n            self._load_sampling_rules(sampling_rules)\n        if emitter:\n            self.emitter = emitter\n        if daemon_address:\n            self.emitter.set_daemon_address(os.getenv(DAEMON_ADDR_KEY, daemon_address))\n        if context:\n            self.context = context\n        if context_missing:\n            self.context.context_missing = os.getenv(CONTEXT_MISSING_KEY, context_missing)\n        if dynamic_naming:\n            self.dynamic_naming = dynamic_naming\n        if streaming:\n            self.streaming = streaming\n        if streaming_threshold is not None:\n          
  self.streaming_threshold = streaming_threshold\n        if type(max_trace_back) == int and max_trace_back >= 0:\n            self.max_trace_back = max_trace_back\n        if stream_sql is not None:\n            self.stream_sql = stream_sql\n\n        if plugins:\n            plugin_modules = get_plugin_modules(plugins)\n            for plugin in plugin_modules:\n                plugin.initialize()\n                if plugin.runtime_context:\n                    self._aws_metadata[plugin.SERVICE_NAME] = plugin.runtime_context\n                    self._origin = plugin.ORIGIN\n        # handling explicitly using empty list to clean up plugins.\n        elif plugins is not None:\n            self._aws_metadata = copy.deepcopy(XRAY_META)\n            self._origin = None\n\n        if type(self.sampler).__name__ == 'DefaultSampler':\n            self.sampler.load_settings(DaemonConfig(daemon_address),\n                                       self.context, self._origin)\n\n    def in_segment(self, name=None, **segment_kwargs):\n        \"\"\"\n        Return a segment context manager.\n\n        :param str name: the name of the segment\n        :param dict segment_kwargs: remaining arguments passed directly to `begin_segment`\n        \"\"\"\n        return SegmentContextManager(self, name=name, **segment_kwargs)\n\n    def in_subsegment(self, name=None, **subsegment_kwargs):\n        \"\"\"\n        Return a subsegment context manager.\n\n        :param str name: the name of the subsegment\n        :param dict subsegment_kwargs: remaining arguments passed directly to `begin_subsegment`\n        \"\"\"\n        return SubsegmentContextManager(self, name=name, **subsegment_kwargs)\n\n    def begin_segment(self, name=None, traceid=None,\n                      parent_id=None, sampling=None):\n        \"\"\"\n        Begin a segment on the current thread and return it. The recorder\n        only keeps one segment at a time. 
Create the second one without\n        closing existing one will overwrite it.\n\n        :param str name: the name of the segment\n        :param str traceid: trace id of the segment\n        :param int sampling: 0 means not sampled, 1 means sampled\n        \"\"\"\n        # Disable the recorder; return a generated dummy segment.\n        if not global_sdk_config.sdk_enabled():\n            return DummySegment(global_sdk_config.DISABLED_ENTITY_NAME)\n\n        seg_name = name or self.service\n        if not seg_name:\n            raise SegmentNameMissingException(\"Segment name is required.\")\n\n        # Sampling decision is None if not sampled.\n        # In a sampled case it could be either a string or 1\n        # depending on if centralized or local sampling rule takes effect.\n        decision = True\n\n        # we respect the input sampling decision\n        # regardless of recorder configuration.\n        if sampling == 0:\n            decision = False\n        elif sampling:\n            decision = sampling\n        elif self.sampling:\n            decision = self._sampler.should_trace({'service': seg_name})\n\n        if not decision:\n            segment = DummySegment(seg_name)\n        else:\n            segment = Segment(name=seg_name, traceid=traceid,\n                              parent_id=parent_id)\n            self._populate_runtime_context(segment, decision)\n\n        self.context.put_segment(segment)\n        return segment\n\n    def end_segment(self, end_time=None):\n        \"\"\"\n        End the current segment and send it to X-Ray daemon\n        if it is ready to send. 
Ready means segment and\n        all its subsegments are closed.\n\n        :param float end_time: segment completion in unix epoch in seconds.\n        \"\"\"\n        # When the SDK is disabled we return\n        if not global_sdk_config.sdk_enabled():\n            return\n\n        self.context.end_segment(end_time)\n        segment = self.current_segment()\n        if segment and segment.ready_to_send():\n            self._send_segment()\n\n    def current_segment(self):\n        \"\"\"\n        Return the currently active segment. In a multithreading environment,\n        this will make sure the segment returned is the one created by the\n        same thread.\n        \"\"\"\n\n        entity = self.get_trace_entity()\n        if self._is_subsegment(entity):\n            return entity.parent_segment\n        else:\n            return entity\n\n    def _begin_subsegment_helper(self, name, namespace='local', beginWithoutSampling=False):\n        '''\n        Helper method to begin_subsegment and begin_subsegment_without_sampling\n        '''\n        # Generating the parent dummy segment is necessary.\n        # We don't need to store anything in context. 
Assumption here\n        # is that we only work with recorder-level APIs.\n        if not global_sdk_config.sdk_enabled():\n            return DummySubsegment(DummySegment(global_sdk_config.DISABLED_ENTITY_NAME))\n\n        segment = self.current_segment()\n        if not segment:\n            log.warning(\"No segment found, cannot begin subsegment %s.\" % name)\n            return None\n\n        current_entity = self.get_trace_entity()\n        if not current_entity.sampled or beginWithoutSampling:\n            subsegment = DummySubsegment(segment, name)\n        else:\n            subsegment = Subsegment(name, namespace, segment)\n\n        self.context.put_subsegment(subsegment)\n        return subsegment\n\n\n\n    def begin_subsegment(self, name, namespace='local'):\n        \"\"\"\n        Begin a new subsegment.\n        If there is open subsegment, the newly created subsegment will be the\n        child of latest opened subsegment.\n        If not, it will be the child of the current open segment.\n\n        :param str name: the name of the subsegment.\n        :param str namespace: currently can only be 'local', 'remote', 'aws'.\n        \"\"\"\n        return self._begin_subsegment_helper(name, namespace)\n\n\n    def begin_subsegment_without_sampling(self, name):\n        \"\"\"\n        Begin a new unsampled subsegment.\n        If there is open subsegment, the newly created subsegment will be the\n        child of latest opened subsegment.\n        If not, it will be the child of the current open segment.\n\n        :param str name: the name of the subsegment.\n        \"\"\"\n        return self._begin_subsegment_helper(name, beginWithoutSampling=True)\n\n    def current_subsegment(self):\n        \"\"\"\n        Return the latest opened subsegment. 
In a multithreading environment,\n        this will make sure the subsegment returned is one created\n        by the same thread.\n        \"\"\"\n        if not global_sdk_config.sdk_enabled():\n            return DummySubsegment(DummySegment(global_sdk_config.DISABLED_ENTITY_NAME))\n\n        entity = self.get_trace_entity()\n        if self._is_subsegment(entity):\n            return entity\n        else:\n            return None\n\n    def end_subsegment(self, end_time=None):\n        \"\"\"\n        End the current active subsegment. If this is the last one open\n        under its parent segment, the entire segment will be sent.\n\n        :param float end_time: subsegment compeletion in unix epoch in seconds.\n        \"\"\"\n        if not global_sdk_config.sdk_enabled():\n            return\n\n        if not self.context.end_subsegment(end_time):\n            return\n\n        # if segment is already close, we check if we can send entire segment\n        # otherwise we check if we need to stream some subsegments\n        if self.current_segment().ready_to_send():\n            self._send_segment()\n        else:\n            self.stream_subsegments()\n\n    def put_annotation(self, key, value):\n        \"\"\"\n        Annotate current active trace entity with a key-value pair.\n        Annotations will be indexed for later search query.\n\n        :param str key: annotation key\n        :param object value: annotation value. 
Any type other than\n            string/number/bool will be dropped\n        \"\"\"\n        if not global_sdk_config.sdk_enabled():\n            return\n        entity = self.get_trace_entity()\n        if entity and entity.sampled:\n            entity.put_annotation(key, value)\n\n    def put_metadata(self, key, value, namespace='default'):\n        \"\"\"\n        Add metadata to the current active trace entity.\n        Metadata is not indexed but can be later retrieved\n        by BatchGetTraces API.\n\n        :param str namespace: optional. Default namespace is `default`.\n            It must be a string and prefix `AWS.` is reserved.\n        :param str key: metadata key under specified namespace\n        :param object value: any object that can be serialized into JSON string\n        \"\"\"\n        if not global_sdk_config.sdk_enabled():\n            return\n        entity = self.get_trace_entity()\n        if entity and entity.sampled:\n            entity.put_metadata(key, value, namespace)\n\n    def is_sampled(self):\n        \"\"\"\n        Check if the current trace entity is sampled or not.\n        Return `False` if no active entity found.\n        \"\"\"\n        if not global_sdk_config.sdk_enabled():\n            # Disabled SDK is never sampled\n            return False\n        entity = self.get_trace_entity()\n        if entity:\n            return entity.sampled\n        return False\n\n    def get_trace_entity(self):\n        \"\"\"\n        A pass through method to ``context.get_trace_entity()``.\n        \"\"\"\n        return self.context.get_trace_entity()\n\n    def set_trace_entity(self, trace_entity):\n        \"\"\"\n        A pass through method to ``context.set_trace_entity()``.\n        \"\"\"\n        self.context.set_trace_entity(trace_entity)\n\n    def clear_trace_entities(self):\n        \"\"\"\n        A pass through method to ``context.clear_trace_entities()``.\n        \"\"\"\n        
self.context.clear_trace_entities()\n\n    def stream_subsegments(self):\n        \"\"\"\n        Stream all closed subsegments to the daemon\n        and remove reference to the parent segment.\n        No-op for a not sampled segment.\n        \"\"\"\n        segment = self.current_segment()\n\n        if self.streaming.is_eligible(segment):\n            self.streaming.stream(segment, self._stream_subsegment_out)\n\n    def capture(self, name=None):\n        \"\"\"\n        A decorator that records enclosed function in a subsegment.\n        It only works with synchronous functions.\n\n        params str name: The name of the subsegment. If not specified\n        the function name will be used.\n        \"\"\"\n        return self.in_subsegment(name=name)\n\n    def record_subsegment(self, wrapped, instance, args, kwargs, name,\n                          namespace, meta_processor):\n\n        subsegment = self.begin_subsegment(name, namespace)\n\n        exception = None\n        stack = None\n        return_value = None\n\n        try:\n            return_value = wrapped(*args, **kwargs)\n            return return_value\n        except Exception as e:\n            exception = e\n            stack = stacktrace.get_stacktrace(limit=self.max_trace_back)\n            raise\n        finally:\n            # No-op if subsegment is `None` due to `LOG_ERROR`.\n            if subsegment is not None:\n                end_time = time.time()\n                if callable(meta_processor):\n                    meta_processor(\n                        wrapped=wrapped,\n                        instance=instance,\n                        args=args,\n                        kwargs=kwargs,\n                        return_value=return_value,\n                        exception=exception,\n                        subsegment=subsegment,\n                        stack=stack,\n                    )\n                elif exception:\n                    subsegment.add_exception(exception, 
stack)\n\n                self.end_subsegment(end_time)\n\n    def _populate_runtime_context(self, segment, sampling_decision):\n        if self._origin:\n            setattr(segment, 'origin', self._origin)\n\n        segment.set_aws(copy.deepcopy(self._aws_metadata))\n        segment.set_service(SERVICE_INFO)\n\n        if isinstance(sampling_decision, str):\n            segment.set_rule_name(sampling_decision)\n\n    def _send_segment(self):\n        \"\"\"\n        Send the current segment to X-Ray daemon if it is present and\n        sampled, then clean up context storage.\n        The emitter will handle failures.\n        \"\"\"\n        segment = self.current_segment()\n\n        if not segment:\n            return\n\n        if segment.sampled:\n            self.emitter.send_entity(segment)\n        self.clear_trace_entities()\n\n    def _stream_subsegment_out(self, subsegment):\n        log.debug(\"streaming subsegments...\")\n        if subsegment.sampled:\n            self.emitter.send_entity(subsegment)\n\n    def _load_sampling_rules(self, sampling_rules):\n\n        if not sampling_rules:\n            return\n\n        if isinstance(sampling_rules, dict):\n            self.sampler.load_local_rules(sampling_rules)\n        else:\n            with open(sampling_rules) as f:\n                self.sampler.load_local_rules(json.load(f))\n\n    def _is_subsegment(self, entity):\n\n        return (hasattr(entity, 'type') and entity.type == 'subsegment')\n\n    @property\n    def enabled(self):\n        return self._enabled\n\n    @enabled.setter\n    def enabled(self, value):\n        self._enabled = value\n\n    @property\n    def sampling(self):\n        return self._sampling\n\n    @sampling.setter\n    def sampling(self, value):\n        self._sampling = value\n\n    @property\n    def sampler(self):\n        return self._sampler\n\n    @sampler.setter\n    def sampler(self, value):\n        self._sampler = value\n\n    @property\n    def 
service(self):\n        return self._service\n\n    @service.setter\n    def service(self, value):\n        self._service = value\n\n    @property\n    def dynamic_naming(self):\n        return self._dynamic_naming\n\n    @dynamic_naming.setter\n    def dynamic_naming(self, value):\n        if isinstance(value, str):\n            self._dynamic_naming = DefaultDynamicNaming(value, self.service)\n        else:\n            self._dynamic_naming = value\n\n    @property\n    def context(self):\n        return self._context\n\n    @context.setter\n    def context(self, cxt):\n        self._context = cxt\n\n    @property\n    def emitter(self):\n        return self._emitter\n\n    @emitter.setter\n    def emitter(self, value):\n        self._emitter = value\n\n    @property\n    def streaming(self):\n        return self._streaming\n\n    @streaming.setter\n    def streaming(self, value):\n        self._streaming = value\n\n    @property\n    def streaming_threshold(self):\n        \"\"\"\n        Proxy method to Streaming module's `streaming_threshold` property.\n        \"\"\"\n        return self.streaming.streaming_threshold\n\n    @streaming_threshold.setter\n    def streaming_threshold(self, value):\n        \"\"\"\n        Proxy method to Streaming module's `streaming_threshold` property.\n        \"\"\"\n        self.streaming.streaming_threshold = value\n\n    @property\n    def max_trace_back(self):\n        return self._max_trace_back\n\n    @max_trace_back.setter\n    def max_trace_back(self, value):\n        self._max_trace_back = value\n\n    @property\n    def stream_sql(self):\n        return self._stream_sql\n\n    @stream_sql.setter\n    def stream_sql(self, value):\n        self._stream_sql = value\n"
  },
  {
    "path": "aws_xray_sdk/core/sampling/__init__.py",
    "content": ""
  },
  {
    "path": "aws_xray_sdk/core/sampling/connector.py",
    "content": "import binascii\nimport os\nimport time\nfrom datetime import datetime\n\nimport botocore.session\nfrom botocore import UNSIGNED\nfrom botocore.client import Config\n\nfrom .sampling_rule import SamplingRule\nfrom aws_xray_sdk.core.models.dummy_entities import DummySegment\nfrom aws_xray_sdk.core.context import Context\n\n\nclass ServiceConnector:\n    \"\"\"\n    Connector class that translates Centralized Sampling poller functions to\n    actual X-Ray back-end APIs and communicates with X-Ray daemon as the\n    signing proxy.\n    \"\"\"\n    def __init__(self):\n        self._xray_client = self._create_xray_client()\n        self._client_id = binascii.b2a_hex(os.urandom(12)).decode('utf-8')\n        self._context = Context()\n\n    def _context_wrapped(func):\n        \"\"\"\n        Wrapping boto calls with dummy segment. This is because botocore\n        has two dependencies (requests and httplib) that might be\n        monkey-patched in user code to capture subsegments. The wrapper\n        makes sure there is always a non-sampled segment present when\n        the connector makes an  AWS API call using botocore.\n        This context wrapper doesn't work with asyncio based context\n        as event loop is not thread-safe.\n        \"\"\"\n        def wrapper(self, *args, **kargs):\n            if type(self.context).__name__ == 'AsyncContext':\n                return func(self, *args, **kargs)\n            segment = DummySegment()\n            self.context.set_trace_entity(segment)\n            result = func(self, *args, **kargs)\n            self.context.clear_trace_entities()\n            return result\n\n        return wrapper\n\n    @_context_wrapped\n    def fetch_sampling_rules(self):\n        \"\"\"\n        Use X-Ray botocore client to get the centralized sampling rules\n        from X-Ray service. 
The call is proxied and signed by X-Ray Daemon.\"\"\"\n        new_rules = []\n\n        resp = self._xray_client.get_sampling_rules()\n        records = resp['SamplingRuleRecords']\n\n        for record in records:\n            rule_def = record['SamplingRule']\n            if self._is_rule_valid(rule_def):\n                rule = SamplingRule(name=rule_def['RuleName'],\n                                    priority=rule_def['Priority'],\n                                    rate=rule_def['FixedRate'],\n                                    reservoir_size=rule_def['ReservoirSize'],\n                                    host=rule_def['Host'],\n                                    service=rule_def['ServiceName'],\n                                    method=rule_def['HTTPMethod'],\n                                    path=rule_def['URLPath'],\n                                    service_type=rule_def['ServiceType'])\n                new_rules.append(rule)\n\n        return new_rules\n\n    @_context_wrapped\n    def fetch_sampling_target(self, rules):\n        \"\"\"\n        Report the current statistics of sampling rules and\n        get back the new assigned quota/TTL from the X-Ray service.\n        The call is proxied and signed via X-Ray Daemon.\n        \"\"\"\n        now = int(time.time())\n        report_docs = self._generate_reporting_docs(rules, now)\n        resp = self._xray_client.get_sampling_targets(\n            SamplingStatisticsDocuments=report_docs\n        )\n        new_docs = resp['SamplingTargetDocuments']\n\n        targets_mapping = {}\n        for doc in new_docs:\n            TTL = self._dt_to_epoch(doc['ReservoirQuotaTTL']) if doc.get('ReservoirQuotaTTL', None) else None\n            target = {\n                'rate': doc['FixedRate'],\n                'quota': doc.get('ReservoirQuota', None),\n                'TTL': TTL,\n                'interval': doc.get('Interval', None),\n            }\n            
targets_mapping[doc['RuleName']] = target\n\n        return targets_mapping, self._dt_to_epoch(resp['LastRuleModification'])\n\n    def setup_xray_client(self, ip, port, client):\n        \"\"\"\n        Setup the xray client based on ip and port.\n        If a preset client is specified, ip and port\n        will be ignored.\n        \"\"\"\n        if not client:\n            client = self._create_xray_client(ip, port)\n        self._xray_client = client\n\n    @property\n    def context(self):\n        return self._context\n\n    @context.setter\n    def context(self, v):\n        self._context = v\n\n    def _generate_reporting_docs(self, rules, now):\n        report_docs = []\n\n        for rule in rules:\n            statistics = rule.snapshot_statistics()\n            doc = {\n                'RuleName': rule.name,\n                'ClientID': self._client_id,\n                'RequestCount': statistics['request_count'],\n                'BorrowCount': statistics['borrow_count'],\n                'SampledCount': statistics['sampled_count'],\n                'Timestamp': now,\n            }\n            report_docs.append(doc)\n        return report_docs\n\n    def _dt_to_epoch(self, dt):\n        \"\"\"\n        Convert a offset-aware datetime to POSIX time.\n        \"\"\"\n        # Added in python 3.3+ and directly returns POSIX time.\n        return int(dt.timestamp())\n\n    def _is_rule_valid(self, record):\n        # We currently only handle v1 sampling rules.\n        return record.get('Version', None) == 1 and \\\n            record.get('ResourceARN', None) == '*' and \\\n            record.get('ServiceType', None) and \\\n            not record.get('Attributes', None)\n\n    def _create_xray_client(self, ip='127.0.0.1', port='2000'):\n        session = botocore.session.get_session()\n        url = 'http://%s:%s' % (ip, port)\n        return session.create_client('xray', endpoint_url=url,\n                                     
region_name='us-west-2',\n                                     config=Config(signature_version=UNSIGNED),\n                                     aws_access_key_id='', aws_secret_access_key=''\n                                     )\n"
  },
  {
    "path": "aws_xray_sdk/core/sampling/local/__init__.py",
    "content": ""
  },
  {
    "path": "aws_xray_sdk/core/sampling/local/reservoir.py",
    "content": "import time\nimport threading\n\n\nclass Reservoir:\n    \"\"\"\n    Keeps track of the number of sampled segments within\n    a single second. This class is implemented to be\n    thread-safe to achieve accurate sampling.\n    \"\"\"\n    def __init__(self, traces_per_sec=0):\n        \"\"\"\n        :param int traces_per_sec: number of guaranteed\n            sampled segments.\n        \"\"\"\n        self._lock = threading.Lock()\n        self.traces_per_sec = traces_per_sec\n        self.used_this_sec = 0\n        self.this_sec = int(time.time())\n\n    def take(self):\n        \"\"\"\n        Returns True if there are segments left within the\n        current second, otherwise return False.\n        \"\"\"\n        with self._lock:\n            now = int(time.time())\n\n            if now != self.this_sec:\n                self.used_this_sec = 0\n                self.this_sec = now\n\n            if self.used_this_sec >= self.traces_per_sec:\n                return False\n\n            self.used_this_sec = self.used_this_sec + 1\n            return True\n"
  },
  {
    "path": "aws_xray_sdk/core/sampling/local/sampler.py",
    "content": "import json\nimport pkgutil\nfrom random import Random\n\nfrom .sampling_rule import SamplingRule\nfrom ...exceptions.exceptions import InvalidSamplingManifestError\n\n# `.decode('utf-8')` needed for Python 3.4, 3.5.\nlocal_sampling_rule = json.loads(pkgutil.get_data(__name__, 'sampling_rule.json').decode('utf-8'))\n\nSUPPORTED_RULE_VERSION = (1, 2)\n\n\nclass LocalSampler:\n    \"\"\"\n    The local sampler that holds either custom sampling rules\n    or default sampling rules defined locally. The X-Ray recorder\n    use it to calculate if this segment should be sampled or not\n    when local rules are neccessary.\n    \"\"\"\n    def __init__(self, rules=local_sampling_rule):\n        \"\"\"\n        :param dict rules: a dict that defines custom sampling rules.\n        An example configuration:\n        {\n            \"version\": 2,\n            \"rules\": [\n                {\n                    \"description\": \"Player moves.\",\n                    \"host\": \"*\",\n                    \"http_method\": \"*\",\n                    \"url_path\": \"/api/move/*\",\n                    \"fixed_target\": 0,\n                    \"rate\": 0.05\n                }\n            ],\n            \"default\": {\n                \"fixed_target\": 1,\n                \"rate\": 0.1\n            }\n        }\n        This example defines one custom rule and a default rule.\n        The custom rule applies a five-percent sampling rate with no minimum\n        number of requests to trace for paths under /api/move/. 
The default\n        rule traces the first request each second and 10 percent of additional requests.\n        The SDK applies custom rules in the order in which they are defined.\n        If a request matches multiple custom rules, the SDK applies only the first rule.\n        \"\"\"\n        self.load_local_rules(rules)\n        self._random = Random()\n\n    def should_trace(self, sampling_req=None):\n        \"\"\"\n        Return True if the sampler decides to sample based on input\n        information and sampling rules. It will first check if any\n        custom rule should be applied, if not it falls back to the\n        default sampling rule.\n\n        All optional arguments are extracted from incoming requests by\n        X-Ray middleware to perform path based sampling.\n        \"\"\"\n        if sampling_req is None:\n            return self._should_trace(self._default_rule)\n\n        host = sampling_req.get('host', None)\n        method = sampling_req.get('method', None)\n        path = sampling_req.get('path', None)\n\n        for rule in self._rules:\n            if rule.applies(host, method, path):\n                return self._should_trace(rule)\n\n        return self._should_trace(self._default_rule)\n\n    def load_local_rules(self, rules):\n        version = rules.get('version', None)\n        if version not in SUPPORTED_RULE_VERSION:\n            raise InvalidSamplingManifestError('Manifest version: %s is not supported.', version)\n\n        if 'default' not in rules:\n            raise InvalidSamplingManifestError('A default rule must be provided.')\n\n        self._default_rule = SamplingRule(rule_dict=rules['default'],\n                                          version=version,\n                                          default=True)\n\n        self._rules = []\n        if 'rules' in rules:\n            for rule in rules['rules']:\n                self._rules.append(SamplingRule(rule, version))\n\n    def _should_trace(self, 
sampling_rule):\n\n        if sampling_rule.reservoir.take():\n            return True\n        else:\n            return self._random.random() < sampling_rule.rate\n"
  },
  {
    "path": "aws_xray_sdk/core/sampling/local/sampling_rule.json",
    "content": "{\n   \"version\": 2,\n   \"default\": {\n     \"fixed_target\": 1,\n     \"rate\": 0.05\n   },\n   \"rules\": [\n   ]\n }"
  },
  {
    "path": "aws_xray_sdk/core/sampling/local/sampling_rule.py",
    "content": "from .reservoir import Reservoir\nfrom ...exceptions.exceptions import InvalidSamplingManifestError\nfrom aws_xray_sdk.core.utils.search_pattern import wildcard_match\n\n\nclass SamplingRule:\n    \"\"\"\n    One SamplingRule represents one rule defined from local rule json file\n    or from a dictionary. It can be either a custom rule or default rule.\n    \"\"\"\n    FIXED_TARGET = 'fixed_target'\n    RATE = 'rate'\n\n    HOST = 'host'\n    METHOD = 'http_method'\n    PATH = 'url_path'\n    SERVICE_NAME = 'service_name'\n\n    def __init__(self, rule_dict, version=2, default=False):\n        \"\"\"\n        :param dict rule_dict: The dictionary that defines a single rule.\n        :param bool default: Indicates if this is the default rule. A default\n            rule cannot have `host`, `http_method` or `url_path`.\n        \"\"\"\n        if version == 2:\n            self._host_key = self.HOST\n        elif version == 1:\n            self._host_key = self.SERVICE_NAME\n\n        self._fixed_target = rule_dict.get(self.FIXED_TARGET, None)\n        self._rate = rule_dict.get(self.RATE, None)\n\n        self._host = rule_dict.get(self._host_key, None)\n        self._method = rule_dict.get(self.METHOD, None)\n        self._path = rule_dict.get(self.PATH, None)\n\n        self._default = default\n\n        self._validate()\n\n        self._reservoir = Reservoir(self.fixed_target)\n\n    def applies(self, host, method, path):\n        \"\"\"\n        Determines whether or not this sampling rule applies to\n        the incoming request based on some of the request's parameters.\n        Any None parameters provided will be considered an implicit match.\n        \"\"\"\n        return (not host or wildcard_match(self.host, host)) \\\n            and (not method or wildcard_match(self.method, method)) \\\n            and (not path or wildcard_match(self.path, path))\n\n    @property\n    def fixed_target(self):\n        \"\"\"\n        Defines fixed 
number of sampled segments per second.\n        This doesn't count for sampling rate.\n        \"\"\"\n        return self._fixed_target\n\n    @property\n    def rate(self):\n        \"\"\"\n        A float number less than 1.0 defines the sampling rate.\n        \"\"\"\n        return self._rate\n\n    @property\n    def host(self):\n        \"\"\"\n        The host name of the reqest to sample.\n        \"\"\"\n        return self._host\n\n    @property\n    def method(self):\n        \"\"\"\n        HTTP method of the request to sample.\n        \"\"\"\n        return self._method\n\n    @property\n    def path(self):\n        \"\"\"\n        The url path of the request to sample.\n        \"\"\"\n        return self._path\n\n    @property\n    def reservoir(self):\n        \"\"\"\n        Keeps track of used sampled targets within the second.\n        \"\"\"\n        return self._reservoir\n\n    @property\n    def version(self):\n        \"\"\"\n        Keeps track of used sampled targets within the second.\n        \"\"\"\n        return self._version\n\n    def _validate(self):\n        if self.fixed_target < 0 or self.rate < 0:\n            raise InvalidSamplingManifestError('All rules must have non-negative values for '\n                                               'fixed_target and rate')\n\n        if self._default:\n            if self.host or self.method or self.path:\n                raise InvalidSamplingManifestError('The default rule must not specify values for '\n                                                   'url_path, %s, or http_method', self._host_key)\n        else:\n            if not self.host or not self.method or not self.path:\n                raise InvalidSamplingManifestError('All non-default rules must have values for '\n                                                   'url_path, %s, and http_method', self._host_key)\n"
  },
  {
    "path": "aws_xray_sdk/core/sampling/reservoir.py",
    "content": "import threading\nfrom enum import Enum\n\n\nclass Reservoir:\n    \"\"\"\n    Centralized thread-safe reservoir which holds fixed sampling\n    quota, borrowed count and TTL.\n    \"\"\"\n    def __init__(self):\n        self._lock = threading.Lock()\n\n        self._quota = None\n        self._TTL = None\n\n        self._this_sec = 0\n        self._taken_this_sec = 0\n        self._borrowed_this_sec = 0\n\n        self._report_interval = 1\n        self._report_elapsed = 0\n\n    def borrow_or_take(self, now, can_borrow):\n        \"\"\"\n        Decide whether to borrow or take one quota from\n        the reservoir. Return ``False`` if it can neither\n        borrow nor take. This method is thread-safe.\n        \"\"\"\n        with self._lock:\n            return self._borrow_or_take(now, can_borrow)\n\n    def load_quota(self, quota, TTL, interval):\n        \"\"\"\n        Load new quota with a TTL. If the input is None,\n        the reservoir will continue using old quota until it\n        expires or has a non-None quota/TTL in a future load.\n        \"\"\"\n        if quota is not None:\n            self._quota = quota\n        if TTL is not None:\n            self._TTL = TTL\n        if interval is not None:\n            self._report_interval = interval / 10\n\n    @property\n    def quota(self):\n        return self._quota\n\n    @property\n    def TTL(self):\n        return self._TTL\n\n    def _time_to_report(self):\n        if self._report_elapsed + 1 >= self._report_interval:\n            self._report_elapsed = 0\n            return True\n        else:\n            self._report_elapsed += 1\n\n    def _borrow_or_take(self, now, can_borrow):\n        self._adjust_this_sec(now)\n        # Don't borrow if the quota is available and fresh.\n        if (self._quota is not None and self._quota >= 0 and\n                self._TTL is not None and self._TTL >= now):\n            if(self._taken_this_sec >= self._quota):\n                return 
ReservoirDecision.NO\n\n            self._taken_this_sec = self._taken_this_sec + 1\n            return ReservoirDecision.TAKE\n\n        # Otherwise try to borrow if the quota is not present or expired.\n        if can_borrow:\n            if self._borrowed_this_sec >= 1:\n                return ReservoirDecision.NO\n\n            self._borrowed_this_sec = self._borrowed_this_sec + 1\n            return ReservoirDecision.BORROW\n\n    def _adjust_this_sec(self, now):\n        if now != self._this_sec:\n            self._taken_this_sec = 0\n            self._borrowed_this_sec = 0\n            self._this_sec = now\n\n\nclass ReservoirDecision(Enum):\n    \"\"\"\n    An Enum of decisions the reservoir could make based on\n    assigned quota with TTL and the current timestamp/usage.\n    \"\"\"\n    TAKE = 'take'\n    BORROW = 'borrow'\n    NO = 'no'\n"
  },
  {
    "path": "aws_xray_sdk/core/sampling/rule_cache.py",
    "content": "import threading\nfrom operator import attrgetter\n\nTTL = 60 * 60  # The cache expires 1 hour after the last refresh time.\n\n\nclass RuleCache:\n    \"\"\"\n    Cache sampling rules and quota retrieved by ``TargetPoller``\n    and ``RulePoller``. It will not return anything if it expires.\n    \"\"\"\n    def __init__(self):\n\n        self._last_updated = None\n        self._rules = []\n        self._lock = threading.Lock()\n\n    def get_matched_rule(self, sampling_req, now):\n        if self._is_expired(now):\n            return None\n        matched_rule = None\n        for rule in self.rules:\n            if(not matched_rule and rule.match(sampling_req)):\n                matched_rule = rule\n            if(not matched_rule and rule.is_default()):\n                matched_rule = rule\n        return matched_rule\n\n    def load_rules(self, rules):\n        # Record the old rules for later merging.\n        with self._lock:\n            self._load_rules(rules)\n\n    def load_targets(self, targets_dict):\n        with self._lock:\n            self._load_targets(targets_dict)\n\n    def _load_rules(self, rules):\n        oldRules = {}\n        for rule in self.rules:\n            oldRules[rule.name] = rule\n\n        # Update the rules in the cache.\n        self.rules = rules\n\n        # Transfer state information to refreshed rules.\n        for rule in self.rules:\n            old = oldRules.get(rule.name, None)\n            if old:\n                rule.merge(old)\n\n        # The cache should maintain the order of the rules based on\n        # priority. 
If priority is the same we sort name by alphabet\n        # as rule name is unique.\n        self.rules.sort(key=attrgetter('priority', 'name'))\n\n    def _load_targets(self, targets_dict):\n        for rule in self.rules:\n            target = targets_dict.get(rule.name, None)\n            if target:\n                rule.reservoir.load_quota(target['quota'],\n                                          target['TTL'],\n                                          target['interval'])\n                rule.rate = target['rate']\n\n    def _is_expired(self, now):\n        # The cache is treated as expired if it is never loaded.\n        if not self._last_updated:\n            return True\n        return now > self.last_updated + TTL\n\n    @property\n    def rules(self):\n        return self._rules\n\n    @rules.setter\n    def rules(self, v):\n        self._rules = v\n\n    @property\n    def last_updated(self):\n        return self._last_updated\n\n    @last_updated.setter\n    def last_updated(self, v):\n        self._last_updated = v\n"
  },
  {
    "path": "aws_xray_sdk/core/sampling/rule_poller.py",
    "content": "import logging\nfrom random import Random\nimport time\nimport threading\n\nlog = logging.getLogger(__name__)\n\nDEFAULT_INTERVAL = 5 * 60  # 5 minutes on sampling rules fetch\n\n\nclass RulePoller:\n\n    def __init__(self, cache, connector):\n\n        self._cache = cache\n        self._random = Random()\n        self._time_to_wait = 0\n        self._time_elapsed = 0\n        self._connector = connector\n\n    def start(self):\n        poller_thread = threading.Thread(target=self._worker)\n        poller_thread.daemon = True\n        poller_thread.start()\n\n    def _worker(self):\n        frequency = 1\n        while True:\n            if self._time_elapsed >= self._time_to_wait:\n                self._refresh_cache()\n                self._time_elapsed = 0\n                self._reset_time_to_wait()\n            else:\n                time.sleep(frequency)\n                self._time_elapsed = self._time_elapsed + frequency\n\n    def wake_up(self):\n        \"\"\"\n        Force the rule poller to pull the sampling rules from the service\n        regardless of the polling interval.\n        This method is intended to be used by ``TargetPoller`` only.\n        \"\"\"\n        self._time_elapsed = self._time_to_wait + 1000\n\n    def _refresh_cache(self):\n        try:\n            now = int(time.time())\n            new_rules = self._connector.fetch_sampling_rules()\n            if new_rules:\n                self._cache.load_rules(new_rules)\n                self._cache.last_updated = now\n        except Exception:\n            log.error(\"Encountered an issue while polling sampling rules.\", exc_info=True)\n\n    def _reset_time_to_wait(self):\n        \"\"\"\n        A random jitter of up to 5 seconds is injected after each run\n        to ensure the calls eventually get evenly distributed over\n        the 5 minute window.\n        \"\"\"\n        self._time_to_wait = DEFAULT_INTERVAL + self._random.random() * 5\n"
  },
  {
    "path": "aws_xray_sdk/core/sampling/sampler.py",
    "content": "import logging\nfrom random import Random\nimport time\nimport threading\n\nfrom .local.sampler import LocalSampler\nfrom .rule_cache import RuleCache\nfrom .rule_poller import RulePoller\nfrom .target_poller import TargetPoller\nfrom .connector import ServiceConnector\nfrom .reservoir import ReservoirDecision\nfrom aws_xray_sdk import global_sdk_config\n\nlog = logging.getLogger(__name__)\n\n\nclass DefaultSampler:\n    \"\"\"Making sampling decisions based on centralized sampling rules defined\n    by X-Ray control plane APIs. It will fall back to local sampler if\n    centralized sampling rules are not available.\n    \"\"\"\n    def __init__(self):\n        self._local_sampler = LocalSampler()\n        self._cache = RuleCache()\n        self._connector = ServiceConnector()\n        self._rule_poller = RulePoller(self._cache, self._connector)\n        self._target_poller = TargetPoller(self._cache,\n                                           self._rule_poller, self._connector)\n\n        self._xray_client = None\n        self._random = Random()\n        self._started = False\n        self._origin = None\n        self._lock = threading.Lock()\n\n    def start(self):\n        \"\"\"\n        Start rule poller and target poller once X-Ray daemon address\n        and context manager is in place.\n        \"\"\"\n        if not global_sdk_config.sdk_enabled():\n            return\n\n        with self._lock:\n            if not self._started:\n                self._rule_poller.start()\n                self._target_poller.start()\n                self._started = True\n\n    def should_trace(self, sampling_req=None):\n        \"\"\"\n        Return the matched sampling rule name if the sampler finds one\n        and decide to sample. 
If no sampling rule matched, it falls back\n        to the local sampler's ``should_trace`` implementation.\n        All optional arguments are extracted from incoming requests by\n        X-Ray middleware to perform path based sampling.\n        \"\"\"\n        if not global_sdk_config.sdk_enabled():\n            return False\n\n        if not self._started:\n            self.start() # only front-end that actually uses the sampler spawns poller threads\n\n        now = int(time.time())\n        if sampling_req and not sampling_req.get('service_type', None):\n            sampling_req['service_type'] = self._origin\n        elif sampling_req is None:\n            sampling_req = {'service_type': self._origin}\n        matched_rule = self._cache.get_matched_rule(sampling_req, now)\n        if matched_rule:\n            log.debug('Rule %s is selected to make a sampling decision.', matched_rule.name)\n            return self._process_matched_rule(matched_rule, now)\n        else:\n            log.info('No effective centralized sampling rule match. Fallback to local rules.')\n            return self._local_sampler.should_trace(sampling_req)\n\n    def load_local_rules(self, rules):\n        \"\"\"\n        Load specified local rules to local fallback sampler.\n        \"\"\"\n        self._local_sampler.load_local_rules(rules)\n\n    def load_settings(self, daemon_config, context, origin=None):\n        \"\"\"\n        The pollers have dependency on the context manager\n        of the X-Ray recorder. 
They will respect the customer\n        specified xray client to poll sampling rules/targets.\n        Otherwise they falls back to use the same X-Ray daemon\n        as the emitter.\n        \"\"\"\n        self._connector.setup_xray_client(ip=daemon_config.tcp_ip,\n                                          port=daemon_config.tcp_port,\n                                          client=self.xray_client)\n\n        self._connector.context = context\n        self._origin = origin\n\n    def _process_matched_rule(self, rule, now):\n        # As long as a rule is matched we increment request counter.\n        rule.increment_request_count()\n        reservoir = rule.reservoir\n        sample = True\n        # We check if we can borrow or take from reservoir first.\n        decision = reservoir.borrow_or_take(now, rule.can_borrow)\n        if(decision == ReservoirDecision.BORROW):\n            rule.increment_borrow_count()\n        elif (decision == ReservoirDecision.TAKE):\n            rule.increment_sampled_count()\n        # Otherwise we compute based on fixed rate of this sampling rule.\n        elif (self._random.random() <= rule.rate):\n            rule.increment_sampled_count()\n        else:\n            sample = False\n\n        if sample:\n            return rule.name\n        else:\n            return False\n\n    @property\n    def xray_client(self):\n        return self._xray_client\n\n    @xray_client.setter\n    def xray_client(self, v):\n        self._xray_client = v\n"
  },
  {
    "path": "aws_xray_sdk/core/sampling/sampling_rule.py",
    "content": "import threading\n\nfrom .reservoir import Reservoir\nfrom aws_xray_sdk.core.utils.search_pattern import wildcard_match\n\n\nclass SamplingRule:\n    \"\"\"\n    Data model for a single centralized sampling rule definition.\n    \"\"\"\n    def __init__(self, name, priority, rate, reservoir_size,\n                 host=None, method=None, path=None, service=None,\n                 service_type=None):\n        self._name = name\n        self._priority = priority\n        self._rate = rate\n        self._can_borrow = not not reservoir_size\n\n        self._host = host\n        self._method = method\n        self._path = path\n        self._service = service\n        self._service_type = service_type\n\n        self._reservoir = Reservoir()\n        self._reset_statistics()\n\n        self._lock = threading.Lock()\n\n    def match(self, sampling_req):\n        \"\"\"\n        Determines whether or not this sampling rule applies to the incoming\n        request based on some of the request's parameters.\n        Any ``None`` parameter provided will be considered an implicit match.\n        \"\"\"\n        if sampling_req is None:\n            return False\n\n        host = sampling_req.get('host', None)\n        method = sampling_req.get('method', None)\n        path = sampling_req.get('path', None)\n        service = sampling_req.get('service', None)\n        service_type = sampling_req.get('service_type', None)\n\n        return (not host or wildcard_match(self._host, host)) \\\n            and (not method or wildcard_match(self._method, method)) \\\n            and (not path or wildcard_match(self._path, path)) \\\n            and (not service or wildcard_match(self._service, service)) \\\n            and (not service_type or wildcard_match(self._service_type, service_type))\n\n    def is_default(self):\n        # ``Default`` is a reserved keyword on X-Ray back-end.\n        return self.name == 'Default'\n\n    def snapshot_statistics(self):\n        
\"\"\"\n        Take a snapshot of request/borrow/sampled count for reporting\n        back to X-Ray back-end by ``TargetPoller`` and reset those counters.\n        \"\"\"\n        with self._lock:\n\n            stats = {\n                'request_count': self.request_count,\n                'borrow_count': self.borrow_count,\n                'sampled_count': self.sampled_count,\n            }\n\n            self._reset_statistics()\n            return stats\n\n    def merge(self, rule):\n        \"\"\"\n        Migrate all stateful attributes from the old rule\n        \"\"\"\n        with self._lock:\n            self._request_count = rule.request_count\n            self._borrow_count = rule.borrow_count\n            self._sampled_count = rule.sampled_count\n            self._reservoir = rule.reservoir\n            rule.reservoir = None\n\n    def ever_matched(self):\n        \"\"\"\n        Returns ``True`` if this sample rule has ever been matched\n        with an incoming request within the reporting interval.\n        \"\"\"\n        return self._request_count > 0\n\n    def time_to_report(self):\n        \"\"\"\n        Returns ``True`` if it is time to report sampling statistics\n        of this rule to refresh quota information for its reservoir.\n        \"\"\"\n        return self.reservoir._time_to_report()\n\n    def increment_request_count(self):\n        with self._lock:\n            self._request_count += 1\n\n    def increment_borrow_count(self):\n        with self._lock:\n            self._borrow_count += 1\n\n    def increment_sampled_count(self):\n        with self._lock:\n            self._sampled_count += 1\n\n    def _reset_statistics(self):\n        self._request_count = 0\n        self._borrow_count = 0\n        self._sampled_count = 0\n\n    @property\n    def rate(self):\n        return self._rate\n\n    @rate.setter\n    def rate(self, v):\n        self._rate = v\n\n    @property\n    def name(self):\n        return self._name\n\n    
@property\n    def priority(self):\n        return self._priority\n\n    @property\n    def reservoir(self):\n        return self._reservoir\n\n    @reservoir.setter\n    def reservoir(self, v):\n        self._reservoir = v\n\n    @property\n    def can_borrow(self):\n        return self._can_borrow\n\n    @property\n    def request_count(self):\n        return self._request_count\n\n    @property\n    def borrow_count(self):\n        return self._borrow_count\n\n    @property\n    def sampled_count(self):\n        return self._sampled_count\n"
  },
  {
    "path": "aws_xray_sdk/core/sampling/target_poller.py",
    "content": "import logging\nfrom random import Random\nimport time\nimport threading\n\nlog = logging.getLogger(__name__)\n\n\nclass TargetPoller:\n    \"\"\"\n    The poller to report the current statistics of all\n    centralized sampling rules and retrieve the new allocated\n    sampling quota and TTL from X-Ray service.\n    \"\"\"\n    def __init__(self, cache, rule_poller, connector):\n        self._cache = cache\n        self._rule_poller = rule_poller\n        self._connector = connector\n        self._random = Random()\n        self._interval = 10 # default 10 seconds interval on sampling targets fetch\n\n    def start(self):\n        poller_thread = threading.Thread(target=self._worker)\n        poller_thread.daemon = True\n        poller_thread.start()\n\n    def _worker(self):\n        while True:\n            try:\n                time.sleep(self._interval + self._get_jitter())\n                self._do_work()\n            except Exception:\n                log.error(\"Encountered an issue while polling targets.\", exc_info=True)\n\n    def _do_work(self):\n        candidates = self._get_candidates(self._cache.rules)\n        if not candidates:\n            log.debug('There is no sampling rule statistics to report. Skipping')\n            return None\n        targets, rule_freshness = self._connector.fetch_sampling_target(candidates)\n        self._cache.load_targets(targets)\n\n        if rule_freshness > self._cache.last_updated:\n            log.info('Performing out-of-band sampling rule polling to fetch updated rules.')\n            self._rule_poller.wake_up()\n\n    def _get_candidates(self, all_rules):\n        \"\"\"\n        Don't report a rule statistics if any of the conditions is met:\n        1. The report time hasn't come(some rules might have larger report intervals).\n        2. 
The rule is never matched.\n        \"\"\"\n        candidates = []\n        for rule in all_rules:\n            if rule.ever_matched() and rule.time_to_report():\n                candidates.append(rule)\n        return candidates\n\n    def _get_jitter(self):\n        \"\"\"\n        A random jitter of up to 0.1 seconds is injected after every run\n        to ensure all poller calls eventually get evenly distributed\n        over the polling interval window.\n        \"\"\"\n        return self._random.random() / self._interval\n"
  },
  {
    "path": "aws_xray_sdk/core/streaming/__init__.py",
    "content": ""
  },
  {
    "path": "aws_xray_sdk/core/streaming/default_streaming.py",
    "content": "import threading\n\n\nclass DefaultStreaming:\n    \"\"\"\n    The default streaming strategy. It uses the total count of a\n    segment's children subsegments as a threshold. If the threshold is\n    breached, it uses subtree streaming to stream out.\n    \"\"\"\n    def __init__(self, streaming_threshold=30):\n        self._threshold = streaming_threshold\n        self._lock = threading.Lock()\n\n    def is_eligible(self, segment):\n        \"\"\"\n        A segment is eligible to have its children subsegments streamed\n        if it is sampled and it breaches streaming threshold.\n        \"\"\"\n        if not segment or not segment.sampled:\n            return False\n\n        return segment.get_total_subsegments_size() > self.streaming_threshold\n\n    def stream(self, entity, callback):\n        \"\"\"\n        Stream out all eligible children of the input entity.\n\n        :param entity: The target entity to be streamed.\n        :param callback: The function that takes the node and\n            actually send it out.\n        \"\"\"\n        with self._lock:\n            self._stream(entity, callback)\n\n    def _stream(self, entity, callback):\n        children = entity.subsegments\n\n        children_ready = []\n        if len(children) > 0:\n            for child in children:\n                if self._stream(child, callback):\n                    children_ready.append(child)\n\n        # If all children subtrees and this root are ready, don't stream yet.\n        # Mark this root ready and return to parent.\n        if len(children_ready) == len(children) and not entity.in_progress:\n            return True\n\n        # Otherwise stream all ready children subtrees and return False\n        for child in children_ready:\n            callback(child)\n            entity.remove_subsegment(child)\n\n        return False\n\n    @property\n    def streaming_threshold(self):\n        return self._threshold\n\n    @streaming_threshold.setter\n    
def streaming_threshold(self, value):\n        self._threshold = value\n"
  },
  {
    "path": "aws_xray_sdk/core/utils/__init__.py",
    "content": ""
  },
  {
    "path": "aws_xray_sdk/core/utils/atomic_counter.py",
    "content": "import threading\n\n\nclass AtomicCounter:\n    \"\"\"\n    A helper class that implements a thread-safe counter.\n    \"\"\"\n    def __init__(self, initial=0):\n\n        self.value = initial\n        self._lock = threading.Lock()\n        self._initial = initial\n\n    def increment(self, num=1):\n\n        with self._lock:\n            self.value += num\n            return self.value\n\n    def decrement(self, num=1):\n\n        with self._lock:\n            self.value -= num\n            return self.value\n\n    def get_current(self):\n\n        with self._lock:\n            return self.value\n\n    def reset(self):\n\n        with self._lock:\n            self.value = self._initial\n            return self.value\n"
  },
  {
    "path": "aws_xray_sdk/core/utils/compat.py",
    "content": "import inspect\n\nannotation_value_types = (int, float, bool, str)\n\n\ndef is_classmethod(func):\n    return getattr(func, '__self__', None) is not None\n\n\ndef is_instance_method(parent_class, func_name, func):\n    try:\n        func_from_dict = parent_class.__dict__[func_name]\n    except KeyError:\n        for base in inspect.getmro(parent_class):\n            if func_name in base.__dict__:\n                func_from_dict = base.__dict__[func_name]\n                break\n        else:\n            return True\n\n    return not is_classmethod(func) and not isinstance(func_from_dict, staticmethod)\n"
  },
  {
    "path": "aws_xray_sdk/core/utils/conversion.py",
    "content": "import logging\n\nlog = logging.getLogger(__name__)\n\ndef metadata_to_dict(obj):\n    \"\"\"\n    Convert object to dict with all serializable properties like:\n    dict, list, set, tuple, str, bool, int, float, type, object, etc.\n    \"\"\"\n    try:\n        if isinstance(obj, dict):\n            metadata = {}\n            for key, value in obj.items():\n                metadata[key] = metadata_to_dict(value)\n            return metadata\n        elif isinstance(obj, type):\n            return str(obj)\n        elif hasattr(obj, \"_ast\"):\n            return metadata_to_dict(obj._ast())\n        elif hasattr(obj, \"__iter__\") and not isinstance(obj, str):\n            metadata = []\n            for item in obj:\n                metadata.append(metadata_to_dict(item))\n            return metadata\n        elif hasattr(obj, \"__dict__\"):\n            metadata = {}\n            for key, value in vars(obj).items():\n                if not callable(value) and not key.startswith('_'):\n                    metadata[key] = metadata_to_dict(value)\n            return metadata\n        else:\n            return obj\n    except Exception as e:\n        import pprint\n        log.warning(\"Failed to convert metadata to dict:\\n%s\", pprint.pformat(getattr(e, \"args\", None)))\n        return {}\n"
  },
  {
    "path": "aws_xray_sdk/core/utils/search_pattern.py",
    "content": "def wildcard_match(pattern, text, case_insensitive=True):\n    \"\"\"\n    Performs a case-insensitive wildcard match against two strings.\n    This method works with pseduo-regex chars; specifically ? and * are supported.\n    An asterisk (*) represents any combination of characters.\n    A question mark (?) represents any single character.\n    :param str pattern: the regex-like pattern to be compared against\n    :param str text: the string to compare against the pattern\n    :param boolean case_insensitive: dafault is True\n    return whether the text matches the pattern\n    \"\"\"\n    if pattern is None or text is None:\n        return False\n\n    if len(pattern) == 0:\n        return len(text) == 0\n\n    # Check the special case of a single * pattern, as it's common\n    if pattern == '*':\n        return True\n\n    # If elif logic Checking different conditions like match between the first i chars in text\n    # and the first p chars in pattern, checking pattern has '?' or '*' also check for case_insensitivity\n    # iStar is introduced to store length of the text and i, p and pStar for indexing\n    i = 0\n    p = 0\n    iStar = len(text)\n    pStar = 0\n    while i < len(text):\n        if p < len(pattern) and text[i] == pattern[p]:\n            i = i + 1\n            p = p + 1\n\n        elif p < len(pattern) and case_insensitive and text[i].lower() == pattern[p].lower():\n            i = i + 1\n            p = p + 1\n\n        elif p < len(pattern) and pattern[p] == '?':\n            i = i + 1\n            p = p + 1\n\n        elif p < len(pattern) and pattern[p] == '*':\n            iStar = i\n            pStar = p\n            p += 1\n\n        elif iStar != len(text):\n            iStar += 1\n            i = iStar\n            p = pStar + 1\n\n        else:\n            return False\n\n    while p < len(pattern) and pattern[p] == '*':\n        p = p + 1\n\n    return p == len(pattern) and i == len(text)\n"
  },
  {
    "path": "aws_xray_sdk/core/utils/sqs_message_helper.py",
    "content": "SQS_XRAY_HEADER = \"AWSTraceHeader\"\nclass SqsMessageHelper:\n    \n    @staticmethod \n    def isSampled(sqs_message):\n        attributes = sqs_message['attributes']\n\n        if SQS_XRAY_HEADER not in attributes:\n            return False\n\n        return 'Sampled=1' in attributes[SQS_XRAY_HEADER]"
  },
  {
    "path": "aws_xray_sdk/core/utils/stacktrace.py",
    "content": "import sys\nimport traceback\n\n\ndef get_stacktrace(limit=None):\n    \"\"\"\n    Get a full stacktrace for the current state of execution.\n\n    Include the current state of the stack, minus this function.\n    If there is an active exception, include the stacktrace information from\n    the exception as well.\n\n    :param int limit:\n        Optionally limit stack trace size results. This parmaeters has the same\n        meaning as the `limit` parameter in `traceback.print_stack`.\n    :returns:\n        List of stack trace objects, in the same form as\n        `traceback.extract_stack`.\n    \"\"\"\n    if limit is not None and limit == 0:\n        # Nothing to return. This is consistent with the behavior of the\n        # functions in the `traceback` module.\n        return []\n\n    stack = traceback.extract_stack()\n    # Remove this `get_stacktrace()` function call from the stack info.\n    # For what we want to report, this is superfluous information and arguably\n    # adds garbage to the report.\n    # Also drop the `traceback.extract_stack()` call above from the returned\n    # stack info, since this is also superfluous.\n    stack = stack[:-2]\n\n    _exc_type, _exc, exc_traceback = sys.exc_info()\n    if exc_traceback is not None:\n        # If and only if there is a currently triggered exception, combine the\n        # exception traceback information with the current stack state to get a\n        # complete trace.\n        exc_stack = traceback.extract_tb(exc_traceback)\n        stack += exc_stack\n\n    # Limit the stack trace size, if a limit was specified:\n    if limit is not None:\n        # Copy the behavior of `traceback` functions with a `limit` argument.\n        # See https://docs.python.org/3/library/traceback.html.\n        if limit > 0:\n            # limit > 0: include the last `limit` items\n            stack = stack[-limit:]\n        else:\n            # limit < 0: include the first `abs(limit)` items\n            
stack = stack[:abs(limit)]\n    return stack\n"
  },
  {
    "path": "aws_xray_sdk/ext/__init__.py",
    "content": ""
  },
  {
    "path": "aws_xray_sdk/ext/aiobotocore/__init__.py",
    "content": "from .patch import patch\n\n__all__ = ['patch']\n"
  },
  {
    "path": "aws_xray_sdk/ext/aiobotocore/patch.py",
    "content": "import aiobotocore.client\nimport wrapt\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.ext.boto_utils import inject_header, aws_meta_processor\n\n\ndef patch():\n    \"\"\"\n    Patch aiobotocore client so it generates subsegments\n    when calling AWS services.\n    \"\"\"\n    if hasattr(aiobotocore.client, '_xray_enabled'):\n        return\n    setattr(aiobotocore.client, '_xray_enabled', True)\n\n    wrapt.wrap_function_wrapper(\n        'aiobotocore.client',\n        'AioBaseClient._make_api_call',\n        _xray_traced_aiobotocore,\n    )\n\n    wrapt.wrap_function_wrapper(\n        'aiobotocore.endpoint',\n        'AioEndpoint.prepare_request',\n        inject_header,\n    )\n\n\nasync def _xray_traced_aiobotocore(wrapped, instance, args, kwargs):\n    service = instance._service_model.metadata[\"endpointPrefix\"]\n    result = await xray_recorder.record_subsegment_async(\n        wrapped, instance, args, kwargs,\n        name=service,\n        namespace='aws',\n        meta_processor=aws_meta_processor,\n    )\n\n    return result\n"
  },
  {
    "path": "aws_xray_sdk/ext/aiohttp/__init__.py",
    "content": ""
  },
  {
    "path": "aws_xray_sdk/ext/aiohttp/client.py",
    "content": "\"\"\"\nAioHttp Client tracing, only compatible with Aiohttp 3.X versions\n\"\"\"\nimport aiohttp\n\nfrom types import SimpleNamespace\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.models import http\nfrom aws_xray_sdk.core.utils import stacktrace\nfrom aws_xray_sdk.ext.util import inject_trace_header, strip_url, get_hostname\n\n# All aiohttp calls will entail outgoing HTTP requests, only in some ad-hoc\n# exceptions the namespace will be flipped back to local.\nREMOTE_NAMESPACE = 'remote'\nLOCAL_NAMESPACE = 'local'\nLOCAL_EXCEPTIONS = (\n    aiohttp.client_exceptions.ClientConnectionError,\n    # DNS issues\n    OSError\n)\n\n\nasync def begin_subsegment(session, trace_config_ctx, params):\n    name = trace_config_ctx.name if trace_config_ctx.name else get_hostname(str(params.url))\n    subsegment = xray_recorder.begin_subsegment(name, REMOTE_NAMESPACE)\n\n    # No-op if subsegment is `None` due to `LOG_ERROR`.\n    if not subsegment:\n        trace_config_ctx.give_up = True\n    else:\n        trace_config_ctx.give_up = False\n        subsegment.put_http_meta(http.METHOD, params.method)\n        subsegment.put_http_meta(http.URL, strip_url(params.url.human_repr()))\n        inject_trace_header(params.headers, subsegment)\n\n\nasync def end_subsegment(session, trace_config_ctx, params):\n    if trace_config_ctx.give_up:\n        return\n\n    subsegment = xray_recorder.current_subsegment()\n    subsegment.put_http_meta(http.STATUS, params.response.status)\n    xray_recorder.end_subsegment()\n\n\nasync def end_subsegment_with_exception(session, trace_config_ctx, params):\n    if trace_config_ctx.give_up:\n        return\n\n    subsegment = xray_recorder.current_subsegment()\n    subsegment.add_exception(\n        params.exception,\n        stacktrace.get_stacktrace(limit=xray_recorder._max_trace_back)\n    )\n\n    if isinstance(params.exception, LOCAL_EXCEPTIONS):\n        subsegment.namespace = LOCAL_NAMESPACE\n\n    
xray_recorder.end_subsegment()\n\n\ndef aws_xray_trace_config(name=None):\n    \"\"\"\n    :param name: name used to identify the subsegment, with None internally the URL will\n                 be used as identifier.\n    :returns: TraceConfig.\n    \"\"\"\n\n    def _trace_config_ctx_factory(trace_request_ctx):\n        return SimpleNamespace(\n            name=name,\n            trace_request_ctx=trace_request_ctx\n        )\n\n    trace_config = aiohttp.TraceConfig(trace_config_ctx_factory=_trace_config_ctx_factory)\n    trace_config.on_request_start.append(begin_subsegment)\n    trace_config.on_request_end.append(end_subsegment)\n    trace_config.on_request_exception.append(end_subsegment_with_exception)\n    return trace_config\n"
  },
  {
    "path": "aws_xray_sdk/ext/aiohttp/middleware.py",
    "content": "\"\"\"\nAioHttp Middleware\n\"\"\"\nfrom aiohttp import web\nfrom aiohttp.web_exceptions import HTTPException\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.models import http\nfrom aws_xray_sdk.core.utils import stacktrace\nfrom aws_xray_sdk.ext.util import calculate_sampling_decision, \\\n    calculate_segment_name, construct_xray_header, prepare_response_header\n\n\n@web.middleware\nasync def middleware(request, handler):\n    \"\"\"\n    Main middleware function, deals with all the X-Ray segment logic\n    \"\"\"\n    # Create X-Ray headers\n    xray_header = construct_xray_header(request.headers)\n    # Get name of service or generate a dynamic one from host\n    name = calculate_segment_name(request.headers['host'].split(':', 1)[0], xray_recorder)\n\n    sampling_req = {\n        'host': request.headers['host'],\n        'method': request.method,\n        'path': request.path,\n        'service': name,\n    }\n\n    sampling_decision = calculate_sampling_decision(\n        trace_header=xray_header,\n        recorder=xray_recorder,\n        sampling_req=sampling_req,\n    )\n\n    # Start a segment\n    segment = xray_recorder.begin_segment(\n        name=name,\n        traceid=xray_header.root,\n        parent_id=xray_header.parent,\n        sampling=sampling_decision,\n    )\n\n    segment.save_origin_trace_header(xray_header)\n    # Store request metadata in the current segment\n    segment.put_http_meta(http.URL, str(request.url))\n    segment.put_http_meta(http.METHOD, request.method)\n\n    if 'User-Agent' in request.headers:\n        segment.put_http_meta(http.USER_AGENT, request.headers['User-Agent'])\n\n    if 'X-Forwarded-For' in request.headers:\n        segment.put_http_meta(http.CLIENT_IP, request.headers['X-Forwarded-For'])\n        segment.put_http_meta(http.X_FORWARDED_FOR, True)\n    elif 'remote_addr' in request.headers:\n        segment.put_http_meta(http.CLIENT_IP, request.headers['remote_addr'])\n    
else:\n        segment.put_http_meta(http.CLIENT_IP, request.remote)\n\n    try:\n        # Call next middleware or request handler\n        response = await handler(request)\n    except HTTPException as exc:\n        # Non 2XX responses are raised as HTTPExceptions\n        response = exc\n        raise\n    except BaseException as err:\n        # Store exception information including the stacktrace to the segment\n        response = None\n        segment.put_http_meta(http.STATUS, 500)\n        stack = stacktrace.get_stacktrace(limit=xray_recorder.max_trace_back)\n        segment.add_exception(err, stack)\n        raise\n    finally:\n        if response is not None:\n            segment.put_http_meta(http.STATUS, response.status)\n            if 'Content-Length' in response.headers:\n                length = int(response.headers['Content-Length'])\n                segment.put_http_meta(http.CONTENT_LENGTH, length)\n\n            header_str = prepare_response_header(xray_header, segment)\n            response.headers[http.XRAY_HEADER] = header_str\n\n        xray_recorder.end_segment()\n\n    return response\n"
  },
  {
    "path": "aws_xray_sdk/ext/boto_utils.py",
    "content": "import json\nimport pkgutil\n\nfrom botocore.exceptions import ClientError\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.models import http\nfrom aws_xray_sdk.core.exceptions.exceptions import SegmentNotFoundException\n\nfrom aws_xray_sdk.ext.util import inject_trace_header, to_snake_case\n\n# `.decode('utf-8')` needed for Python 3.4, 3.5\nwhitelist = json.loads(pkgutil.get_data(__name__, 'resources/aws_para_whitelist.json').decode('utf-8'))\n\n\ndef inject_header(wrapped, instance, args, kwargs):\n    # skip tracing for SDK built-in centralized sampling pollers\n    url = args[0].url\n    if 'GetCentralizedSamplingRules' in url or 'SamplingTargets' in url:\n        return wrapped(*args, **kwargs)\n\n    headers = args[0].headers\n    # skip if the recorder is unable to open the subsegment\n    # for the outgoing request\n    subsegment = None\n    try:\n        subsegment = xray_recorder.current_subsegment()\n    except SegmentNotFoundException:\n        pass\n    if subsegment:\n        inject_trace_header(headers, subsegment)\n    return wrapped(*args, **kwargs)\n\n\ndef aws_meta_processor(wrapped, instance, args, kwargs,\n                       return_value, exception, subsegment, stack):\n    region = instance.meta.region_name\n\n    if 'operation_name' in kwargs:\n        operation_name = kwargs['operation_name']\n    else:\n        operation_name = args[0]\n\n    aws_meta = {\n        'operation': operation_name,\n        'region': region,\n    }\n\n    if return_value:\n        resp_meta = return_value.get('ResponseMetadata')\n        if resp_meta:\n            aws_meta['request_id'] = resp_meta.get('RequestId')\n            subsegment.put_http_meta(http.STATUS,\n                                     resp_meta.get('HTTPStatusCode'))\n            # for service like S3 that returns special request id in response headers\n            if 'HTTPHeaders' in resp_meta and resp_meta['HTTPHeaders'].get('x-amz-id-2'):\n          
      aws_meta['id_2'] = resp_meta['HTTPHeaders']['x-amz-id-2']\n\n    elif exception:\n        _aws_error_handler(exception, stack, subsegment, aws_meta)\n\n    _extract_whitelisted_params(subsegment.name, operation_name,\n                                aws_meta, args, kwargs, return_value)\n\n    subsegment.set_aws(aws_meta)\n\n\ndef _aws_error_handler(exception, stack, subsegment, aws_meta):\n\n    if not exception or not isinstance(exception, ClientError):\n        return\n\n    response_metadata = exception.response.get('ResponseMetadata')\n\n    if not response_metadata:\n        return\n\n    aws_meta['request_id'] = response_metadata.get('RequestId')\n\n    status_code = response_metadata.get('HTTPStatusCode')\n\n    subsegment.put_http_meta(http.STATUS, status_code)\n    subsegment.add_exception(exception, stack, True)\n\n\ndef _extract_whitelisted_params(service, operation,\n                                aws_meta, args, kwargs, response):\n\n    # check if service is whitelisted\n    if service not in whitelist['services']:\n        return\n    operations = whitelist['services'][service]['operations']\n\n    # check if operation is whitelisted\n    if operation not in operations:\n        return\n    params = operations[operation]\n\n    # record whitelisted request/response parameters\n    if 'request_parameters' in params:\n        _record_params(params['request_parameters'], args[1], aws_meta)\n\n    if 'request_descriptors' in params:\n        _record_special_params(params['request_descriptors'],\n                               args[1], aws_meta)\n\n    if 'response_parameters' in params and response:\n        _record_params(params['response_parameters'], response, aws_meta)\n\n    if 'response_descriptors' in params and response:\n        _record_special_params(params['response_descriptors'],\n                               response, aws_meta)\n\n\ndef _record_params(whitelisted, actual, aws_meta):\n\n    for key in whitelisted:\n        if key in 
actual:\n            snake_key = to_snake_case(key)\n            aws_meta[snake_key] = actual[key]\n\n\ndef _record_special_params(whitelisted, actual, aws_meta):\n\n    for key in whitelisted:\n        if key in actual:\n            _process_descriptor(whitelisted[key], actual[key], aws_meta)\n\n\ndef _process_descriptor(descriptor, value, aws_meta):\n\n    # \"get_count\" = true\n    if 'get_count' in descriptor and descriptor['get_count']:\n        value = len(value)\n\n    # \"get_keys\" = true\n    if 'get_keys' in descriptor and descriptor['get_keys']:\n        value = value.keys()\n\n    aws_meta[descriptor['rename_to']] = value\n"
  },
  {
    "path": "aws_xray_sdk/ext/botocore/__init__.py",
    "content": "from .patch import patch\n\n__all__ = ['patch']\n"
  },
  {
    "path": "aws_xray_sdk/ext/botocore/patch.py",
    "content": "import wrapt\nimport botocore.client\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.ext.boto_utils import inject_header, aws_meta_processor\n\n\ndef patch():\n    \"\"\"\n    Patch botocore client so it generates subsegments\n    when calling AWS services.\n    \"\"\"\n    if hasattr(botocore.client, '_xray_enabled'):\n        return\n    setattr(botocore.client, '_xray_enabled', True)\n\n    wrapt.wrap_function_wrapper(\n        'botocore.client',\n        'BaseClient._make_api_call',\n        _xray_traced_botocore,\n    )\n\n    wrapt.wrap_function_wrapper(\n        'botocore.endpoint',\n        'Endpoint.prepare_request',\n        inject_header,\n    )\n\n\ndef _xray_traced_botocore(wrapped, instance, args, kwargs):\n    service = instance._service_model.metadata[\"endpointPrefix\"]\n    if service == 'xray':\n        # skip tracing for SDK built-in sampling pollers\n        if ('GetSamplingRules' in args or\n            'GetSamplingTargets' in args or\n                'PutTraceSegments' in args):\n            return wrapped(*args, **kwargs)\n    return xray_recorder.record_subsegment(\n        wrapped, instance, args, kwargs,\n        name=service,\n        namespace='aws',\n        meta_processor=aws_meta_processor,\n    )\n"
  },
  {
    "path": "aws_xray_sdk/ext/bottle/__init__.py",
    "content": ""
  },
  {
    "path": "aws_xray_sdk/ext/bottle/middleware.py",
    "content": "from bottle import request, response, SimpleTemplate\n\nfrom aws_xray_sdk.core.lambda_launcher import check_in_lambda, LambdaContext\nfrom aws_xray_sdk.core.models import http\nfrom aws_xray_sdk.core.utils import stacktrace\nfrom aws_xray_sdk.ext.util import calculate_sampling_decision, \\\n    calculate_segment_name, construct_xray_header, prepare_response_header\n\n\nclass XRayMiddleware:\n    \"\"\"\n    Middleware that wraps each incoming request to a segment.\n    \"\"\"\n    name = 'xray'\n    api = 2\n\n    def __init__(self, recorder):\n        self._recorder = recorder\n        self._in_lambda_ctx = False\n\n        if check_in_lambda() and type(self._recorder.context) == LambdaContext:\n            self._in_lambda_ctx = True\n\n        _patch_render(recorder)\n\n    def apply(self, callback, route):\n        \"\"\"\n        Apply middleware directly to each route callback.\n        \"\"\"\n        def wrapper(*a, **ka):\n            headers = request.headers\n            xray_header = construct_xray_header(headers)\n            name = calculate_segment_name(request.urlparts[1], self._recorder)\n\n            sampling_req = {\n               'host': request.urlparts[1],\n               'method': request.method,\n               'path': request.path,\n               'service': name,\n            }\n            sampling_decision = calculate_sampling_decision(\n               trace_header=xray_header,\n               recorder=self._recorder,\n               sampling_req=sampling_req,\n            )\n\n            if self._in_lambda_ctx:\n                segment = self._recorder.begin_subsegment(name)\n            else:\n                segment = self._recorder.begin_segment(\n                    name=name,\n                    traceid=xray_header.root,\n                    parent_id=xray_header.parent,\n                    sampling=sampling_decision,\n                )\n\n            segment.save_origin_trace_header(xray_header)\n            
segment.put_http_meta(http.URL, request.url)\n            segment.put_http_meta(http.METHOD, request.method)\n            segment.put_http_meta(http.USER_AGENT, headers.get('User-Agent'))\n\n            client_ip = request.environ.get('HTTP_X_FORWARDED_FOR') or request.environ.get('REMOTE_ADDR')\n            if client_ip:\n                segment.put_http_meta(http.CLIENT_IP, client_ip)\n                segment.put_http_meta(http.X_FORWARDED_FOR, True)\n            else:\n                segment.put_http_meta(http.CLIENT_IP, request.remote_addr)\n\n            try:\n                rv = callback(*a, **ka)\n            except Exception as resp:\n                segment.put_http_meta(http.STATUS, getattr(resp, 'status_code', 500))\n                stack = stacktrace.get_stacktrace(limit=self._recorder._max_trace_back)\n                segment.add_exception(resp, stack)\n                if self._in_lambda_ctx:\n                    self._recorder.end_subsegment()\n                else:\n                    self._recorder.end_segment()\n\n                raise resp\n\n            segment.put_http_meta(http.STATUS, response.status_code)\n\n            origin_header = segment.get_origin_trace_header()\n            resp_header_str = prepare_response_header(origin_header, segment)\n            response.set_header(http.XRAY_HEADER, resp_header_str)\n\n            cont_len = response.headers.get('Content-Length')\n            if cont_len:\n                segment.put_http_meta(http.CONTENT_LENGTH, int(cont_len))\n\n            if self._in_lambda_ctx:\n                self._recorder.end_subsegment()\n            else:\n                self._recorder.end_segment()\n\n            return rv\n\n        return wrapper\n\ndef _patch_render(recorder):\n\n    _render = SimpleTemplate.render\n\n    @recorder.capture('template_render')\n    def _traced_render(self, *args, **kwargs):\n        if self.filename:\n            recorder.current_subsegment().name = self.filename\n        
return _render(self, *args, **kwargs)\n\n    SimpleTemplate.render = _traced_render\n"
  },
  {
    "path": "aws_xray_sdk/ext/dbapi2.py",
    "content": "import copy\nimport wrapt\n\nfrom aws_xray_sdk.core import xray_recorder\n\n\nclass XRayTracedConn(wrapt.ObjectProxy):\n\n    _xray_meta = None\n\n    def __init__(self, conn, meta={}):\n\n        super().__init__(conn)\n        self._xray_meta = meta\n\n    def cursor(self, *args, **kwargs):\n\n        cursor = self.__wrapped__.cursor(*args, **kwargs)\n        return XRayTracedCursor(cursor, self._xray_meta)\n\n\nclass XRayTracedCursor(wrapt.ObjectProxy):\n\n    _xray_meta = None\n\n    def __init__(self, cursor, meta={}):\n\n        super().__init__(cursor)\n        self._xray_meta = meta\n\n        # we preset database type if db is framework built-in\n        if not self._xray_meta.get('database_type'):\n            db_type = cursor.__class__.__module__.split('.')[0]\n            self._xray_meta['database_type'] = db_type\n\n    def __enter__(self):\n\n        value = self.__wrapped__.__enter__()\n        if value is not self.__wrapped__:\n            return value\n        return self\n\n    @xray_recorder.capture()\n    def execute(self, query, *args, **kwargs):\n\n        add_sql_meta(self._xray_meta)\n        return self.__wrapped__.execute(query, *args, **kwargs)\n\n    @xray_recorder.capture()\n    def executemany(self, query, *args, **kwargs):\n\n        add_sql_meta(self._xray_meta)\n        return self.__wrapped__.executemany(query, *args, **kwargs)\n\n    @xray_recorder.capture()\n    def callproc(self, proc, args):\n\n        add_sql_meta(self._xray_meta)\n        return self.__wrapped__.callproc(proc, args)\n\n\ndef add_sql_meta(meta):\n\n    subsegment = xray_recorder.current_subsegment()\n\n    if not subsegment:\n        return\n\n    if meta.get('name', None):\n        subsegment.name = meta['name']\n\n    sql_meta = copy.copy(meta)\n    if sql_meta.get('name', None):\n        del sql_meta['name']\n    subsegment.set_sql(sql_meta)\n    subsegment.namespace = 'remote'\n"
  },
  {
    "path": "aws_xray_sdk/ext/django/__init__.py",
    "content": "default_app_config = 'aws_xray_sdk.ext.django.apps.XRayConfig'\n"
  },
  {
    "path": "aws_xray_sdk/ext/django/apps.py",
    "content": "import logging\n\nfrom django.apps import AppConfig\n\nfrom .conf import settings\nfrom .db import patch_db\nfrom .templates import patch_template\nfrom aws_xray_sdk.core import patch, xray_recorder\nfrom aws_xray_sdk.core.exceptions.exceptions import SegmentNameMissingException\n\n\nlog = logging.getLogger(__name__)\n\n\nclass XRayConfig(AppConfig):\n    name = 'aws_xray_sdk.ext.django'\n\n    def ready(self):\n        \"\"\"\n        Configure global XRay recorder based on django settings\n        under XRAY_RECORDER namespace.\n        This method could be called twice during server startup\n        because of base command and reload command.\n        So this function must be idempotent\n        \"\"\"\n        if not settings.AWS_XRAY_TRACING_NAME:\n            raise SegmentNameMissingException('Segment name is required.')\n\n        xray_recorder.configure(\n            daemon_address=settings.AWS_XRAY_DAEMON_ADDRESS,\n            sampling=settings.SAMPLING,\n            sampling_rules=settings.SAMPLING_RULES,\n            sampler=settings.SAMPLER,\n            context_missing=settings.AWS_XRAY_CONTEXT_MISSING,\n            plugins=settings.PLUGINS,\n            service=settings.AWS_XRAY_TRACING_NAME,\n            dynamic_naming=settings.DYNAMIC_NAMING,\n            streaming_threshold=settings.STREAMING_THRESHOLD,\n            max_trace_back=settings.MAX_TRACE_BACK,\n            stream_sql=settings.STREAM_SQL,\n        )\n\n        if settings.PATCH_MODULES:\n            if settings.AUTO_PATCH_PARENT_SEGMENT_NAME is not None:\n                with xray_recorder.in_segment(settings.AUTO_PATCH_PARENT_SEGMENT_NAME):\n                    patch(settings.PATCH_MODULES, ignore_module_patterns=settings.IGNORE_MODULE_PATTERNS)\n            else:\n                patch(settings.PATCH_MODULES, ignore_module_patterns=settings.IGNORE_MODULE_PATTERNS)\n\n        # if turned on subsegment will be generated on\n        # built-in database and template 
rendering\n        if settings.AUTO_INSTRUMENT:\n            try:\n                patch_db()\n            except Exception:\n                log.debug('failed to patch Django built-in database')\n            try:\n                patch_template()\n            except Exception:\n                log.debug('failed to patch Django built-in template engine')\n"
  },
  {
    "path": "aws_xray_sdk/ext/django/conf.py",
    "content": "import os\n\nfrom django.conf import settings as django_settings\nfrom django.test.signals import setting_changed\n\nDEFAULTS = {\n    'AWS_XRAY_DAEMON_ADDRESS': '127.0.0.1:2000',\n    'AUTO_INSTRUMENT': True,\n    'AWS_XRAY_CONTEXT_MISSING': 'LOG_ERROR',\n    'PLUGINS': (),\n    'SAMPLING': True,\n    'SAMPLING_RULES': None,\n    'SAMPLER': None,\n    'AWS_XRAY_TRACING_NAME': None,\n    'DYNAMIC_NAMING': None,\n    'STREAMING_THRESHOLD': None,\n    'MAX_TRACE_BACK': None,\n    'STREAM_SQL': True,\n    'PATCH_MODULES': [],\n    'AUTO_PATCH_PARENT_SEGMENT_NAME': None,\n    'IGNORE_MODULE_PATTERNS': [],\n    'URLS_AS_ANNOTATION': 'LAMBDA',  # 3 valid values, NONE -> don't ever, LAMBDA -> only for AWS Lambdas, ALL -> every time  \n}\n\nXRAY_NAMESPACE = 'XRAY_RECORDER'\n\nSUPPORTED_ENV_VARS = ('AWS_XRAY_DAEMON_ADDRESS',\n                      'AWS_XRAY_CONTEXT_MISSING',\n                      'AWS_XRAY_TRACING_NAME',\n                      )\n\n\nclass XRaySettings:\n    \"\"\"\n    An object of Django settings to easily modify certain fields.\n    The precedence for configurations at different places is as follows:\n    environment variables > user settings in settings.py > default settings\n    \"\"\"\n    def __init__(self, user_settings=None):\n\n        self.defaults = DEFAULTS\n\n        if user_settings:\n            self._user_settings = user_settings\n\n    @property\n    def user_settings(self):\n\n        if not hasattr(self, '_user_settings'):\n            self._user_settings = getattr(django_settings, XRAY_NAMESPACE, {})\n\n        return self._user_settings\n\n    def __getattr__(self, attr):\n\n        if attr not in self.defaults:\n            raise AttributeError('Invalid setting: %s' % attr)\n\n        if self.user_settings.get(attr, None) is not None:\n            if attr in SUPPORTED_ENV_VARS:\n                return os.getenv(attr, self.user_settings[attr])\n            else:\n                return self.user_settings[attr]\n        
elif attr in SUPPORTED_ENV_VARS:\n            return os.getenv(attr, self.defaults[attr])\n        else:\n            return self.defaults[attr]\n\n\nsettings = XRaySettings()\n\n\ndef reload_settings(*args, **kwargs):\n    \"\"\"\n    Reload X-Ray user settings upon Django server hot restart\n    \"\"\"\n    global settings\n    setting, value = kwargs['setting'], kwargs['value']\n    if setting == XRAY_NAMESPACE:\n        settings = XRaySettings(value)\n\n\nsetting_changed.connect(reload_settings)\n"
  },
  {
    "path": "aws_xray_sdk/ext/django/db.py",
    "content": "import copy\nimport logging\nimport importlib\n\nfrom django.db import connections\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.ext.dbapi2 import XRayTracedCursor\n\nlog = logging.getLogger(__name__)\n\n\ndef patch_db():\n    for conn in connections.all():\n        module = importlib.import_module(conn.__module__)\n        _patch_conn(getattr(module, conn.__class__.__name__))\n\n\nclass DjangoXRayTracedCursor(XRayTracedCursor):\n    def execute(self, query, *args, **kwargs):\n        if xray_recorder.stream_sql:\n            _previous_meta = copy.copy(self._xray_meta)\n            self._xray_meta['sanitized_query'] = query\n        result = super().execute(query, *args, **kwargs)\n        if xray_recorder.stream_sql:\n            self._xray_meta = _previous_meta\n        return result\n\n    def executemany(self, query, *args, **kwargs):\n        if xray_recorder.stream_sql:\n            _previous_meta = copy.copy(self._xray_meta)\n            self._xray_meta['sanitized_query'] = query\n        result = super().executemany(query, *args, **kwargs)\n        if xray_recorder.stream_sql:\n            self._xray_meta = _previous_meta\n        return result\n\n    def callproc(self, proc, args):\n        if xray_recorder.stream_sql:\n            _previous_meta = copy.copy(self._xray_meta)\n            self._xray_meta['sanitized_query'] = proc\n        result = super().callproc(proc, args)\n        if xray_recorder.stream_sql:\n            self._xray_meta = _previous_meta\n        return result\n\n\ndef _patch_cursor(cursor_name, conn):\n    attr = '_xray_original_{}'.format(cursor_name)\n\n    if hasattr(conn, attr):\n        log.debug('django built-in db {} already patched'.format(cursor_name))\n        return\n\n    if not hasattr(conn, cursor_name):\n        log.debug('django built-in db does not have {}'.format(cursor_name))\n        return\n\n    setattr(conn, attr, getattr(conn, cursor_name))\n\n    meta = {}\n\n    if 
hasattr(conn, 'vendor'):\n        meta['database_type'] = conn.vendor\n\n    def cursor(self, *args, **kwargs):\n\n        host = None\n        user = None\n\n        if hasattr(self, 'settings_dict'):\n            settings = self.settings_dict\n            host = settings.get('HOST', None)\n            user = settings.get('USER', None)\n\n        if host:\n            meta['name'] = host\n        if user:\n            meta['user'] = user\n\n        original_cursor = getattr(self, attr)(*args, **kwargs)\n        return DjangoXRayTracedCursor(original_cursor, meta)\n\n    setattr(conn, cursor_name, cursor)\n\n\ndef _patch_conn(conn):\n    _patch_cursor('cursor', conn)\n    _patch_cursor('chunked_cursor', conn)\n"
  },
  {
    "path": "aws_xray_sdk/ext/django/middleware.py",
    "content": "import logging\nfrom .conf import settings\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.models import http\nfrom aws_xray_sdk.core.utils import stacktrace\nfrom aws_xray_sdk.ext.util import calculate_sampling_decision, \\\n    calculate_segment_name, construct_xray_header, prepare_response_header\nfrom aws_xray_sdk.core.lambda_launcher import check_in_lambda, LambdaContext\n\n\nlog = logging.getLogger(__name__)\n\n# Django will rewrite some http request headers.\nUSER_AGENT_KEY = 'HTTP_USER_AGENT'\nX_FORWARDED_KEY = 'HTTP_X_FORWARDED_FOR'\nREMOTE_ADDR_KEY = 'REMOTE_ADDR'\nHOST_KEY = 'HTTP_HOST'\nCONTENT_LENGTH_KEY = 'content-length'\n\n\nclass XRayMiddleware:\n    \"\"\"\n    Middleware that wraps each incoming request to a segment.\n    \"\"\"\n    def __init__(self, get_response):\n\n        self.get_response = get_response\n        self.in_lambda_ctx = False\n\n        if check_in_lambda() and type(xray_recorder.context) == LambdaContext:\n            self.in_lambda_ctx = True\n\n    def _urls_as_annotation(self):\n        if settings.URLS_AS_ANNOTATION == \"LAMBDA\" and self.in_lambda_ctx:\n            return True\n        elif settings.URLS_AS_ANNOTATION == \"ALL\":\n            return True\n        return False\n\n\n    # hooks for django version >= 1.10\n    def __call__(self, request):\n\n        sampling_decision = None\n        meta = request.META\n        xray_header = construct_xray_header(meta)\n        # a segment name is required\n        name = calculate_segment_name(meta.get(HOST_KEY), xray_recorder)\n\n        sampling_req = {\n            'host': meta.get(HOST_KEY),\n            'method': request.method,\n            'path': request.path,\n            'service': name,\n        }\n        sampling_decision = calculate_sampling_decision(\n            trace_header=xray_header,\n            recorder=xray_recorder,\n            sampling_req=sampling_req,\n        )\n        if self.in_lambda_ctx:\n            
segment = xray_recorder.begin_subsegment(name)\n            # X-Ray can't search/filter subsegments on URL but it can search annotations\n            # So for lambda to be able to filter by annotation we add these as annotations\n        else:\n            segment = xray_recorder.begin_segment(\n                name=name,\n                traceid=xray_header.root,\n                parent_id=xray_header.parent,\n                sampling=sampling_decision,\n            )\n\n        segment.save_origin_trace_header(xray_header)\n        segment.put_http_meta(http.URL, request.build_absolute_uri())\n        segment.put_http_meta(http.METHOD, request.method)\n        if self._urls_as_annotation():\n            segment.put_annotation(http.URL, request.build_absolute_uri())\n            segment.put_annotation(http.METHOD, request.method)\n\n        if meta.get(USER_AGENT_KEY):\n            segment.put_http_meta(http.USER_AGENT, meta.get(USER_AGENT_KEY))\n            if self._urls_as_annotation():\n                segment.put_annotation(http.USER_AGENT, meta.get(USER_AGENT_KEY))\n        if meta.get(X_FORWARDED_KEY):\n            # X_FORWARDED_FOR may come from untrusted source so we\n            # need to set the flag to true as additional information\n            segment.put_http_meta(http.CLIENT_IP, meta.get(X_FORWARDED_KEY))\n            segment.put_http_meta(http.X_FORWARDED_FOR, True)\n            if self._urls_as_annotation():\n                segment.put_annotation(http.CLIENT_IP, meta.get(X_FORWARDED_KEY))\n                segment.put_annotation(http.X_FORWARDED_FOR, True)\n        elif meta.get(REMOTE_ADDR_KEY):\n            segment.put_http_meta(http.CLIENT_IP, meta.get(REMOTE_ADDR_KEY))\n            if self._urls_as_annotation():\n                segment.put_annotation(http.CLIENT_IP, meta.get(REMOTE_ADDR_KEY))\n\n        response = self.get_response(request)\n        segment.put_http_meta(http.STATUS, response.status_code)\n        if 
self._urls_as_annotation():\n            segment.put_annotation(http.STATUS, response.status_code)\n\n        if response.has_header(CONTENT_LENGTH_KEY):\n            length = int(response[CONTENT_LENGTH_KEY])\n            segment.put_http_meta(http.CONTENT_LENGTH, length)\n            if self._urls_as_annotation():\n                segment.put_annotation(http.CONTENT_LENGTH, length)\n        response[http.XRAY_HEADER] = prepare_response_header(xray_header, segment)\n\n        if self.in_lambda_ctx:\n            xray_recorder.end_subsegment()\n        else:\n            xray_recorder.end_segment()\n\n        return response\n\n    def process_exception(self, request, exception):\n        \"\"\"\n        Add exception information and fault flag to the\n        current segment.\n        \"\"\"\n        if self.in_lambda_ctx:\n            segment = xray_recorder.current_subsegment()\n        else:\n            segment = xray_recorder.current_segment()\n        segment.put_http_meta(http.STATUS, 500)\n\n        stack = stacktrace.get_stacktrace(limit=xray_recorder._max_trace_back)\n        segment.add_exception(exception, stack)\n"
  },
  {
    "path": "aws_xray_sdk/ext/django/templates.py",
    "content": "import logging\n\nfrom django.template import Template\nfrom django.utils.safestring import SafeString\n\nfrom aws_xray_sdk.core import xray_recorder\n\nlog = logging.getLogger(__name__)\n\n\ndef patch_template():\n\n    attr = '_xray_original_render'\n\n    if getattr(Template, attr, None):\n        log.debug(\"already patched\")\n        return\n\n    setattr(Template, attr, Template.render)\n\n    @xray_recorder.capture('template_render')\n    def xray_render(self, context):\n        template_name = self.name or getattr(context, 'template_name', None)\n        if template_name:\n            name = str(template_name)\n            # SafeString are not properly serialized by jsonpickle,\n            # turn them back to str by adding a non-safe str.\n            if isinstance(name, SafeString):\n                name += ''\n            subsegment = xray_recorder.current_subsegment()\n            if subsegment:\n                subsegment.name = name\n\n        return Template._xray_original_render(self, context)\n\n    Template.render = xray_render\n"
  },
  {
    "path": "aws_xray_sdk/ext/flask/__init__.py",
    "content": ""
  },
  {
    "path": "aws_xray_sdk/ext/flask/middleware.py",
    "content": "import flask.templating\nfrom flask import request\n\nfrom aws_xray_sdk.core.models import http\nfrom aws_xray_sdk.core.utils import stacktrace\nfrom aws_xray_sdk.ext.util import calculate_sampling_decision, \\\n    calculate_segment_name, construct_xray_header, prepare_response_header\nfrom aws_xray_sdk.core.lambda_launcher import check_in_lambda, LambdaContext\n\n\nclass XRayMiddleware:\n\n    def __init__(self, app, recorder):\n        self.app = app\n        self.app.logger.info(\"initializing xray middleware\")\n\n        self._recorder = recorder\n        self.app.before_request(self._before_request)\n        self.app.after_request(self._after_request)\n        self.app.teardown_request(self._teardown_request)\n        self.in_lambda_ctx = False\n\n        if check_in_lambda() and type(self._recorder.context) == LambdaContext:\n            self.in_lambda_ctx = True\n\n        _patch_render(recorder)\n\n    def _before_request(self):\n        headers = request.headers\n        xray_header = construct_xray_header(headers)\n        req = request._get_current_object()\n\n        name = calculate_segment_name(req.host, self._recorder)\n\n        sampling_req = {\n            'host': req.host,\n            'method': req.method,\n            'path': req.path,\n            'service': name,\n        }\n        sampling_decision = calculate_sampling_decision(\n            trace_header=xray_header,\n            recorder=self._recorder,\n            sampling_req=sampling_req,\n        )\n\n        if self.in_lambda_ctx:\n            segment = self._recorder.begin_subsegment(name)\n        else:\n            segment = self._recorder.begin_segment(\n                name=name,\n                traceid=xray_header.root,\n                parent_id=xray_header.parent,\n                sampling=sampling_decision,\n            )\n\n        segment.save_origin_trace_header(xray_header)\n        segment.put_http_meta(http.URL, req.base_url)\n        
segment.put_http_meta(http.METHOD, req.method)\n        segment.put_http_meta(http.USER_AGENT, headers.get('User-Agent'))\n\n        client_ip = headers.get('X-Forwarded-For') or headers.get('HTTP_X_FORWARDED_FOR')\n        if client_ip:\n            segment.put_http_meta(http.CLIENT_IP, client_ip)\n            segment.put_http_meta(http.X_FORWARDED_FOR, True)\n        else:\n            segment.put_http_meta(http.CLIENT_IP, req.remote_addr)\n\n    def _after_request(self, response):\n        if self.in_lambda_ctx:\n            segment = self._recorder.current_subsegment()\n        else:\n            segment = self._recorder.current_segment()\n        segment.put_http_meta(http.STATUS, response.status_code)\n\n        origin_header = segment.get_origin_trace_header()\n        resp_header_str = prepare_response_header(origin_header, segment)\n        response.headers[http.XRAY_HEADER] = resp_header_str\n\n        cont_len = response.headers.get('Content-Length')\n        if cont_len:\n            segment.put_http_meta(http.CONTENT_LENGTH, int(cont_len))\n\n        return response\n\n    def _teardown_request(self, exception):\n        segment = None\n        try:\n            if self.in_lambda_ctx:\n                segment = self._recorder.current_subsegment()\n            else:\n                segment = self._recorder.current_segment()\n        except Exception:\n            pass\n        if not segment:\n            return\n\n        if exception:\n            segment.put_http_meta(http.STATUS, 500)\n            stack = stacktrace.get_stacktrace(limit=self._recorder._max_trace_back)\n            segment.add_exception(exception, stack)\n\n        if self.in_lambda_ctx:\n            self._recorder.end_subsegment()\n        else:\n            self._recorder.end_segment()\n\n\ndef _patch_render(recorder):\n\n    _render = flask.templating._render\n\n    @recorder.capture('template_render')\n    def _traced_render(template, context, app):\n        if template.name:\n  
          recorder.current_subsegment().name = template.name\n        return _render(template, context, app)\n\n    flask.templating._render = _traced_render\n"
  },
  {
    "path": "aws_xray_sdk/ext/flask_sqlalchemy/__init__.py",
    "content": ""
  },
  {
    "path": "aws_xray_sdk/ext/flask_sqlalchemy/query.py",
    "content": "from builtins import super\nfrom flask_sqlalchemy.model import Model\nfrom sqlalchemy.orm.session import sessionmaker\nfrom flask_sqlalchemy import SQLAlchemy, BaseQuery, _SessionSignalEvents, get_state\nfrom aws_xray_sdk.ext.sqlalchemy.query import XRaySession, XRayQuery\nfrom aws_xray_sdk.ext.sqlalchemy.util.decorators import xray_on_call, decorate_all_functions\n\n\n@decorate_all_functions(xray_on_call)\nclass XRayBaseQuery(BaseQuery):\n    BaseQuery.__bases__ = (XRayQuery,)\n\n\nclass XRaySignallingSession(XRaySession):\n    \"\"\"\n    .. versionadded:: 2.0\n    .. versionadded:: 2.1\n\n    The signalling session is the default session that Flask-SQLAlchemy\n    uses. It extends the default session system with bind selection and\n    modification tracking.\n    If you want to use a different session you can override the\n    :meth:`SQLAlchemy.create_session` function.\n    The `binds` option was added, which allows a session to be joined\n    to an external transaction.\n    \"\"\"\n    def __init__(self, db, autocommit=False, autoflush=True, **options):\n        #: The application that this session belongs to.\n        self.app = app = db.get_app()\n        track_modifications = app.config['SQLALCHEMY_TRACK_MODIFICATIONS']\n        bind = options.pop('bind', None) or db.engine\n        binds = options.pop('binds', db.get_binds(app))\n\n        if track_modifications is None or track_modifications:\n            _SessionSignalEvents.register(self)\n\n        XRaySession.__init__(\n            self, autocommit=autocommit, autoflush=autoflush,\n            bind=bind, binds=binds, **options\n        )\n\n    def get_bind(self, mapper=None, clause=None):\n        # mapper is None if someone tries to just get a connection\n        if mapper is not None:\n            info = getattr(mapper.mapped_table, 'info', {})\n            bind_key = info.get('bind_key')\n            if bind_key is not None:\n                state = get_state(self.app)\n           
     return state.db.get_engine(self.app, bind=bind_key)\n        return XRaySession.get_bind(self, mapper, clause)\n\n\nclass XRayFlaskSqlAlchemy(SQLAlchemy):\n    def __init__(self, app=None, use_native_unicode=True, session_options=None,\n                 metadata=None, query_class=XRayBaseQuery, model_class=Model):\n        super().__init__(app, use_native_unicode, session_options,\n                         metadata, query_class, model_class)\n\n    def create_session(self, options):\n        return sessionmaker(class_=XRaySignallingSession, db=self, **options)\n"
  },
  {
    "path": "aws_xray_sdk/ext/httplib/__init__.py",
    "content": "from .patch import patch, unpatch, add_ignored, reset_ignored\n\n__all__ = ['patch', 'unpatch', 'add_ignored', 'reset_ignored']\n"
  },
  {
    "path": "aws_xray_sdk/ext/httplib/patch.py",
    "content": "import fnmatch\nfrom collections import namedtuple\n\nimport urllib3.connection\nimport wrapt\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.exceptions.exceptions import SegmentNotFoundException\nfrom aws_xray_sdk.core.models import http\nfrom aws_xray_sdk.core.patcher import _PATCHED_MODULES\nfrom aws_xray_sdk.ext.util import get_hostname, inject_trace_header, strip_url, unwrap\n\nhttplib_client_module = 'http.client'\nimport http.client as httplib\n\n_XRAY_PROP = '_xray_prop'\n_XRay_Data = namedtuple('xray_data', ['method', 'host', 'url'])\n_XRay_Ignore = namedtuple('xray_ignore', ['subclass', 'hostname', 'urls'])\n# A flag indicates whether this module is X-Ray patched or not\nPATCH_FLAG = '__xray_patched'\n# Calls that should be ignored\n_XRAY_IGNORE = set()\n\n\ndef add_ignored(subclass=None, hostname=None, urls=None):\n    global _XRAY_IGNORE\n    if subclass is not None or hostname is not None or urls is not None:\n        urls = urls if urls is None else tuple(urls)\n        _XRAY_IGNORE.add(_XRay_Ignore(subclass=subclass, hostname=hostname, urls=urls))\n\n\ndef reset_ignored():\n    global _XRAY_IGNORE\n    _XRAY_IGNORE.clear()\n    _ignored_add_default()\n\n\ndef _ignored_add_default():\n    # skip httplib tracing for SDK built-in centralized sampling pollers\n    add_ignored(subclass='botocore.awsrequest.AWSHTTPConnection', urls=['/GetSamplingRules', '/SamplingTargets'])\n\n\n# make sure we have the default rules\n_ignored_add_default()\n\n\ndef http_response_processor(wrapped, instance, args, kwargs, return_value,\n                            exception, subsegment, stack):\n    xray_data = getattr(instance, _XRAY_PROP, None)\n    if not xray_data:\n        return\n\n    subsegment.put_http_meta(http.METHOD, xray_data.method)\n    subsegment.put_http_meta(http.URL, strip_url(xray_data.url))\n\n    if return_value:\n        subsegment.put_http_meta(http.STATUS, return_value.status)\n\n        # propagate to response 
object\n        xray_data = _XRay_Data('READ', xray_data.host, xray_data.url)\n        setattr(return_value, _XRAY_PROP, xray_data)\n\n    if exception:\n        subsegment.add_exception(exception, stack)\n\n\ndef _xray_traced_http_getresponse(wrapped, instance, args, kwargs):\n    xray_data = getattr(instance, _XRAY_PROP, None)\n    if not xray_data:\n        return wrapped(*args, **kwargs)\n\n    return xray_recorder.record_subsegment(\n        wrapped, instance, args, kwargs,\n        name=get_hostname(xray_data.url),\n        namespace='remote',\n        meta_processor=http_response_processor,\n    )\n\n\ndef http_send_request_processor(wrapped, instance, args, kwargs, return_value,\n                                exception, subsegment, stack):\n    xray_data = getattr(instance, _XRAY_PROP, None)\n    if not xray_data:\n        return\n\n    # we don't delete the attr as we can have multiple reads\n    subsegment.put_http_meta(http.METHOD, xray_data.method)\n    subsegment.put_http_meta(http.URL, strip_url(xray_data.url))\n\n    if exception:\n        subsegment.add_exception(exception, stack)\n\n\ndef _ignore_request(instance, hostname, url):\n    global _XRAY_IGNORE\n    module = instance.__class__.__module__\n    if module is None or module == str.__class__.__module__:\n        subclass = instance.__class__.__name__\n    else:\n        subclass = module + '.' 
+ instance.__class__.__name__\n    for rule in _XRAY_IGNORE:\n        subclass_match = subclass == rule.subclass if rule.subclass is not None else True\n        host_match = fnmatch.fnmatch(hostname, rule.hostname) if rule.hostname is not None else True\n        url_match = url in rule.urls if rule.urls is not None else True\n        if url_match and host_match and subclass_match:\n            return True\n    return False\n\n\ndef _send_request(wrapped, instance, args, kwargs):\n    def decompose_args(method, url, body, headers, encode_chunked=False):\n        # skip any ignored requests\n        if _ignore_request(instance, instance.host, url):\n            return wrapped(*args, **kwargs)\n\n        # Only injects headers when the subsegment for the outgoing\n        # calls are opened successfully.\n        subsegment = None\n        try:\n            subsegment = xray_recorder.current_subsegment()\n        except SegmentNotFoundException:\n            pass\n        if subsegment:\n            inject_trace_header(headers, subsegment)\n\n        if issubclass(instance.__class__, urllib3.connection.HTTPSConnection):\n            ssl_cxt = getattr(instance, 'ssl_context', None)\n        elif issubclass(instance.__class__, httplib.HTTPSConnection):\n            ssl_cxt = getattr(instance, '_context', None)\n        else:\n            # In this case, the patcher can't determine which module the connection instance is from.\n            # We default to it to check ssl_context but may be None so that the default scheme would be\n            # (and may falsely be) http.\n            ssl_cxt = getattr(instance, 'ssl_context', None)\n        scheme = 'https' if ssl_cxt and type(ssl_cxt).__name__ == 'SSLContext' else 'http'\n        xray_url = '{}://{}{}'.format(scheme, instance.host, url)\n        xray_data = _XRay_Data(method, instance.host, xray_url)\n        setattr(instance, _XRAY_PROP, xray_data)\n\n        # we add a segment here in case connect fails\n        
return xray_recorder.record_subsegment(\n            wrapped, instance, args, kwargs,\n            name=get_hostname(xray_data.url),\n            namespace='remote',\n            meta_processor=http_send_request_processor\n        )\n\n    return decompose_args(*args, **kwargs)\n\n\ndef http_read_processor(wrapped, instance, args, kwargs, return_value,\n                        exception, subsegment, stack):\n    xray_data = getattr(instance, _XRAY_PROP, None)\n    if not xray_data:\n        return\n\n    # we don't delete the attr as we can have multiple reads\n    subsegment.put_http_meta(http.METHOD, xray_data.method)\n    subsegment.put_http_meta(http.URL, strip_url(xray_data.url))\n    subsegment.put_http_meta(http.STATUS, instance.status)\n\n    if exception:\n        subsegment.add_exception(exception, stack)\n\n\ndef _xray_traced_http_client_read(wrapped, instance, args, kwargs):\n    xray_data = getattr(instance, _XRAY_PROP, None)\n    if not xray_data:\n        return wrapped(*args, **kwargs)\n\n    return xray_recorder.record_subsegment(\n        wrapped, instance, args, kwargs,\n        name=get_hostname(xray_data.url),\n        namespace='remote',\n        meta_processor=http_read_processor\n    )\n\n\ndef patch():\n    \"\"\"\n    patch the built-in `urllib/httplib/httplib.client` methods for tracing.\n    \"\"\"\n    if getattr(httplib, PATCH_FLAG, False):\n        return\n    # we set an attribute to avoid multiple wrapping\n    setattr(httplib, PATCH_FLAG, True)\n\n    wrapt.wrap_function_wrapper(\n        httplib_client_module,\n        'HTTPConnection._send_request',\n        _send_request\n    )\n\n    wrapt.wrap_function_wrapper(\n        httplib_client_module,\n        'HTTPConnection.getresponse',\n        _xray_traced_http_getresponse\n    )\n\n    wrapt.wrap_function_wrapper(\n        httplib_client_module,\n        'HTTPResponse.read',\n        _xray_traced_http_client_read\n    )\n\n\ndef unpatch():\n    \"\"\"\n    Unpatch any previously 
patched modules.\n    This operation is idempotent.\n    \"\"\"\n    _PATCHED_MODULES.discard('httplib')\n    setattr(httplib, PATCH_FLAG, False)\n    # _send_request encapsulates putrequest, putheader[s], and endheaders\n    unwrap(httplib.HTTPConnection, '_send_request')\n    unwrap(httplib.HTTPConnection, 'getresponse')\n    unwrap(httplib.HTTPResponse, 'read')\n"
  },
  {
    "path": "aws_xray_sdk/ext/httpx/__init__.py",
    "content": "from .patch import patch\n\n__all__ = ['patch']\n"
  },
  {
    "path": "aws_xray_sdk/ext/httpx/patch.py",
    "content": "import httpx\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.models import http\nfrom aws_xray_sdk.ext.util import inject_trace_header, get_hostname\n\n\ndef patch():\n    httpx.Client = _InstrumentedClient\n    httpx.AsyncClient = _InstrumentedAsyncClient\n    httpx._api.Client = _InstrumentedClient\n\n\nclass _InstrumentedClient(httpx.Client):\n    def __init__(self, *args, **kwargs):\n        super().__init__(*args, **kwargs)\n\n        self._original_transport = self._transport\n        self._transport = SyncInstrumentedTransport(self._transport)\n\n\nclass _InstrumentedAsyncClient(httpx.AsyncClient):\n    def __init__(self, *args, **kwargs):\n        super().__init__(*args, **kwargs)\n\n        self._original_transport = self._transport\n        self._transport = AsyncInstrumentedTransport(self._transport)\n\n\nclass SyncInstrumentedTransport(httpx.BaseTransport):\n    def __init__(self, transport: httpx.BaseTransport):\n        self._wrapped_transport = transport\n\n    def handle_request(self, request: httpx.Request) -> httpx.Response:\n        with xray_recorder.in_subsegment(\n            get_hostname(str(request.url)), namespace=\"remote\"\n        ) as subsegment:\n            if subsegment is not None:\n                subsegment.put_http_meta(http.METHOD, request.method)\n                subsegment.put_http_meta(\n                    http.URL,\n                    str(request.url.copy_with(password=None, query=None, fragment=None)),\n                )\n                inject_trace_header(request.headers, subsegment)\n\n            response = self._wrapped_transport.handle_request(request)\n            if subsegment is not None:\n                subsegment.put_http_meta(http.STATUS, response.status_code)\n            return response\n\n\nclass AsyncInstrumentedTransport(httpx.AsyncBaseTransport):\n    def __init__(self, transport: httpx.AsyncBaseTransport):\n        self._wrapped_transport = transport\n\n    async 
def handle_async_request(self, request: httpx.Request) -> httpx.Response:\n        async with xray_recorder.in_subsegment_async(\n            get_hostname(str(request.url)), namespace=\"remote\"\n        ) as subsegment:\n            if subsegment is not None:\n                subsegment.put_http_meta(http.METHOD, request.method)\n                subsegment.put_http_meta(\n                    http.URL,\n                    str(request.url.copy_with(password=None, query=None, fragment=None)),\n                )\n                inject_trace_header(request.headers, subsegment)\n\n            response = await self._wrapped_transport.handle_async_request(request)\n            if subsegment is not None:\n                subsegment.put_http_meta(http.STATUS, response.status_code)\n            return response\n"
  },
  {
    "path": "aws_xray_sdk/ext/mysql/__init__.py",
    "content": "from .patch import patch\n\n\n__all__ = ['patch']\n"
  },
  {
    "path": "aws_xray_sdk/ext/mysql/patch.py",
    "content": "import wrapt\nimport mysql.connector\n\nfrom aws_xray_sdk.ext.dbapi2 import XRayTracedConn\n\n\nMYSQL_ATTR = {\n    '_host': 'name',\n    '_user': 'user',\n}\n\n\ndef patch():\n\n    wrapt.wrap_function_wrapper(\n        'mysql.connector',\n        'connect',\n        _xray_traced_connect\n    )\n\n    # patch alias\n    if hasattr(mysql.connector, 'Connect'):\n        mysql.connector.Connect = mysql.connector.connect\n\n\ndef _xray_traced_connect(wrapped, instance, args, kwargs):\n\n    conn = wrapped(*args, **kwargs)\n    meta = {}\n\n    for attr, key in MYSQL_ATTR.items():\n        if hasattr(conn, attr):\n            meta[key] = getattr(conn, attr)\n\n    if hasattr(conn, '_server_version'):\n        version = sanitize_db_ver(getattr(conn, '_server_version'))\n        if version:\n            meta['database_version'] = version\n\n    return XRayTracedConn(conn, meta)\n\n\ndef sanitize_db_ver(raw):\n\n    if not raw or not isinstance(raw, tuple):\n        return raw\n\n    return '.'.join(str(num) for num in raw)\n"
  },
  {
    "path": "aws_xray_sdk/ext/pg8000/README.md",
    "content": "## Requirements\n\nOnly compatible with `pg8000 <= 1.20.0`."
  },
  {
    "path": "aws_xray_sdk/ext/pg8000/__init__.py",
    "content": "from .patch import patch, unpatch\n\n\n__all__ = ['patch', 'unpatch']\n"
  },
  {
    "path": "aws_xray_sdk/ext/pg8000/patch.py",
    "content": "import pg8000\nimport wrapt\n\nfrom aws_xray_sdk.ext.dbapi2 import XRayTracedConn\nfrom aws_xray_sdk.core.patcher import _PATCHED_MODULES\nfrom aws_xray_sdk.ext.util import unwrap\n\n\ndef patch():\n\n    wrapt.wrap_function_wrapper(\n        'pg8000',\n        'connect',\n        _xray_traced_connect\n    )\n\n\ndef _xray_traced_connect(wrapped, instance, args, kwargs):\n\n    conn = wrapped(*args, **kwargs)\n    meta = {\n        'database_type': 'PostgreSQL',\n        'user': conn.user.decode('utf-8'),\n        'driver_version': 'Pg8000'\n    }\n\n    if hasattr(conn, '_server_version'):\n        version = getattr(conn, '_server_version')\n        if version:\n            meta['database_version'] = str(version)\n\n    return XRayTracedConn(conn, meta)\n\n\ndef unpatch():\n    \"\"\"\n    Unpatch any previously patched modules.\n    This operation is idempotent.\n    \"\"\"\n    _PATCHED_MODULES.discard('pg8000')\n    unwrap(pg8000, 'connect')\n"
  },
  {
    "path": "aws_xray_sdk/ext/psycopg/__init__.py",
    "content": "from .patch import patch\n\n\n__all__ = ['patch']\n"
  },
  {
    "path": "aws_xray_sdk/ext/psycopg/patch.py",
    "content": "import wrapt\nfrom operator import methodcaller\n\nfrom aws_xray_sdk.ext.dbapi2 import XRayTracedConn\n\n\ndef patch():\n    wrapt.wrap_function_wrapper(\n        'psycopg',\n        'connect',\n        _xray_traced_connect\n    )\n\n    wrapt.wrap_function_wrapper(\n        'psycopg_pool.pool',\n        'ConnectionPool._connect',\n        _xray_traced_connect\n    )\n\n\ndef _xray_traced_connect(wrapped, instance, args, kwargs):\n    conn = wrapped(*args, **kwargs)\n    parameterized_dsn = {c[0]: c[-1] for c in map(methodcaller('split', '='), conn.info.dsn.split(' '))}\n    meta = {\n        'database_type': 'PostgreSQL',\n        'url': 'postgresql://{}@{}:{}/{}'.format(\n            parameterized_dsn.get('user', 'unknown'),\n            parameterized_dsn.get('host', 'unknown'),\n            parameterized_dsn.get('port', 'unknown'),\n            parameterized_dsn.get('dbname', 'unknown'),\n        ),\n        'user': parameterized_dsn.get('user', 'unknown'),\n        'database_version': str(conn.info.server_version),\n        'driver_version': 'Psycopg 3'\n    }\n\n    return XRayTracedConn(conn, meta)\n"
  },
  {
    "path": "aws_xray_sdk/ext/psycopg2/__init__.py",
    "content": "from .patch import patch\n\n\n__all__ = ['patch']\n"
  },
  {
    "path": "aws_xray_sdk/ext/psycopg2/patch.py",
    "content": "import copy\nimport re\nimport wrapt\nfrom operator import methodcaller\n\nfrom aws_xray_sdk.ext.dbapi2 import XRayTracedConn, XRayTracedCursor\n\n\ndef patch():\n    wrapt.wrap_function_wrapper(\n        'psycopg2',\n        'connect',\n        _xray_traced_connect\n    )\n    wrapt.wrap_function_wrapper(\n        'psycopg2.extensions',\n        'register_type',\n        _xray_register_type_fix\n    )\n    wrapt.wrap_function_wrapper(\n        'psycopg2.extensions',\n        'quote_ident',\n        _xray_register_type_fix\n    )\n\n    wrapt.wrap_function_wrapper(\n        'psycopg2.extras',\n        'register_default_jsonb',\n        _xray_register_default_jsonb_fix\n    )\n\n\ndef _xray_traced_connect(wrapped, instance, args, kwargs):\n    conn = wrapped(*args, **kwargs)\n    parameterized_dsn = {c[0]: c[-1] for c in map(methodcaller('split', '='), conn.dsn.split(' '))}\n    meta = {\n        'database_type': 'PostgreSQL',\n        'url': 'postgresql://{}@{}:{}/{}'.format(\n            parameterized_dsn.get('user', 'unknown'),\n            parameterized_dsn.get('host', 'unknown'),\n            parameterized_dsn.get('port', 'unknown'),\n            parameterized_dsn.get('dbname', 'unknown'),\n        ),\n        'user': parameterized_dsn.get('user', 'unknown'),\n        'database_version': str(conn.server_version),\n        'driver_version': 'Psycopg 2'\n    }\n\n    return XRayTracedConn(conn, meta)\n\n\ndef _xray_register_type_fix(wrapped, instance, args, kwargs):\n    \"\"\"Send the actual connection or curser to register type.\"\"\"\n    our_args = list(copy.copy(args))\n    if len(our_args) == 2 and isinstance(our_args[1], (XRayTracedConn, XRayTracedCursor)):\n        our_args[1] = our_args[1].__wrapped__\n\n    return wrapped(*our_args, **kwargs)\n\n\ndef _xray_register_default_jsonb_fix(wrapped, instance, args, kwargs):\n    our_kwargs = dict()\n    for key, value in kwargs.items():\n        if key == \"conn_or_curs\" and isinstance(value, 
(XRayTracedConn, XRayTracedCursor)):\n            # unwrap the connection or cursor to be sent to register_default_jsonb\n            value = value.__wrapped__\n        our_kwargs[key] = value\n\n    return wrapped(*args, **our_kwargs)\n"
  },
  {
    "path": "aws_xray_sdk/ext/pymongo/__init__.py",
    "content": "# Copyright © 2018 Clarity Movement Co. All rights reserved.\nfrom .patch import patch\n\n__all__ = ['patch']\n"
  },
  {
    "path": "aws_xray_sdk/ext/pymongo/patch.py",
    "content": "# Copyright © 2018 Clarity Movement Co. All rights reserved.\nfrom pymongo import monitoring\nfrom aws_xray_sdk.core import xray_recorder\n\n\nclass XrayCommandListener(monitoring.CommandListener):\n    \"\"\"\n    A listener that traces all pymongo db commands to AWS Xray.\n    Creates a subsegment for each mongo db conmmand.\n\n    name: 'mydb@127.0.0.1:27017'\n    records all available information provided by pymongo,\n    except for `command` and `reply`. They may contain business secrets.\n    If you insist to record them, specify `record_full_documents=True`.\n    \"\"\"\n\n    def __init__(self, record_full_documents):\n        super().__init__()\n        self.record_full_documents = record_full_documents\n\n    def started(self, event):\n        host, port = event.connection_id\n        host_and_port_str = f'{host}:{port}'\n\n        subsegment = xray_recorder.begin_subsegment(\n            f'{event.database_name}@{host_and_port_str}', 'remote')\n        subsegment.put_annotation('mongodb_command_name', event.command_name)\n        subsegment.put_annotation('mongodb_connection_id', host_and_port_str)\n        subsegment.put_annotation('mongodb_database_name', event.database_name)\n        subsegment.put_annotation('mongodb_operation_id', event.operation_id)\n        subsegment.put_annotation('mongodb_request_id', event.request_id)\n        if self.record_full_documents:\n            subsegment.put_metadata('mongodb_command', event.command)\n\n    def succeeded(self, event):\n        subsegment = xray_recorder.current_subsegment()\n        subsegment.put_annotation('mongodb_duration_micros', event.duration_micros)\n        if self.record_full_documents:\n            subsegment.put_metadata('mongodb_reply', event.reply)\n        xray_recorder.end_subsegment()\n\n    def failed(self, event):\n        subsegment = xray_recorder.current_subsegment()\n        subsegment.add_fault_flag()\n        subsegment.put_annotation('mongodb_duration_micros', 
event.duration_micros)\n        subsegment.put_metadata('failure', event.failure)\n        xray_recorder.end_subsegment()\n\n\ndef patch(record_full_documents=False):\n    # ensure `patch()` is idempotent\n    if hasattr(monitoring, '_xray_enabled'):\n        return\n    setattr(monitoring, '_xray_enabled', True)\n    monitoring.register(XrayCommandListener(record_full_documents))\n"
  },
  {
    "path": "aws_xray_sdk/ext/pymysql/__init__.py",
    "content": "from .patch import patch, unpatch\n\n\n__all__ = ['patch', 'unpatch']\n"
  },
  {
    "path": "aws_xray_sdk/ext/pymysql/patch.py",
    "content": "import pymysql\nimport wrapt\n\nfrom aws_xray_sdk.ext.dbapi2 import XRayTracedConn\nfrom aws_xray_sdk.core.patcher import _PATCHED_MODULES\nfrom aws_xray_sdk.ext.util import unwrap\n\n\ndef patch():\n\n    wrapt.wrap_function_wrapper(\n        'pymysql',\n        'connect',\n        _xray_traced_connect\n    )\n\n    # patch alias\n    if hasattr(pymysql, 'Connect'):\n        pymysql.Connect = pymysql.connect\n\n\ndef _xray_traced_connect(wrapped, instance, args, kwargs):\n\n    conn = wrapped(*args, **kwargs)\n    meta = {\n        'database_type': 'MySQL',\n        'user': conn.user.decode('utf-8'),\n        'driver_version': 'PyMySQL'\n    }\n\n    if hasattr(conn, 'server_version'):\n        version = sanitize_db_ver(getattr(conn, 'server_version'))\n        if version:\n            meta['database_version'] = version\n\n    return XRayTracedConn(conn, meta)\n\n\ndef sanitize_db_ver(raw):\n\n    if not raw or not isinstance(raw, tuple):\n        return raw\n\n    return '.'.join(str(num) for num in raw)\n\n\ndef unpatch():\n    \"\"\"\n    Unpatch any previously patched modules.\n    This operation is idempotent.\n    \"\"\"\n    _PATCHED_MODULES.discard('pymysql')\n    unwrap(pymysql, 'connect')\n"
  },
  {
    "path": "aws_xray_sdk/ext/pynamodb/__init__.py",
    "content": "from .patch import patch\n\n__all__ = ['patch']\n"
  },
  {
    "path": "aws_xray_sdk/ext/pynamodb/patch.py",
    "content": "import json\nimport wrapt\nimport pynamodb\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.models import http\nfrom aws_xray_sdk.ext.boto_utils import _extract_whitelisted_params\n\nPYNAMODB4 = int(pynamodb.__version__.split('.')[0]) >= 4\n\nif PYNAMODB4:\n    import botocore.httpsession\nelse:\n    import botocore.vendored.requests.sessions\n\n\ndef patch():\n    \"\"\"Patch PynamoDB so it generates subsegements when calling DynamoDB.\"\"\"\n\n    if PYNAMODB4:\n        if hasattr(botocore.httpsession, '_xray_enabled'):\n            return\n        setattr(botocore.httpsession, '_xray_enabled', True)\n\n        module = 'botocore.httpsession'\n        name = 'URLLib3Session.send'\n    else:\n        if hasattr(botocore.vendored.requests.sessions, '_xray_enabled'):\n            return\n        setattr(botocore.vendored.requests.sessions, '_xray_enabled', True)\n\n        module = 'botocore.vendored.requests.sessions'\n        name = 'Session.send'\n\n    wrapt.wrap_function_wrapper(\n        module, name, _xray_traced_pynamodb,\n    )\n\n\ndef _xray_traced_pynamodb(wrapped, instance, args, kwargs):\n\n    # Check if it's a request to DynamoDB and return otherwise.\n    try:\n        service = args[0].headers['X-Amz-Target'].decode('utf-8').split('_')[0]\n    except KeyError:\n        return wrapped(*args, **kwargs)\n    if service.lower() != 'dynamodb':\n        return wrapped(*args, **kwargs)\n\n    return xray_recorder.record_subsegment(\n        wrapped, instance, args, kwargs,\n        name='dynamodb',\n        namespace='aws',\n        meta_processor=pynamodb_meta_processor,\n    )\n\n\ndef pynamodb_meta_processor(wrapped, instance, args, kwargs, return_value,\n                            exception, subsegment, stack):\n    operation_name = args[0].headers['X-Amz-Target'].decode('utf-8').split('.')[1]\n    region = args[0].url.split('.')[1]\n\n    aws_meta = {\n        'operation': operation_name,\n        'region': 
region\n    }\n\n    # in case of client timeout the return value will be empty\n    if return_value is not None:\n        aws_meta['request_id'] = return_value.headers.get('x-amzn-RequestId')\n        subsegment.put_http_meta(http.STATUS, return_value.status_code)\n\n    if exception:\n        subsegment.add_error_flag()\n        subsegment.add_exception(exception, stack, True)\n\n    if PYNAMODB4:\n        resp = json.loads(return_value.text) if return_value else None\n    else:\n        resp = return_value.json() if return_value else None\n    _extract_whitelisted_params(subsegment.name, operation_name, aws_meta,\n                                [None, json.loads(args[0].body.decode('utf-8'))],\n                                None, resp)\n\n    subsegment.set_aws(aws_meta)\n"
  },
  {
    "path": "aws_xray_sdk/ext/requests/__init__.py",
    "content": "from .patch import patch\n\n__all__ = ['patch']\n"
  },
  {
    "path": "aws_xray_sdk/ext/requests/patch.py",
    "content": "import wrapt\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.models import http\nfrom aws_xray_sdk.ext.util import inject_trace_header, strip_url, get_hostname\n\n\ndef patch():\n\n    wrapt.wrap_function_wrapper(\n        'requests',\n        'Session.request',\n        _xray_traced_requests\n    )\n\n    wrapt.wrap_function_wrapper(\n        'requests',\n        'Session.prepare_request',\n        _inject_header\n    )\n\n\ndef _xray_traced_requests(wrapped, instance, args, kwargs):\n\n    url = kwargs.get('url') or args[1]\n\n    return xray_recorder.record_subsegment(\n        wrapped, instance, args, kwargs,\n        name=get_hostname(url),\n        namespace='remote',\n        meta_processor=requests_processor,\n    )\n\n\ndef _inject_header(wrapped, instance, args, kwargs):\n    request = args[0]\n    headers = getattr(request, 'headers', {})\n    inject_trace_header(headers, xray_recorder.current_subsegment())\n    setattr(request, 'headers', headers)\n\n    return wrapped(*args, **kwargs)\n\n\ndef requests_processor(wrapped, instance, args, kwargs,\n                       return_value, exception, subsegment, stack):\n\n    method = kwargs.get('method') or args[0]\n    url = kwargs.get('url') or args[1]\n\n    subsegment.put_http_meta(http.METHOD, method)\n    subsegment.put_http_meta(http.URL, strip_url(url))\n\n    if return_value is not None:\n        subsegment.put_http_meta(http.STATUS, return_value.status_code)\n    elif exception:\n        subsegment.add_exception(exception, stack)\n"
  },
  {
    "path": "aws_xray_sdk/ext/resources/aws_para_whitelist.json",
    "content": "{\n  \"services\": {\n    \"sns\": {\n      \"operations\": {\n        \"Publish\": {\n          \"request_parameters\": [\n            \"TopicArn\"\n          ]\n        },\n        \"PublishBatch\": {\n          \"request_parameters\": [\n            \"TopicArn\"\n          ]\n        }\n      }\n    },\n    \"dynamodb\": {\n      \"operations\": {\n        \"BatchGetItem\": {\n          \"request_descriptors\": {\n            \"RequestItems\": {\n              \"map\": true,\n              \"get_keys\": true,\n              \"rename_to\": \"table_names\"\n            }\n          },\n          \"response_parameters\": [\n            \"ConsumedCapacity\"\n          ]\n        },\n        \"BatchWriteItem\": {\n          \"request_descriptors\": {\n            \"RequestItems\": {\n              \"map\": true,\n              \"get_keys\": true,\n              \"rename_to\": \"table_names\"\n            }\n          },\n          \"response_parameters\": [\n            \"ConsumedCapacity\",\n            \"ItemCollectionMetrics\"\n          ]\n        },\n        \"CreateTable\": {\n          \"request_parameters\": [\n            \"GlobalSecondaryIndexes\",\n            \"LocalSecondaryIndexes\",\n            \"ProvisionedThroughput\",\n            \"TableName\"\n          ]\n        },\n        \"DeleteItem\": {\n          \"request_parameters\": [\n            \"TableName\"\n          ],\n          \"response_parameters\": [\n            \"ConsumedCapacity\",\n            \"ItemCollectionMetrics\"\n          ]\n        },\n        \"DeleteTable\": {\n          \"request_parameters\": [\n            \"TableName\"\n          ]\n        },\n        \"DescribeTable\": {\n          \"request_parameters\": [\n            \"TableName\"\n          ]\n        },\n        \"GetItem\": {\n          \"request_parameters\": [\n            \"ConsistentRead\",\n            \"ProjectionExpression\",\n            \"TableName\"\n          ],\n          
\"response_parameters\": [\n            \"ConsumedCapacity\"\n          ]\n        },\n        \"ListTables\": {\n          \"request_parameters\": [\n            \"ExclusiveStartTableName\",\n            \"Limit\"\n          ],\n          \"response_descriptors\": {\n            \"TableNames\": {\n              \"list\": true,\n              \"get_count\": true,\n              \"rename_to\": \"table_count\"\n            }\n          }\n        },\n        \"PutItem\": {\n          \"request_parameters\": [\n            \"TableName\"\n          ],\n          \"response_parameters\": [\n            \"ConsumedCapacity\",\n            \"ItemCollectionMetrics\"\n          ]\n        },\n        \"Query\": {\n          \"request_parameters\": [\n            \"AttributesToGet\",\n            \"ConsistentRead\",\n            \"IndexName\",\n            \"Limit\",\n            \"ProjectionExpression\",\n            \"ScanIndexForward\",\n            \"Select\",\n            \"TableName\"\n          ],\n          \"response_parameters\": [\n            \"ConsumedCapacity\"\n          ]\n        },\n        \"Scan\": {\n          \"request_parameters\": [\n            \"AttributesToGet\",\n            \"ConsistentRead\",\n            \"IndexName\",\n            \"Limit\",\n            \"ProjectionExpression\",\n            \"Segment\",\n            \"Select\",\n            \"TableName\",\n            \"TotalSegments\"\n          ],\n          \"response_parameters\": [\n            \"ConsumedCapacity\",\n            \"Count\",\n            \"ScannedCount\"\n          ]\n        },\n        \"UpdateItem\": {\n          \"request_parameters\": [\n            \"TableName\"\n          ],\n          \"response_parameters\": [\n            \"ConsumedCapacity\",\n            \"ItemCollectionMetrics\"\n          ]\n        },\n        \"UpdateTable\": {\n          \"request_parameters\": [\n            \"AttributeDefinitions\",\n            \"GlobalSecondaryIndexUpdates\",\n         
   \"ProvisionedThroughput\",\n            \"TableName\"\n          ]\n        }\n      }\n    },\n    \"sqs\": {\n      \"operations\": {\n        \"AddPermission\": {\n          \"request_parameters\": [\n            \"Label\",\n            \"QueueUrl\"\n          ]\n        },\n        \"ChangeMessageVisibility\": {\n          \"request_parameters\": [\n            \"QueueUrl\",\n            \"VisibilityTimeout\"\n          ]\n        },\n        \"ChangeMessageVisibilityBatch\": {\n          \"request_parameters\": [\n            \"QueueUrl\"\n          ],\n          \"response_parameters\": [\n            \"Failed\"\n          ]\n        },\n        \"CreateQueue\": {\n          \"request_parameters\": [\n            \"Attributes\",\n            \"QueueName\"\n          ]\n        },\n        \"DeleteMessage\": {\n          \"request_parameters\": [\n            \"QueueUrl\"\n          ]\n        },\n        \"DeleteMessageBatch\": {\n          \"request_parameters\": [\n            \"QueueUrl\"\n          ],\n          \"response_parameters\": [\n            \"Failed\"\n          ]\n        },\n        \"DeleteQueue\": {\n          \"request_parameters\": [\n            \"QueueUrl\"\n          ]\n        },\n        \"GetQueueAttributes\": {\n          \"request_parameters\": [\n            \"QueueUrl\"\n          ],\n          \"response_parameters\": [\n            \"Attributes\"\n          ]\n        },\n        \"GetQueueUrl\": {\n          \"request_parameters\": [\n            \"QueueName\",\n            \"QueueOwnerAWSAccountId\"\n          ],\n          \"response_parameters\": [\n            \"QueueUrl\"\n          ]\n        },\n        \"ListDeadLetterSourceQueues\": {\n          \"request_parameters\": [\n            \"QueueUrl\"\n          ],\n          \"response_parameters\": [\n            \"QueueUrls\"\n          ]\n        },\n        \"ListQueues\": {\n          \"request_parameters\": [\n            \"QueueNamePrefix\"\n          ],\n      
    \"response_descriptors\": {\n            \"QueueUrls\": {\n              \"list\": true,\n              \"get_count\": true,\n              \"rename_to\": \"queue_count\"\n            }\n          }\n        },\n        \"PurgeQueue\": {\n          \"request_parameters\": [\n            \"QueueUrl\"\n          ]\n        },\n        \"ReceiveMessage\": {\n          \"request_parameters\": [\n            \"AttributeNames\",\n            \"MaxNumberOfMessages\",\n            \"MessageAttributeNames\",\n            \"QueueUrl\",\n            \"VisibilityTimeout\",\n            \"WaitTimeSeconds\"\n          ],\n          \"response_descriptors\": {\n            \"Messages\": {\n              \"list\": true,\n              \"get_count\": true,\n              \"rename_to\": \"message_count\"\n            }\n          }\n        },\n        \"RemovePermission\": {\n          \"request_parameters\": [\n            \"QueueUrl\"\n          ]\n        },\n        \"SendMessage\": {\n          \"request_parameters\": [\n            \"DelaySeconds\",\n            \"QueueUrl\"\n          ],\n          \"request_descriptors\": {\n            \"MessageAttributes\": {\n              \"map\": true,\n              \"get_keys\": true,\n              \"rename_to\": \"message_attribute_names\"\n            }\n          },\n          \"response_parameters\": [\n            \"MessageId\"\n          ]\n        },\n        \"SendMessageBatch\": {\n          \"request_parameters\": [\n            \"QueueUrl\"\n          ],\n          \"request_descriptors\": {\n            \"Entries\": {\n              \"list\": true,\n              \"get_count\": true,\n              \"rename_to\": \"message_count\"\n            }\n          },\n          \"response_descriptors\": {\n            \"Failed\": {\n              \"list\": true,\n              \"get_count\": true,\n              \"rename_to\": \"failed_count\"\n            },\n            \"Successful\": {\n              \"list\": true,\n    
          \"get_count\": true,\n              \"rename_to\": \"successful_count\"\n            }\n          }\n        },\n        \"SetQueueAttributes\": {\n          \"request_parameters\": [\n            \"QueueUrl\"\n          ],\n          \"request_descriptors\": {\n            \"Attributes\": {\n              \"map\": true,\n              \"get_keys\": true,\n              \"rename_to\": \"attribute_names\"\n            }\n          }\n        }\n      }\n    },\n    \"lambda\": {\n      \"operations\": {\n        \"Invoke\": {\n          \"request_parameters\": [\n            \"FunctionName\",\n            \"InvocationType\",\n            \"LogType\",\n            \"Qualifier\"\n          ],\n          \"response_parameters\": [\n            \"FunctionError\",\n            \"StatusCode\"\n          ]\n        },\n        \"InvokeAsync\": {\n          \"request_parameters\": [\n            \"FunctionName\"\n          ],\n          \"response_parameters\": [\n            \"Status\"\n          ]\n        }\n      }\n    },\n    \"s3\": {\n      \"operations\": {\n        \"CopyObject\": {\n          \"request_parameters\": [\n            \"CopySource\",\n            \"Bucket\",\n            \"Key\"\n          ]\n        },\n        \"GetObject\": {\n          \"request_parameters\": [\n            \"Key\",\n            \"VersionId\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"PutObject\": {\n          \"request_parameters\": [\n            \"Key\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GetObjectAcl\": {\n          \"request_parameters\": [\n            \"Key\",\n            \"VersionId\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": 
\"bucket_name\"\n            }\n          }\n        },\n        \"CreateBucket\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"ListObjectsV2\": {\n          \"request_parameters\": [\n            \"Prefix\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"ListObjects\": {\n          \"request_parameters\": [\n            \"Prefix\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GetObjectTagging\": {\n          \"request_parameters\": [\n            \"Key\",\n            \"VersionId\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"PutObjectTagging\": {\n          \"request_parameters\": [\n            \"Key\",\n            \"VersionId\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"ListVersions\": {\n          \"request_parameters\": [\n            \"Prefix\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"SetObjectAcl\": {\n          \"request_parameters\": [\n            \"Key\",\n            \"VersionId\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GetBucketAcl\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n          
  }\n          }\n        },\n        \"PutBucketAcl\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"HeadBucket\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"UploadPart\": {\n          \"request_parameters\": [\n            \"Key\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"DeleteObject\": {\n          \"request_parameters\": [\n            \"Key\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"DeleteBucket\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"DeleteObjects\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"DeleteVersion\": {\n          \"request_parameters\": [\n            \"Key\",\n            \"VersionId\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GetBucketPolicy\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"PutBucketPolicy\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"ListParts\": {\n          \"request_parameters\": [\n            \"Key\"\n          ],\n    
      \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"RestoreObject\": {\n          \"request_parameters\": [\n            \"Key\",\n            \"VersionId\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"RestoreObjectV2\": {\n          \"request_parameters\": [\n            \"Key\",\n            \"VersionId\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"PutBucketNotificationConfiguration\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"DeleteBucketLifecycleConfiguration\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GetBucketNotificationConfiguration\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"DeleteBucketCors\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"PutBucketCors\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GetBucketCors\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"ListBucketInventoryConfigurations\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              
\"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GetBucketReplicationConfiguration\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"PutBucketReplicationConfiguration\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"DeleteBucketReplicationConfiguration\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"DeleteBucketAnalyticsConfiguration\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"DeleteBucketInventoryConfiguration\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"ListBucketAnalyticsConfigurations\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"DeleteObjectTagging\": {\n          \"request_parameters\": [\n            \"Key\",\n            \"VersionId\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"PutBucketVersioning\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GetBucketVersioning\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GetBucketWebsite\": {\n          
\"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GetBucketLifecycleConfiguration\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"SetBucketLifecycleConfiguration\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GetBucketTagging\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"PutBucketTagging\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GetBucketLocation\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GetBucketLogging\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"ListMultipartUploads\": {\n          \"request_parameters\": [\n            \"Prefix\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"DeleteBucketPolicy\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"DeleteBucketEncryption\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"PutBucketAccelerateConfiguration\": {\n          \"request_descriptors\": 
{\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"PutBucketWebsite\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"CompleteMultipartUpload\": {\n          \"request_parameters\": [\n            \"Key\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"InitiateMultipartUpload\": {\n          \"request_parameters\": [\n            \"Key\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"PutBucketEncryption\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"SetBucketLogging\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"DeleteBucketWebsite\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GetBucketEncryption\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"AbortMultipartUpload\": {\n          \"request_parameters\": [\n            \"Key\"\n          ],\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GeneratePresignedUrl\": {\n          \"request_parameters\": [\n            \"Key\",\n            \"VersionId\"\n          ],\n          
\"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"DeleteBucketTagging\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GetBucketAccelerateConfiguration\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GetBucketMetricsConfiguration\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"ListBucketMetricsConfigurations\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"PutBucketInventoryConfiguration\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"PutBucketMetricsConfiguration\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"PutBucketAnalyticsConfiguration\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"DeleteBucketMetricsConfiguration\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GetBucketAnalyticsConfiguration\": {\n          \"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        },\n        \"GetBucketInventoryConfiguration\": {\n          
\"request_descriptors\": {\n            \"Bucket\": {\n              \"rename_to\": \"bucket_name\"\n            }\n          }\n        }\n      }\n    },\n    \"runtime.sagemaker\": {\n      \"operations\": {\n        \"InvokeEndpoint\": {\n          \"request_parameters\": [\n            \"EndpointName\"\n          ]\n        }\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "aws_xray_sdk/ext/sqlalchemy/__init__.py",
    "content": ""
  },
  {
    "path": "aws_xray_sdk/ext/sqlalchemy/query.py",
    "content": "from builtins import super\nfrom sqlalchemy.orm.query import Query\nfrom sqlalchemy.orm.session import Session, sessionmaker\nfrom .util.decorators import xray_on_call, decorate_all_functions\n\n\n@decorate_all_functions(xray_on_call)\nclass XRaySession(Session):\n    pass\n\n\n@decorate_all_functions(xray_on_call)\nclass XRayQuery(Query):\n    pass\n\n\n@decorate_all_functions(xray_on_call)\nclass XRaySessionMaker(sessionmaker):\n    def __init__(self, bind=None, class_=XRaySession, autoflush=True,\n                 autocommit=False,\n                 expire_on_commit=True,\n                 info=None, **kw):\n        kw['query_cls'] = XRayQuery\n        super().__init__(bind, class_, autoflush, autocommit, expire_on_commit,\n                         info, **kw)\n"
  },
  {
    "path": "aws_xray_sdk/ext/sqlalchemy/util/__init__.py",
    "content": ""
  },
  {
    "path": "aws_xray_sdk/ext/sqlalchemy/util/decorators.py",
    "content": "import re\nimport types\nfrom urllib.parse import urlparse, uses_netloc\n\nfrom sqlalchemy.engine.base import Connection\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.ext.util import strip_url\n\n\ndef decorate_all_functions(function_decorator):\n    def decorator(cls):\n        for c in cls.__bases__:\n            for name, obj in vars(c).items():\n                if name.startswith(\"_\"):\n                    continue\n                if isinstance(obj, types.FunctionType):\n                    try:\n                        obj = obj.__func__  # unwrap Python 2 unbound method\n                    except AttributeError:\n                        pass  # not needed in Python 3\n                    setattr(c, name, function_decorator(c, obj))\n        return cls\n    return decorator\n\n\ndef xray_on_call(cls, func):\n    def wrapper(*args, **kw):\n        from ..query import XRayQuery, XRaySession\n        try:\n            from ...flask_sqlalchemy.query import XRaySignallingSession\n            has_sql_alchemy = True\n        except ImportError:\n            has_sql_alchemy = False\n\n        class_name = str(cls.__module__)\n        c = xray_recorder._context\n        sql = None\n        subsegment = None\n        if class_name == \"sqlalchemy.orm.session\":\n            for arg in args:\n                if isinstance(arg, XRaySession):\n                    sql = parse_bind(arg.bind)\n                if has_sql_alchemy and isinstance(arg, XRaySignallingSession):\n                    sql = parse_bind(arg.bind)\n        if class_name == 'sqlalchemy.orm.query':\n            for arg in args:\n                if isinstance(arg, XRayQuery):\n                    try:\n                        sql = parse_bind(arg.session.bind)\n                        if xray_recorder.stream_sql:\n                            sql['sanitized_query'] = str(arg)\n                    except Exception:\n                        sql = None\n        if sql is 
not None:\n            if getattr(c._local, 'entities', None) is not None:\n                # Strip URL of ? and following text\n                sub_name = strip_url(sql['url'])\n                subsegment = xray_recorder.begin_subsegment(sub_name, namespace='remote')\n            else:\n                subsegment = None\n\n        try:\n            res = func(*args, **kw)\n        finally:\n            if subsegment is not None:\n                subsegment.set_sql(sql)\n                subsegment.put_annotation(\"sqlalchemy\", class_name+'.'+func.__name__)\n                xray_recorder.end_subsegment()\n        return res\n    return wrapper\n# URL Parse output\n# scheme\t0\tURL scheme specifier\tscheme parameter\n# netloc\t1\tNetwork location part\tempty string\n# path\t2\tHierarchical path\tempty string\n# query\t3\tQuery component\tempty string\n# fragment\t4\tFragment identifier\tempty string\n# username\t \tUser name\tNone\n# password\t \tPassword\tNone\n# hostname\t \tHost name (lower case)\tNone\n# port\t \tPort number as integer, if present\tNone\n#\n# XRAY Trace SQL metaData Sample\n# \"sql\" : {\n#     \"url\": \"jdbc:postgresql://aawijb5u25wdoy.cpamxznpdoq8.us-west-2.rds.amazonaws.com:5432/ebdb\",\n#     \"preparation\": \"statement\",\n#     \"database_type\": \"PostgreSQL\",\n#     \"database_version\": \"9.5.4\",\n#     \"driver_version\": \"PostgreSQL 9.4.1211.jre7\",\n#     \"user\" : \"dbuser\",\n#     \"sanitized_query\" : \"SELECT  *  FROM  customers  WHERE  customer_id=?;\"\n#   }\ndef parse_bind(bind):\n    \"\"\"Parses a connection string and creates SQL trace metadata\"\"\"\n    if isinstance(bind, Connection):\n        engine = bind.engine\n    else:\n        engine = bind\n    m = re.match(r\"Engine\\((.*?)\\)\", str(engine))\n    if m is not None:\n        u = urlparse(m.group(1))\n        # Add Scheme to uses_netloc or // will be missing from url.\n        uses_netloc.append(u.scheme)\n        safe_url = \"\"\n        if u.password is 
None:\n            safe_url = u.geturl()\n        else:\n            # Strip password from URL\n            host_info = u.netloc.rpartition('@')[-1]\n            parts = u._replace(netloc='{}@{}'.format(u.username, host_info))\n            safe_url = parts.geturl()\n        sql = {}\n        sql['database_type'] = u.scheme\n        sql['url'] = safe_url\n        if u.username is not None:\n            sql['user'] = \"{}\".format(u.username)\n    return sql\n"
  },
  {
    "path": "aws_xray_sdk/ext/sqlalchemy_core/__init__.py",
    "content": "from .patch import patch, unpatch\n\n__all__ = ['patch', 'unpatch']"
  },
  {
    "path": "aws_xray_sdk/ext/sqlalchemy_core/patch.py",
    "content": "import logging\nimport sys\nfrom urllib.parse import urlparse, uses_netloc, quote_plus\n\nimport wrapt\nfrom sqlalchemy.sql.expression import ClauseElement\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.patcher import _PATCHED_MODULES\nfrom aws_xray_sdk.core.utils import stacktrace\nfrom aws_xray_sdk.ext.util import unwrap\n\n\ndef _sql_meta(engine_instance, args):\n    try:\n        metadata = {}\n        # Workaround for https://github.com/sqlalchemy/sqlalchemy/issues/10662\n        # sqlalchemy.engine.url.URL's __repr__ does not url encode username nor password.\n        # This will continue to work once sqlalchemy fixes the bug.\n        sa_url = engine_instance.engine.url\n        username = sa_url.username\n        sa_url = sa_url._replace(username=None, password=None)\n        url = urlparse(str(sa_url))\n        name = url.netloc\n        if username:\n            # Restore url encoded username\n            quoted_username = quote_plus(username)\n            url = url._replace(netloc='{}@{}'.format(quoted_username, url.netloc))\n        # Add Scheme to uses_netloc or // will be missing from url.\n        uses_netloc.append(url.scheme)\n        metadata['url'] = url.geturl()\n        metadata['user'] = url.username\n        metadata['database_type'] = engine_instance.engine.name\n        try:\n            version = getattr(engine_instance.dialect, '{}_version'.format(engine_instance.engine.driver))\n            version_str = '.'.join(map(str, version))\n            metadata['driver_version'] = \"{}-{}\".format(engine_instance.engine.driver, version_str)\n        except AttributeError:\n            metadata['driver_version'] = engine_instance.engine.driver\n        if engine_instance.dialect.server_version_info is not None:\n            metadata['database_version'] = '.'.join(map(str, engine_instance.dialect.server_version_info))\n        if xray_recorder.stream_sql:\n            try:\n                if 
isinstance(args[0], ClauseElement):\n                    metadata['sanitized_query'] = str(args[0].compile(engine_instance.engine))\n                else:\n                    metadata['sanitized_query'] = str(args[0])\n            except Exception:\n                logging.getLogger(__name__).exception('Error getting the sanitized query')\n    except Exception:\n        metadata = None\n        name = None\n        logging.getLogger(__name__).exception('Error parsing sql metadata.')\n    return name, metadata\n\n\ndef _xray_traced_sqlalchemy_execute(wrapped, instance, args, kwargs):\n    return _process_request(wrapped, instance, args, kwargs)\n\n\ndef _xray_traced_sqlalchemy_session(wrapped, instance, args, kwargs):\n    return _process_request(wrapped, instance.bind, args, kwargs)\n\n\ndef _process_request(wrapped, engine_instance, args, kwargs):\n    name, sql = _sql_meta(engine_instance, args)\n    if sql is not None:\n        subsegment = xray_recorder.begin_subsegment(name, namespace='remote')\n    else:\n        subsegment = None\n    try:\n        res = wrapped(*args, **kwargs)\n    except Exception:\n        if subsegment is not None:\n            exception = sys.exc_info()[1]\n            stack = stacktrace.get_stacktrace(limit=xray_recorder._max_trace_back)\n            subsegment.add_exception(exception, stack)\n        raise\n    finally:\n        if subsegment is not None:\n            subsegment.set_sql(sql)\n            xray_recorder.end_subsegment()\n    return res\n\n\ndef patch():\n    wrapt.wrap_function_wrapper(\n        'sqlalchemy.engine.base',\n        'Connection.execute',\n        _xray_traced_sqlalchemy_execute\n    )\n\n    wrapt.wrap_function_wrapper(\n        'sqlalchemy.orm.session',\n        'Session.execute',\n        _xray_traced_sqlalchemy_session\n    )\n\n\ndef unpatch():\n    \"\"\"\n    Unpatch any previously patched modules.\n    This operation is idempotent.\n    \"\"\"\n    _PATCHED_MODULES.discard('sqlalchemy_core')\n    
import sqlalchemy\n    unwrap(sqlalchemy.engine.base.Connection, 'execute')\n    unwrap(sqlalchemy.orm.session.Session, 'execute')\n"
  },
  {
    "path": "aws_xray_sdk/ext/sqlite3/__init__.py",
    "content": "from .patch import patch\n\n\n__all__ = ['patch']\n"
  },
  {
    "path": "aws_xray_sdk/ext/sqlite3/patch.py",
    "content": "import wrapt\nimport sqlite3\n\nfrom aws_xray_sdk.ext.dbapi2 import XRayTracedConn\n\n\ndef patch():\n\n    wrapt.wrap_function_wrapper(\n        'sqlite3',\n        'connect',\n        _xray_traced_connect\n    )\n\n\ndef _xray_traced_connect(wrapped, instance, args, kwargs):\n\n    conn = wrapped(*args, **kwargs)\n\n    meta = {}\n    meta['name'] = args[0]\n    meta['database_version'] = sqlite3.sqlite_version\n\n    traced_conn = XRayTracedSQLite(conn, meta)\n\n    return traced_conn\n\n\nclass XRayTracedSQLite(XRayTracedConn):\n\n    def execute(self, *args, **kwargs):\n        return self.cursor().execute(*args, **kwargs)\n\n    def executemany(self, *args, **kwargs):\n        return self.cursor().executemany(*args, **kwargs)\n"
  },
  {
    "path": "aws_xray_sdk/ext/util.py",
    "content": "import re\nfrom urllib.parse import urlparse\n\nimport wrapt\n\nfrom aws_xray_sdk.core.models import http\nfrom aws_xray_sdk.core.models.trace_header import TraceHeader\n\nfirst_cap_re = re.compile('(.)([A-Z][a-z]+)')\nall_cap_re = re.compile('([a-z0-9])([A-Z])')\nUNKNOWN_HOSTNAME = \"UNKNOWN HOST\"\n\n\ndef inject_trace_header(headers, entity):\n    \"\"\"\n    Extract trace id, entity id and sampling decision\n    from the input entity and inject this information\n    to headers.\n\n    :param dict headers: http headers to inject\n    :param Entity entity: trace entity that the trace header\n        value generated from.\n    \"\"\"\n    if not entity:\n        return\n\n    if hasattr(entity, 'type') and entity.type == 'subsegment':\n        header = entity.parent_segment.get_origin_trace_header()\n    else:\n        header = entity.get_origin_trace_header()\n    data = header.data if header else None\n    to_insert = TraceHeader(\n        root=entity.trace_id,\n        parent=entity.id,\n        sampled=entity.sampled,\n        data=data,\n    )\n\n    value = to_insert.to_header_str()\n\n    headers[http.XRAY_HEADER] = value\n\n\ndef calculate_sampling_decision(trace_header, recorder, sampling_req):\n    \"\"\"\n    Return 1 or the matched rule name if should sample and 0 if should not.\n    The sampling decision coming from ``trace_header`` always has\n    the highest precedence. If the ``trace_header`` doesn't contain\n    sampling decision then it checks if sampling is enabled or not\n    in the recorder. If not enabled it returns 1. 
Otherwise it uses user\n    defined sampling rules to decide.\n    \"\"\"\n    if trace_header.sampled is not None and trace_header.sampled != '?':\n        return trace_header.sampled\n    elif not recorder.sampling:\n        return 1\n    else:\n        decision = recorder.sampler.should_trace(sampling_req)\n    return decision if decision else 0\n\n\ndef construct_xray_header(headers):\n    \"\"\"\n    Construct a ``TraceHeader`` object from dictionary headers\n    of the incoming request. This method should always return\n    a ``TraceHeader`` object regardless of tracing header's presence\n    in the incoming request.\n    \"\"\"\n    header_str = headers.get(http.XRAY_HEADER) or headers.get(http.ALT_XRAY_HEADER)\n    if header_str:\n        return TraceHeader.from_header_str(header_str)\n    else:\n        return TraceHeader()\n\n\ndef calculate_segment_name(host_name, recorder):\n    \"\"\"\n    Returns the segment name based on recorder configuration and\n    input host name. This is a helper generally used in web framework\n    middleware where a host name is available from incoming request's headers.\n    \"\"\"\n    if recorder.dynamic_naming:\n        return recorder.dynamic_naming.get_name(host_name)\n    else:\n        return recorder.service\n\n\ndef prepare_response_header(origin_header, segment):\n    \"\"\"\n    Prepare a trace header to be inserted into response\n    based on original header and the request segment.\n    \"\"\"\n    if origin_header and origin_header.sampled == '?':\n        new_header = TraceHeader(root=segment.trace_id,\n                                 sampled=segment.sampled)\n    else:\n        new_header = TraceHeader(root=segment.trace_id)\n\n    return new_header.to_header_str()\n\n\ndef to_snake_case(name):\n    \"\"\"\n    Convert the input string to snake-cased string.\n    \"\"\"\n    s1 = first_cap_re.sub(r'\\1_\\2', name)\n    # handle acronym words\n    return all_cap_re.sub(r'\\1_\\2', s1).lower()\n\n\n# ? 
is not a valid entity, and we don't want things after the ? for the segment name\ndef strip_url(url):\n    \"\"\"\n    Will generate a valid url string for use as a segment name\n    :param url: url to strip\n    :return: validated url string\n    \"\"\"\n    return url.partition('?')[0] if url else url\n\n\ndef get_hostname(url):\n    if url is None:\n        return UNKNOWN_HOSTNAME\n    url_parse = urlparse(url)\n    hostname = url_parse.hostname\n    if hostname is None:\n        return UNKNOWN_HOSTNAME\n    return hostname if hostname else url  # If hostname is none, we return the regular URL; indication of malformed url\n\n\ndef unwrap(obj, attr):\n    \"\"\"\n    Will unwrap a `wrapt` attribute\n    :param obj: base object\n    :param attr: attribute on `obj` to unwrap\n    \"\"\"\n    f = getattr(obj, attr, None)\n    if f and hasattr(f, '__wrapped__'):\n        setattr(obj, attr, f.__wrapped__)\n"
  },
  {
    "path": "aws_xray_sdk/sdk_config.py",
    "content": "import os\nimport logging\n\nlog = logging.getLogger(__name__)\n\n\nclass SDKConfig:\n    \"\"\"\n    Global Configuration Class that defines SDK-level configuration properties.\n\n    Enabling/Disabling the SDK:\n        By default, the SDK is enabled unless an environment variable AWS_XRAY_SDK_ENABLED\n            is set. If it is set, it needs to be a valid string boolean, otherwise, it will default\n            to true. If the environment variable is set, all calls to set_sdk_enabled() will\n            prioritize the value of the environment variable.\n        Disabling the SDK affects the recorder, patcher, and middlewares in the following ways:\n        For the recorder, disabling automatically generates DummySegments for subsequent segments\n            and DummySubsegments for subsegments created and thus not send any traces to the daemon.\n        For the patcher, module patching will automatically be disabled. The SDK must be disabled\n            before calling patcher.patch() method in order for this to function properly.\n        For the middleware, no modification is made on them, but since the recorder automatically\n            generates DummySegments for all subsequent calls, they will not generate segments/subsegments\n            to be sent.\n\n    Environment variables:\n        \"AWS_XRAY_SDK_ENABLED\" - If set to 'false' disables the SDK and causes the explained above\n            to occur.\n    \"\"\"\n    XRAY_ENABLED_KEY = 'AWS_XRAY_SDK_ENABLED'\n    DISABLED_ENTITY_NAME = 'dummy'\n\n    __SDK_ENABLED = None\n\n    @classmethod\n    def __get_enabled_from_env(cls):\n        \"\"\"\n        Searches for the environment variable to see if the SDK should be disabled.\n        If no environment variable is found, it returns True by default.\n\n        :return: bool - True if it is enabled, False otherwise.\n        \"\"\"\n        env_var_str = os.getenv(cls.XRAY_ENABLED_KEY, 'true').lower()\n        if env_var_str in ('y', 
'yes', 't', 'true', 'on', '1'):\n            return True\n        elif env_var_str in ('n', 'no', 'f', 'false', 'off', '0'):\n            return False\n        else:\n            log.warning(\"Invalid literal passed into environment variable `AWS_XRAY_SDK_ENABLED`. Defaulting to True...\")\n            return True  # If an invalid parameter is passed in, we return True.\n\n    @classmethod\n    def sdk_enabled(cls):\n        \"\"\"\n        Returns whether the SDK is enabled or not.\n        \"\"\"\n        if cls.__SDK_ENABLED is None:\n            cls.__SDK_ENABLED = cls.__get_enabled_from_env()\n        return cls.__SDK_ENABLED\n\n    @classmethod\n    def set_sdk_enabled(cls, value):\n        \"\"\"\n        Modifies the enabled flag if the \"AWS_XRAY_SDK_ENABLED\" environment variable is not set,\n        otherwise, set the enabled flag to be equal to the environment variable. If the\n        env variable is an invalid string boolean, it will default to true.\n\n        :param bool value: Flag to set whether the SDK is enabled or disabled.\n\n        Environment variables AWS_XRAY_SDK_ENABLED overrides argument value.\n        \"\"\"\n        # Environment Variables take precedence over hardcoded configurations.\n        if cls.XRAY_ENABLED_KEY in os.environ:\n            cls.__SDK_ENABLED = cls.__get_enabled_from_env()\n        else:\n            if type(value) == bool:\n                cls.__SDK_ENABLED = value\n            else:\n                cls.__SDK_ENABLED = True\n                log.warning(\"Invalid parameter type passed into set_sdk_enabled(). Defaulting to True...\")\n"
  },
  {
    "path": "aws_xray_sdk/version.py",
    "content": "VERSION = '2.15.0'\n"
  },
  {
    "path": "docs/.gitignore",
    "content": "_build"
  },
  {
    "path": "docs/Makefile",
    "content": "# Minimal makefile for Sphinx documentation\n#\n\n# You can set these variables from the command line.\nSPHINXOPTS    =\nSPHINXBUILD   = python -msphinx\nSPHINXPROJ    = aws-xray-sdk\nSOURCEDIR     = .\nBUILDDIR      = _build\n\n# Put it first so that \"make\" without argument is like \"make help\".\nhelp:\n\t@$(SPHINXBUILD) -M help \"$(SOURCEDIR)\" \"$(BUILDDIR)\" $(SPHINXOPTS) $(O)\n\n.PHONY: help Makefile\n\n# Catch-all target: route all unknown targets to Sphinx using the new\n# \"make mode\" option.  $(O) is meant as a shortcut for $(SPHINXOPTS).\n%: Makefile\n\t@$(SPHINXBUILD) -M $@ \"$(SOURCEDIR)\" \"$(BUILDDIR)\" $(SPHINXOPTS) $(O)"
  },
  {
    "path": "docs/_templates/layout.html",
    "content": "{% extends '!layout.html' %}\n\n{% block footer %}\n<script src=\"/SdkStatic/sdk-priv.js\" async=\"true\"></script>\n{% endblock %}"
  },
  {
    "path": "docs/aws_xray_sdk.core.emitters.rst",
    "content": "aws\\_xray\\_sdk.core.emitters package\n====================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.core.emitters.udp\\_emitter module\n------------------------------------------------\n\n.. automodule:: aws_xray_sdk.core.emitters.udp_emitter\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.core.emitters\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.core.exceptions.rst",
    "content": "aws\\_xray\\_sdk.core.exceptions package\n======================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.core.exceptions.exceptions module\n------------------------------------------------\n\n.. automodule:: aws_xray_sdk.core.exceptions.exceptions\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.core.exceptions\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.core.models.rst",
    "content": "aws\\_xray\\_sdk.core.models package\n==================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.core.models.default\\_dynamic\\_naming module\n----------------------------------------------------------\n\n.. automodule:: aws_xray_sdk.core.models.default_dynamic_naming\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.models.dummy\\_entities module\n-------------------------------------------------\n\n.. automodule:: aws_xray_sdk.core.models.dummy_entities\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.models.entity module\n----------------------------------------\n\n.. automodule:: aws_xray_sdk.core.models.entity\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.models.facade\\_segment module\n-------------------------------------------------\n\n.. automodule:: aws_xray_sdk.core.models.facade_segment\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.models.http module\n--------------------------------------\n\n.. automodule:: aws_xray_sdk.core.models.http\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.models.segment module\n-----------------------------------------\n\n.. automodule:: aws_xray_sdk.core.models.segment\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.models.subsegment module\n--------------------------------------------\n\n.. automodule:: aws_xray_sdk.core.models.subsegment\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.models.throwable module\n-------------------------------------------\n\n.. automodule:: aws_xray_sdk.core.models.throwable\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.models.trace\\_header module\n-----------------------------------------------\n\n.. 
automodule:: aws_xray_sdk.core.models.trace_header\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.models.traceid module\n-----------------------------------------\n\n.. automodule:: aws_xray_sdk.core.models.traceid\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.core.models\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.core.plugins.rst",
    "content": "aws\\_xray\\_sdk.core.plugins package\n===================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.core.plugins.ec2\\_plugin module\n----------------------------------------------\n\n.. automodule:: aws_xray_sdk.core.plugins.ec2_plugin\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.plugins.ecs\\_plugin module\n----------------------------------------------\n\n.. automodule:: aws_xray_sdk.core.plugins.ecs_plugin\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.plugins.elasticbeanstalk\\_plugin module\n-----------------------------------------------------------\n\n.. automodule:: aws_xray_sdk.core.plugins.elasticbeanstalk_plugin\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.plugins.utils module\n----------------------------------------\n\n.. automodule:: aws_xray_sdk.core.plugins.utils\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.core.plugins\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.core.rst",
    "content": "aws\\_xray\\_sdk.core package\n===========================\n\nSubpackages\n-----------\n\n.. toctree::\n\n    aws_xray_sdk.core.emitters\n    aws_xray_sdk.core.exceptions\n    aws_xray_sdk.core.models\n    aws_xray_sdk.core.plugins\n    aws_xray_sdk.core.sampling\n    aws_xray_sdk.core.streaming\n    aws_xray_sdk.core.utils\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.core.async\\_context module\n-----------------------------------------\n\n.. automodule:: aws_xray_sdk.core.async_context\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.async\\_recorder module\n------------------------------------------\n\n.. automodule:: aws_xray_sdk.core.async_recorder\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.context module\n----------------------------------\n\n.. automodule:: aws_xray_sdk.core.context\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.lambda\\_launcher module\n-------------------------------------------\n\n.. automodule:: aws_xray_sdk.core.lambda_launcher\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.patcher module\n----------------------------------\n\n.. automodule:: aws_xray_sdk.core.patcher\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.recorder module\n-----------------------------------\n\n.. automodule:: aws_xray_sdk.core.recorder\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.core\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.core.sampling.rst",
    "content": "aws\\_xray\\_sdk.core.sampling package\n====================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.core.sampling.default\\_sampler module\n----------------------------------------------------\n\n.. automodule:: aws_xray_sdk.core.sampling.default_sampler\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.sampling.reservoir module\n---------------------------------------------\n\n.. automodule:: aws_xray_sdk.core.sampling.reservoir\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.sampling.sampling\\_rule module\n--------------------------------------------------\n\n.. automodule:: aws_xray_sdk.core.sampling.sampling_rule\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.core.sampling\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.core.streaming.rst",
    "content": "aws\\_xray\\_sdk.core.streaming package\n=====================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.core.streaming.default\\_streaming module\n-------------------------------------------------------\n\n.. automodule:: aws_xray_sdk.core.streaming.default_streaming\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.core.streaming\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.core.utils.rst",
    "content": "aws\\_xray\\_sdk.core.utils package\n=================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.core.utils.atomic\\_counter module\n------------------------------------------------\n\n.. automodule:: aws_xray_sdk.core.utils.atomic_counter\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.utils.compat module\n---------------------------------------\n\n.. automodule:: aws_xray_sdk.core.utils.compat\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.core.utils.search\\_pattern module\n------------------------------------------------\n\n.. automodule:: aws_xray_sdk.core.utils.search_pattern\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.core.utils\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.aiobotocore.rst",
    "content": "aws\\_xray\\_sdk.ext.aiobotocore package\n======================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.aiobotocore.patch module\n-------------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.aiobotocore.patch\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.aiobotocore\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.aiohttp.rst",
    "content": "aws\\_xray\\_sdk.ext.aiohttp package\n==================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.aiohttp.client module\n----------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.aiohttp.client\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.ext.aiohttp.middleware module\n--------------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.aiohttp.middleware\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.aiohttp\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.botocore.rst",
    "content": "aws\\_xray\\_sdk.ext.botocore package\n===================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.botocore.patch module\n----------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.botocore.patch\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.botocore\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.django.rst",
    "content": "aws\\_xray\\_sdk.ext.django package\n=================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.django.apps module\n-------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.django.apps\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.ext.django.conf module\n-------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.django.conf\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.ext.django.db module\n-----------------------------------\n\n.. automodule:: aws_xray_sdk.ext.django.db\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.ext.django.middleware module\n-------------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.django.middleware\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.ext.django.templates module\n------------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.django.templates\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.django\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.flask.rst",
    "content": "aws\\_xray\\_sdk.ext.flask package\n================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.flask.middleware module\n------------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.flask.middleware\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.flask\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.flask_sqlalchemy.rst",
    "content": "aws\\_xray\\_sdk.ext.flask\\_sqlalchemy package\n============================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.flask\\_sqlalchemy.query module\n-------------------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.flask_sqlalchemy.query\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.flask_sqlalchemy\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.httplib.rst",
    "content": "aws\\_xray\\_sdk.ext.httplib package\n==================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.httplib.patch module\n---------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.httplib.patch\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.httplib\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.httpx.rst",
    "content": "aws\\_xray\\_sdk.ext.httpx package\n================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.httpx.patch module\n-------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.httpx.patch\n   :members:\n   :undoc-members:\n   :show-inheritance:\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.httpx\n   :members:\n   :undoc-members:\n   :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.mysql.rst",
    "content": "aws\\_xray\\_sdk.ext.mysql package\n================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.mysql.patch module\n-------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.mysql.patch\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.mysql\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.pg8000.rst",
    "content": "aws\\_xray\\_sdk.ext.pg8000 package\n=================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.pg8000.patch module\n--------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.pg8000.patch\n   :members:\n   :undoc-members:\n   :show-inheritance:\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.pg8000\n   :members:\n   :undoc-members:\n   :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.psycopg2.rst",
    "content": "aws\\_xray\\_sdk.ext.psycopg2 package\n===================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.psycopg2.patch module\n----------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.psycopg2.patch\n   :members:\n   :undoc-members:\n   :show-inheritance:\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.psycopg2\n   :members:\n   :undoc-members:\n   :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.pymongo.rst",
    "content": "aws\\_xray\\_sdk.ext.pymongo package\n==================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.pymongo.patch module\n---------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.pymongo.patch\n   :members:\n   :undoc-members:\n   :show-inheritance:\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.pymongo\n   :members:\n   :undoc-members:\n   :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.pymysql.rst",
    "content": "aws\\_xray\\_sdk.ext.pymysql package\n==================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.pymysql.patch module\n---------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.pymysql.patch\n   :members:\n   :undoc-members:\n   :show-inheritance:\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.pymysql\n   :members:\n   :undoc-members:\n   :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.pynamodb.rst",
    "content": "aws\\_xray\\_sdk.ext.pynamodb package\n===================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.pynamodb.patch module\n----------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.pynamodb.patch\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.pynamodb\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.requests.rst",
    "content": "aws\\_xray\\_sdk.ext.requests package\n===================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.requests.patch module\n----------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.requests.patch\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.requests\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.rst",
    "content": "aws\\_xray\\_sdk.ext package\n==========================\n\nSubpackages\n-----------\n\n.. toctree::\n\n    aws_xray_sdk.ext.aiobotocore\n    aws_xray_sdk.ext.aiohttp\n    aws_xray_sdk.ext.botocore\n    aws_xray_sdk.ext.django\n    aws_xray_sdk.ext.flask\n    aws_xray_sdk.ext.flask_sqlalchemy\n    aws_xray_sdk.ext.httplib\n    aws_xray_sdk.ext.mysql\n    aws_xray_sdk.ext.pynamodb\n    aws_xray_sdk.ext.requests\n    aws_xray_sdk.ext.sqlalchemy\n    aws_xray_sdk.ext.sqlite3\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.boto\\_utils module\n-------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.boto_utils\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.ext.dbapi2 module\n--------------------------------\n\n.. automodule:: aws_xray_sdk.ext.dbapi2\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\naws\\_xray\\_sdk.ext.util module\n------------------------------\n\n.. automodule:: aws_xray_sdk.ext.util\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.sqlalchemy.rst",
    "content": "aws\\_xray\\_sdk.ext.sqlalchemy package\n=====================================\n\nSubpackages\n-----------\n\n.. toctree::\n\n    aws_xray_sdk.ext.sqlalchemy.util\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.sqlalchemy.query module\n------------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.sqlalchemy.query\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.sqlalchemy\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.sqlalchemy.util.rst",
    "content": "aws\\_xray\\_sdk.ext.sqlalchemy.util package\n==========================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.sqlalchemy.util.decorators module\n----------------------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.sqlalchemy.util.decorators\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.sqlalchemy.util\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.sqlalchemy_core.rst",
    "content": "aws\\_xray\\_sdk.ext.sqlalchemy\\_core package\n===========================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.sqlalchemy\\_core.patch module\n------------------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.sqlalchemy_core.patch\n   :members:\n   :undoc-members:\n   :show-inheritance:\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.sqlalchemy_core\n   :members:\n   :undoc-members:\n   :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.ext.sqlite3.rst",
    "content": "aws\\_xray\\_sdk.ext.sqlite3 package\n==================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.sqlite3.patch module\n---------------------------------------\n\n.. automodule:: aws_xray_sdk.ext.sqlite3.patch\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk.ext.sqlite3\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/aws_xray_sdk.rst",
    "content": "aws\\_xray\\_sdk package\n======================\n\nSubpackages\n-----------\n\n.. toctree::\n\n    aws_xray_sdk.core\n    aws_xray_sdk.ext\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.version module\n-----------------------------\n\n.. automodule:: aws_xray_sdk.version\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModule contents\n---------------\n\n.. automodule:: aws_xray_sdk\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/basic.rst",
    "content": ".. _basic:\n\nBasic Usage\n===========\n\nThe SDK provides a global recorder, ``xray_recorder``, to generate segments and subsegments.\n\nManually create segment/subsegment\n----------------------------------\nIf you're using a web framework or library that is not supported, or you want to define\nyour own structure on segments/subsegments, you can manually create \nsegments and subsegments by using code like the following::\n\n    from aws_xray_sdk.core import xray_recorder\n    \n    xray_recorder.begin_segment('name')\n\n    # your code here\n\n    xray_recorder.begin_subsegment('name')\n    # some code block you want to record\n    xray_recorder.end_subsegment()\n\n    xray_recorder.end_segment()\n\nThe ``xray_recorder`` keeps one segment per thread.\nTherefore, in manual mode, call ``xray_recorder.end_segment()`` before creating a new segment,\notherwise the new segment overwrites the existing one.\nTo trace a particular code block inside a segment, use a subsegment.\nIf you open a new subsegment while there is already an open subsegment,\nthe new subsegment becomes the child of the existing subsegment.\n\nDecorator for function auto-capture\n-----------------------------------\nA decorator is provided to easily capture basic information as a subsegment on\nuser defined functions. 
You can use the decorator like the following::\n    \n    @xray_recorder.capture('name')\n    def my_func():\n        #do something\n\n``xray_recorder`` generates a subsegment for the decorated function, where the name is optional.\nIf the name argument is not provided, the function name is used as the subsegment name.\nIf the function is called without an open segment in the context storage, the subsegment is discarded.\nCurrently the decorator only works with synchronous functions.\n\nSet annotation or metadata\n--------------------------\nYou can add annotations and metadata to an active segment/subsegment.\n\nAnnotations are simple key-value pairs that are indexed for use with\n`filter expressions <http://docs.aws.amazon.com/xray/latest/devguide/xray-console-filters.html>`_.\nUse annotations to record data that you want to use to group traces in the console,\nor when calling the GetTraceSummaries API. Annotation keys should only use ASCII letters, numbers, and\nthe underscore(_) character.\n\nMetadata are key-value pairs with values of any type, including objects and lists, but that are not indexed.\nUse metadata to record data you want to store in the trace but don't need to use for searching traces.\n\nYou can add annotations/metadata like the following::\n\n    from aws_xray_sdk.core import xray_recorder\n\n    segment = xray_recorder.current_segment()\n    # value can be string, number or bool\n    segment.put_annotation('key', value)\n    # namespace and key must be string and value is an object\n    # that can be serialized to json\n    segment.put_metadata('key', json, 'namespace')\n\nThe ``current_segment`` and ``current_subsegment`` functions get the current\nopen segment or subsegment, respectively, from context storage.\nPut these calls between segment or subsegment begin and end statements.\n\nAWS Lambda Integration\n----------------------\n\nTo integrate with Lambda you must\nfirst enable active tracing on a Lambda function.\nSee 
http://docs.aws.amazon.com/lambda/latest/dg/lambda-x-ray.html#using-x-ray for details.\n\nIn your Lambda function, you can only begin and end a subsegment.\nThe Lambda service emits a segment as the root.\nThis segment cannot be mutated.\nInstrument the SDK as you would in any Python script.\nSubsegments generated outside of the Lambda handler are discarded.\n"
  },
  {
    "path": "docs/changes.rst",
    "content": ".. _changes:\n\n.. include:: ../CHANGELOG.rst"
  },
  {
    "path": "docs/conf.py",
    "content": "# -*- coding: utf-8 -*-\n#\n# aws-xray-sdk documentation build configuration file, created by\n# sphinx-quickstart on Wed Aug  2 15:33:56 2017.\n#\n# This file is execfile()d with the current directory set to its\n# containing dir.\n#\n# Note that not all possible configuration values are present in this\n# autogenerated file.\n#\n# All configuration values have a default; values that are commented out\n# serve to show the default.\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\nimport os\nimport sys\nsys.path.append(os.path.join(os.path.dirname(__name__), '..'))\nsys.path.insert(0, os.path.join(os.path.abspath('.'), '../tests/ext/django'))\n\nos.environ['DJANGO_SETTINGS_MODULE'] = 'app.settings'\n\nimport django\ndjango.setup()\n\n# -- General configuration ------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#\n# needs_sphinx = '1.0'\n\n# Add any Sphinx extension module names here, as strings. 
They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = ['sphinx.ext.autodoc',\n              'sphinx.ext.doctest',\n              'sphinx.ext.intersphinx',\n              'sphinx.ext.coverage']\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n#\n# source_suffix = ['.rst', '.md']\nsource_suffix = '.rst'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\nproject = u'aws-xray-sdk'\ncopyright = u'2017, Amazon Web Services'\nauthor = u'Amazon Web Services'\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The short X.Y version.\nversion = u'2.15.0'\n# The full version, including alpha/beta/rc tags.\nrelease = u'2.15.0'\n\n# The language for content autogenerated by Sphinx. Refer to documentation\n# for a list of supported languages.\n#\n# This is also used if you do content translation via gettext catalogs.\n# Usually you set \"language\" from the command line for these cases.\nlanguage = None\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This patterns also effect to html_static_path and html_extra_path\nexclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\n\n# If true, `todo` and `todoList` produce output, else they produce nothing.\ntodo_include_todos = False\n\n\n# -- Options for HTML output ----------------------------------------------\n\n# The theme to use for HTML and HTML Help pages.  
See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = 'alabaster'\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further.  For a list of options available for each theme, see the\n# documentation.\n#\n# html_theme_options = {}\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = ['_static']\n\n# Custom sidebar templates, must be a dictionary that maps document names\n# to template names.\n#\n# This is required for the alabaster theme\n# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars\nhtml_sidebars = {\n    '**': [\n        'about.html',\n        'navigation.html',\n        'relations.html',  # needs 'show_related': True theme option to display\n        'searchbox.html',\n        'donate.html',\n    ]\n}\n\n\n# -- Options for HTMLHelp output ------------------------------------------\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = 'aws-xray-sdkdoc'\n\n\n# -- Options for LaTeX output ---------------------------------------------\n\nlatex_elements = {\n    # The paper size ('letterpaper' or 'a4paper').\n    #\n    # 'papersize': 'letterpaper',\n\n    # The font size ('10pt', '11pt' or '12pt').\n    #\n    # 'pointsize': '10pt',\n\n    # Additional stuff for the LaTeX preamble.\n    #\n    # 'preamble': '',\n\n    # Latex figure (float) alignment\n    #\n    # 'figure_align': 'htbp',\n}\n\n# Grouping the document tree into LaTeX files. 
List of tuples\n# (source start file, target name, title,\n#  author, documentclass [howto, manual, or own class]).\nlatex_documents = [\n    (master_doc, 'aws-xray-sdk.tex', u'aws-xray-sdk Documentation',\n     u'Amazon Web Services', 'manual'),\n]\n\n\n# -- Options for manual page output ---------------------------------------\n\n# One entry per manual page. List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [\n    (master_doc, 'aws-xray-sdk', u'aws-xray-sdk Documentation',\n     [author], 1)\n]\n\n\n# -- Options for Texinfo output -------------------------------------------\n\n# Grouping the document tree into Texinfo files. List of tuples\n# (source start file, target name, title, author,\n#  dir menu entry, description, category)\ntexinfo_documents = [\n    (master_doc, 'aws-xray-sdk', u'aws-xray-sdk Documentation',\n     author, 'aws-xray-sdk', 'One line description of project.',\n     'Miscellaneous'),\n]\n\n\n# Example configuration for intersphinx: refer to the Python standard library.\nintersphinx_mapping = {\n    'python': ('https://docs.python.org/', None),\n}\n"
  },
  {
    "path": "docs/configurations.rst",
    "content": ".. _configurations:\n\nConfigure Global Recorder\n=========================\n\nSampling\n--------\nSampling is enabled by default.\nWhenever the global recorder creates a segment,\nit decides whether to sample this segment.\nIf it does not sample this segment, it is discarded and not sent to the\nX-Ray daemon.\n\nTo turn off sampling, use code like the following::\n\n    from aws_xray_sdk.core import xray_recorder\n    xray_recorder.configure(sampling=False)\n\nBy default, the SDK uses sampling rules configured in your AWS account. You can also configure the backup sampling rules locally::\n\n    xray_recorder.configure(sampling_rules=your_rules)\n\nThe input can either be an absolute path to your sampling rule\n*.json* file or a dictionary.\n\nTo use only local rules for sampling, configure the recorder with a ``LocalSampler``::\n\n    from aws_xray_sdk.core.sampling.local.sampler import LocalSampler\n    xray_recorder.configure(sampler=LocalSampler())\n\nThe following code is an example of a rule configuration::\n\n    {\n    \"version\": 1,\n    \"rules\": [\n        {\n        \"description\": \"Player moves.\",\n        \"service_name\": \"*\",\n        \"http_method\": \"*\",\n        \"url_path\": \"/api/move/*\",\n        \"fixed_target\": 0,\n        \"rate\": 0.05\n        }\n    ],\n    \"default\": {\n        \"fixed_target\": 1,\n        \"rate\": 0.1\n        }\n    }\n\nThis example defines one custom rule and a default rule.\nThe custom rule applies a five-percent sampling rate\nwith no minimum number of requests to trace for paths under */api/move/*.\nThe default rule traces the first request each second and 10 percent of\nadditional requests.\nThe SDK applies custom rules in the order in which they are defined.\nIf a request matches multiple custom rules, the SDK applies only the first rule.\nYou can use wildcard character \"*\" and \"?\" in service_name, http_method and\nurl_path.\n\"*\" represents any combination of characters. 
\"?\" represents a single character.\n\nNote that sampling configurations have no effect if the application runs in AWS Lambda.\n\nPlugins\n-------\nThe plugin adds extra metadata for each segment if the app is running on that environment.\nThe SDK provides three plugins:\n\n* Amazon EC2 – EC2Plugin adds the instance ID and Availability Zone.\n* Elastic Beanstalk – ElasticBeanstalkPlugin adds the environment name, version label, and deployment ID.\n* Amazon ECS – ECSPlugin adds the container host name\n\nTo use plugins, use code like the following::\n\n    # a tuple of strings\n    plugins = ('elasticbeanstalk_plugin', 'ec2_plugin', 'ecs_plugin')\n    # alternatively you can use \n    plugins = ('ElasticBeanstalkPlugin', 'EC2Plugin', 'ECSPlugin')\n\n    xray_recorder.configure(plugins=plugins)\n\nOrder matters in the tuple and the origin of the segment is set from the last plugin.\nTherefore, in the previous example, if the program runs on ECS, the segment origin is\n'AWS::ECS::CONTAINER'.\nPlugins must be configured before patching any third party libraries to\navoid unexpected behavior.\nPlugins are employed after they are specified.\n\nContext Missing Strategy\n------------------------\nDefines the recorder behavior when your instrumented code attempts to record data when no segment is open.\nConfigure like the following::\n\n    xray_recorder.configure(context_missing='Your Strategy Name Here')\n\nSupported strategies are:\n\n* RUNTIME_ERROR: throw a SegmentNotFoundException\n* LOG_ERROR: log an error and continue\n* IGNORE_ERROR: do nothing\n\nSegment Dynamic Naming\n----------------------\nFor a web application you might want to name the segment using host names. You can pass in a pattern\nwith wildcard character \"*\" and \"?\". \"*\" represents any combination of characters.\n\"?\" represents a single character. 
If the host name from incoming request's header matches the pattern,\nthe host name will be used as the segment name, otherwise it uses fallback name defined in ``AWS_XRAY_TRACING_NAME``.\nTo configure dynamic naming, use code like the following::\n    \n    xray_recorder.configure(dynamic_naming='*.example.com')\n\nEnvironment Variables\n---------------------\nThere are three supported environment variables to configure the global\nrecorder:\n\n* AWS_XRAY_CONTEXT_MISSING: configure context missing strategy\n* AWS_XRAY_TRACING_NAME: default segment name\n* AWS_XRAY_DAEMON_ADDRESS: where the recorder sends data to over UDP\n\nEnvironment variables have higher precedence over ``xray_recorder.configure()``\n\nLogging\n-------\nThe SDK uses Python's built-in ``logging`` module to perform logging.\nYou can configure the SDK logging just like how you configure other\npython libraries. An example of setting the SDK log level is like the following::\n\n    logging.basicConfig(level='DEBUG')\n    logging.getLogger('aws_xray_sdk').setLevel(logging.WARNING)\n\nContext Storage\n---------------\nThe global recorder uses threadlocal to store active segments/subsegments.\nYou can override the default context class to implement your own context storage::\n    \n    from aws_xray_sdk.core.context import Context\n\n    class MyOwnContext(Context):\n\n        def put_segment(self, segment):\n        # store the segment created by ``xray_recorder`` to the context.\n        pass\n\n        def end_segment(self, end_time=None):\n        # end the segment in the current context.\n        pass\n\n        def put_subsegment(self, subsegment):\n        # store the subsegment created by ``xray_recorder`` to the context.\n        pass\n\n        def end_subsegment(self, end_time=None):\n        # end the subsegment in the current context.\n        pass\n\n        def get_trace_entity(self):\n        # get the current active trace entity(segment or subsegment).\n        pass\n\n        def 
set_trace_entity(self, trace_entity):\n        # manually inject a trace entity to the context storage.\n        pass\n\n        def clear_trace_entities(self):\n        # clean up context storage.\n        pass\n\n        def handle_context_missing(self):\n        # behavior on no trace entity to access or mutate.\n        pass\n\nThe function ``current_segment`` and ``current_subsegment`` on recorder level uses\n``context.get_trace_entity()`` and dynamically return the expected type by using internal\nreferences inside segment/subsegment objects.\n\nThen you can pass your own context::\n\n    my_context=MyOwnContext()\n    xray_recorder.configure(context=my_context)\n\nEmitter\n-------\nThe default emitter uses non-blocking socket to send data to the X-Ray daemon.\nIt doesn't retry on IOError. To override the default emitter::\n\n    from aws_xray_sdk.core.emitters.udp_emitter import UDPEmitter\n\n    class MyOwnEmitter(UDPEmitter):\n\n        def send_entity(self, entity):\n        # send the input segment/subsegment to the X-Ray daemon.\n        # Return True on success and False on failure.\n        pass\n\n        def set_daemon_address(self, address):\n        # parse input full address like 127.0.0.1:8000 to ip and port and\n        # store them to the local emitter properties.\n        pass\n\nThen you can pass your own emitter::\n\n    my_emitter = MyOwnEmitter()\n    xray_recorder.configure(emitter=my_emitter)\n"
  },
  {
    "path": "docs/frameworks.rst",
    "content": ".. _frameworks:\n\nDjango\n======\n\nConfigure X-Ray Recorder\n------------------------\nMake sure you add ``XRayMiddleware`` as the first entry in your\nDjango *settings.py* file, as shown in the following example::\n\n    MIDDLEWARE = [\n        'aws_xray_sdk.ext.django.middleware.XRayMiddleware',\n        'django.contrib.auth.middleware.AuthenticationMiddleware',\n        'django.contrib.messages.middleware.MessageMiddleware',\n    ] \n\nThe incoming requests to the Django app are then automatically recorded as\na segment.\n\nTo get the current segment and add annotations or metadata as needed,\nuse the following statement in your application code when processing request::\n\n    segment = xray_recorder.current_segment()\n\nFor more configurations in your Django ``settings.py`` file,\nadd the following line::\n\n    INSTALLED_APPS = [\n        'django.contrib.admin',\n        ...\n        'django.contrib.sessions',\n        'aws_xray_sdk.ext.django',\n    ]\n\nYou can configure the X-Ray recorder in a Django app under the\n'XRAY_RECORDER' namespace.\nThe default values are as follows::\n\n    XRAY_RECORDER = {\n        'AWS_XRAY_DAEMON_ADDRESS': '127.0.0.1:2000',\n        'AUTO_INSTRUMENT': True,  # If turned on built-in database queries and template rendering will be recorded as subsegments\n        'AWS_XRAY_CONTEXT_MISSING': 'LOG_ERROR',\n        'PLUGINS': (),\n        'SAMPLING': True,\n        'SAMPLING_RULES': None,\n        'AWS_XRAY_TRACING_NAME': None, # the segment name for segments generated from incoming requests\n        'DYNAMIC_NAMING': None, # defines a pattern that host names should match\n        'STREAMING_THRESHOLD': None, # defines when a segment starts to stream out its children subsegments\n    }\n\nEnvironment variables have higher precedence over user settings.\nIf neither is set, the default values shown previously are used.\n'AWS_XRAY_TRACING_NAME' is required unless specified as an environment variable.\nAll other 
keys are optional.\nFor further information on individual settings, see the :ref:`Configure Global Recorder <configurations>` section.\n\nLocal Development\n-----------------\nWhen doing Django app local development, if you configured Django built-in database with ``AUTO_INSTRUMENT`` turned-on,\nthe command ``manage.py runserver`` may fail if ``AWS_XRAY_CONTEXT_MISSING`` is set to ``RUNTIME_ERROR``. This is because\nthe command ``runserver`` performs migrations check which will generate a subsegment,\nthe ``xray_recorder`` will raise an error since there is no active segment. \n\nOne solution is to set ``AWS_XRAY_CONTEXT_MISSING`` to ``LOG_ERROR`` so it only emits an error message on server startup. \nAlternatively if you have defined your own ``ready()`` function for code execution at startup you can manually create a segment\nas a placeholder.\n\nBy Django official guide it's recommended to deploy Django to other servers in production so this particular issue normally\ndoesn't exist in production.\n\nFlask\n=====\n\nTo generate segment based on incoming requests, you need to instantiate the X-Ray middleware for flask::\n\n    from aws_xray_sdk.core import xray_recorder\n    from aws_xray_sdk.ext.flask.middleware import XRayMiddleware\n\n    app = Flask(__name__)\n\n    xray_recorder.configure(service='my_app_name')\n    XRayMiddleware(app, xray_recorder)\n\nFlask built-in template rendering will be wrapped into subsegments.\nYou can configure the recorder, see :ref:`Configure Global Recorder <configurations>` for more details.\n\nAiohttp\n=======\n\nServer\n------\n\nFor X-Ray to create a segment based on an incoming request, you need to register some middleware with aiohttp. 
As aiohttp\nis an asynchronous framework, X-Ray will also need to be configured with an ``AsyncContext`` compared to the default threaded\nversion.::\n\n    import asyncio\n\n    from aiohttp import web\n\n    from aws_xray_sdk.ext.aiohttp.middleware import middleware\n    from aws_xray_sdk.core.async_context import AsyncContext\n    from aws_xray_sdk.core import xray_recorder\n    # Configure X-Ray to use AsyncContext\n    xray_recorder.configure(service='service_name', context=AsyncContext())\n\n\n    async def handler(request):\n        return web.Response(body='Hello World')\n\n    loop = asyncio.get_event_loop()\n    # Use X-Ray SDK middleware, it's crucial the X-Ray middleware comes first\n    app = web.Application(middlewares=[middleware])\n    app.router.add_get(\"/\", handler)\n\n    web.run_app(app)\n\nThere are two things to note from the example above. Firstly a middleware coroutine from aws-xray-sdk is provided during the creation\nof an aiohttp server app. Lastly the ``xray_recorder`` has also been configured with a name and an ``AsyncContext``. See\n:ref:`Configure Global Recorder <configurations>` for more information about configuring the ``xray_recorder``.\n\nClient\n------\n\nSince 3.0.0 Aiohttp provides a generic object that allows third packages to gather the different events that occurred during an HTTP call, X-Ray\ncan be configured to track these requests as subsegments using the `aws_xray_trace_config` function. This will return a valid `TraceConfig` ready to be installed\nin any `aiohttp.ClientSession`. The following example shows how it can be used.::\n\n    from aws_xray_sdk.ext.aiohttp.client import aws_xray_trace_config\n\n    trace_config = aws_xray_trace_config()\n    async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:\n        async with session.get(url) as resp:\n            await resp.read()\n"
  },
  {
    "path": "docs/index.rst",
    "content": ".. aws-xray-sdk documentation master file, created by\n   sphinx-quickstart on Wed Aug  2 15:33:56 2017.\n   You can adapt this file completely to your liking, but it should at least\n   contain the root `toctree` directive.\n\nWelcome to the AWS X-Ray SDK for Python!\n========================================\nThis project is open sourced in Github. Please see: https://github.com/aws/aws-xray-sdk-python.\n\nThe AWS X-Ray service accepts application information in the form of trace segments.\nA trace segment represents the work done by a single machine as a part of the entire task or request.\nA set of trace segments which share the same trace ID form a trace.\nA trace represents a full unit of work completed for a single task or request.\nLearn more about AWS X-Ray service: https://aws.amazon.com/xray/.\n\nThe AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit\ninformation from within their applications to the AWS X-Ray service.\nYou can get started in minutes using ``pip`` or by downloading a zip file.\n\nCurrently supported web frameworks and libraries:\n\n* aioboto3/aiobotocore\n* aiohttp >=2.3\n* boto3/botocore\n* Bottle\n* Django >=1.10\n* Flask\n* httplib/http.client\n* mysql-connector\n* pg8000\n* psycopg2\n* psycopg (psycopg3)\n* pymongo\n* pymysql\n* pynamodb\n* requests\n* SQLAlchemy\n* sqlite3\n\nYou must have the X-Ray daemon running to use the SDK.\nFor information about installing and configuring the daemon see:\nhttp://docs.aws.amazon.com/xray/latest/devguide/xray-daemon.html.\n\n\nContents:\n\n\n.. toctree::\n   :maxdepth: 2\n\n   Basic Usage <basic>\n   Recorder Configurations <configurations>\n   Third Party Libraries <thirdparty>\n   Working with Web Frameworks <frameworks>\n   Change Log <changes>\n   License <license>\n\nIndices and tables\n==================\n\n* :ref:`modindex`\n* :ref:`search`\n"
  },
  {
    "path": "docs/license.rst",
    "content": ".. _license:\n\nLicense\n=======\n\nPlease see Github page on https://github.com/aws/aws-xray-sdk-python/blob/master/LICENSE.\n"
  },
  {
    "path": "docs/make.bat",
    "content": "@ECHO OFF\r\n\r\nREM Command file for Sphinx documentation\r\n\r\npushd %~dp0\r\n\r\nif \"%SPHINXBUILD%\" == \"\" (\r\n\tset SPHINXBUILD=python -msphinx\r\n)\r\nset BUILDDIR=_build\r\nset ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .\r\nset I18NSPHINXOPTS=%SPHINXOPTS% .\r\nif NOT \"%PAPER%\" == \"\" (\r\n\tset ALLSPHINXOPTS=-D latex_elements.papersize=%PAPER% %ALLSPHINXOPTS%\r\n\tset I18NSPHINXOPTS=-D latex_elements.papersize=%PAPER% %I18NSPHINXOPTS%\r\n)\r\n\r\nif \"%1\" == \"\" goto help\r\n\r\nif \"%1\" == \"help\" (\r\n\t:help\r\n\techo.Please use `make ^<target^>` where ^<target^> is one of\r\n\techo.  html       to make standalone HTML files\r\n\techo.  dirhtml    to make HTML files named index.html in directories\r\n\techo.  singlehtml to make a single large HTML file\r\n\techo.  pickle     to make pickle files\r\n\techo.  json       to make JSON files\r\n\techo.  htmlhelp   to make HTML files and an HTML help project\r\n\techo.  qthelp     to make HTML files and a qthelp project\r\n\techo.  devhelp    to make HTML files and a Devhelp project\r\n\techo.  epub       to make an epub\r\n\techo.  epub3      to make an epub3\r\n\techo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter\r\n\techo.  text       to make text files\r\n\techo.  man        to make manual pages\r\n\techo.  texinfo    to make Texinfo files\r\n\techo.  gettext    to make PO message catalogs\r\n\techo.  changes    to make an overview over all changed/added/deprecated items\r\n\techo.  xml        to make Docutils-native XML files\r\n\techo.  pseudoxml  to make pseudoxml-XML files for display purposes\r\n\techo.  linkcheck  to check all external links for integrity\r\n\techo.  doctest    to run all doctests embedded in the documentation if enabled\r\n\techo.  coverage   to run coverage check of the documentation if enabled\r\n\techo.  
dummy      to check syntax errors of document sources\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"clean\" (\r\n\tfor /d %%i in (%BUILDDIR%\\*) do rmdir /q /s %%i\r\n\tdel /q /s %BUILDDIR%\\*\r\n\tgoto end\r\n)\r\n\r\n\r\nREM Check if sphinx-build is available\r\n%SPHINXBUILD% 1>NUL 2>NUL\r\nif errorlevel 1 (\r\n\techo.\r\n\techo.The Sphinx module was not found. Make sure you have Sphinx installed,\r\n\techo.then set the SPHINXBUILD environment variable to point to the full\r\n\techo.path of the 'sphinx-build' executable. Alternatively you may add the\r\n\techo.Sphinx directory to PATH.\r\n\techo.\r\n\techo.If you don't have Sphinx installed, grab it from\r\n\techo.http://sphinx-doc.org/\r\n\texit /b 1\r\n)\r\n\r\n\r\nif \"%1\" == \"html\" (\r\n\t%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The HTML pages are in %BUILDDIR%/html.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"dirhtml\" (\r\n\t%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"singlehtml\" (\r\n\t%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. 
The HTML pages are in %BUILDDIR%/singlehtml.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"pickle\" (\r\n\t%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished; now you can process the pickle files.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"json\" (\r\n\t%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished; now you can process the JSON files.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"htmlhelp\" (\r\n\t%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished; now you can run HTML Help Workshop with the ^\r\n.hhp project file in %BUILDDIR%/htmlhelp.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"qthelp\" (\r\n\t%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished; now you can run \"qcollectiongenerator\" with the ^\r\n.qhcp project file in %BUILDDIR%/qthelp, like this:\r\n\techo.^> qcollectiongenerator %BUILDDIR%\\qthelp\\aws_xray_sdk.qhcp\r\n\techo.To view the help file:\r\n\techo.^> assistant -collectionFile %BUILDDIR%\\qthelp\\aws_xray_sdk.ghc\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"devhelp\" (\r\n\t%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"epub\" (\r\n\t%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The epub file is in %BUILDDIR%/epub.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"epub3\" (\r\n\t%SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. 
The epub3 file is in %BUILDDIR%/epub3.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"latex\" (\r\n\t%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished; the LaTeX files are in %BUILDDIR%/latex.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"latexpdf\" (\r\n\t%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex\r\n\tcd %BUILDDIR%/latex\r\n\tmake all-pdf\r\n\tcd %~dp0\r\n\techo.\r\n\techo.Build finished; the PDF files are in %BUILDDIR%/latex.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"latexpdfja\" (\r\n\t%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex\r\n\tcd %BUILDDIR%/latex\r\n\tmake all-pdf-ja\r\n\tcd %~dp0\r\n\techo.\r\n\techo.Build finished; the PDF files are in %BUILDDIR%/latex.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"text\" (\r\n\t%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The text files are in %BUILDDIR%/text.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"man\" (\r\n\t%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The manual pages are in %BUILDDIR%/man.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"texinfo\" (\r\n\t%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"gettext\" (\r\n\t%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. 
The message catalogs are in %BUILDDIR%/locale.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"changes\" (\r\n\t%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.The overview file is in %BUILDDIR%/changes.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"linkcheck\" (\r\n\t%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Link check complete; look for any errors in the above output ^\r\nor in %BUILDDIR%/linkcheck/output.txt.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"doctest\" (\r\n\t%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Testing of doctests in the sources finished, look at the ^\r\nresults in %BUILDDIR%/doctest/output.txt.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"coverage\" (\r\n\t%SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Testing of coverage in the sources finished, look at the ^\r\nresults in %BUILDDIR%/coverage/python.txt.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"xml\" (\r\n\t%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The XML files are in %BUILDDIR%/xml.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"pseudoxml\" (\r\n\t%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.\r\n\tgoto end\r\n)\r\n\r\nif \"%1\" == \"dummy\" (\r\n\t%SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy\r\n\tif errorlevel 1 exit /b 1\r\n\techo.\r\n\techo.Build finished. Dummy builder generates no files.\r\n\tgoto end\r\n)\r\n\r\n:end\r\npopd\r\n"
  },
  {
    "path": "docs/modules.rst",
    "content": "aws_xray_sdk\n============\n\n.. toctree::\n   :maxdepth: 4\n\n   aws_xray_sdk\n"
  },
  {
    "path": "docs/thirdparty.rst",
    "content": ".. _thirdparty:\n\nThird Party Library Support\n===========================\n\nPatching Supported Libraries\n----------------------------\n\nThe X-Ray Python SDK supports patching aioboto3, aiobotocore, boto3, botocore, pynamodb, requests, \nsqlite3, mysql, httplib, pymongo, pymysql, psycopg2, pg8000, sqlalchemy_core, httpx, and mysql-connector.\n\nTo patch, use code like the following in the main app::\n\n    from aws_xray_sdk.core import patch_all\n\n    patch_all()\n\n``patch_all`` ignores any libraries that are not installed.\n\nTo patch specific modules::\n\n    from aws_xray_sdk.core import patch\n\n    i_want_to_patch = ('botocore') # a tuple that contains the libs you want to patch\n    patch(i_want_to_patch)\n\nThe following modules are available to patch::\n\n    SUPPORTED_MODULES = (\n        'aioboto3',\n        'aiobotocore',\n        'boto3',\n        'botocore',\n        'pynamodb',\n        'requests',\n        'sqlite3',\n        'mysql',\n        'httplib',\n        'pymongo',\n        'pymysql',\n        'psycopg2',\n        'pg8000',\n        'sqlalchemy_core',\n        'httpx',\n    )\n\nPatching boto3 and botocore are equivalent since boto3 depends on botocore.\n\nPatching pynamodb applies the botocore patch as well, as it uses the logic from the botocore\npatch to apply the trace header.\n\nPatching mysql\n----------------------------\n\nFor mysql, only the mysql-connector module is supported and you have to use\ncode like the following to generate a subsegment for an SQL query::\n\n    def call_mysql():\n        conn = mysql.connector.connect(\n            host='host',\n            port='some_port',\n            user='some_user',\n            password='your_password',\n            database='your_db_name'\n        )\n\n        conn.cursor().execute('SHOW TABLES')\n\nPatching aioboto3 and aiobotocore\n---------------------------------\n\nOn top of patching aioboto3 or aiobotocore, the xray_recorder also needs to be\nconfigured 
to use the ``AsyncContext``. The following snippet shows how to set\nup the X-Ray SDK with an Async Context, bear in mind this requires Python 3.5+::\n\n    from aws_xray_sdk.core.async_context import AsyncContext\n    from aws_xray_sdk.core import xray_recorder\n    # Configure X-Ray to use AsyncContext\n    xray_recorder.configure(service='service_name', context=AsyncContext())\n\nSee :ref:`Configure Global Recorder <configurations>` for more information about\nconfiguring the ``xray_recorder``.\n\nPatching httplib\n----------------\n\nhttplib is a low-level python module which is used by several third party modules, so\nby enabling patching to this module you can gain patching of many modules \"for free.\"\nSome examples of modules that depend on httplib: requests and httplib2\n"
  },
  {
    "path": "sample-apps/LICENSE",
    "content": "Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this\nsoftware and associated documentation files (the \"Software\"), to deal in the Software\nwithout restriction, including without limitation the rights to use, copy, modify,\nmerge, publish, distribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\nINCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\nPARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\nHOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\nOF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n"
  },
  {
    "path": "sample-apps/flask/Dockerfile",
    "content": "FROM python:3.6\n\nWORKDIR /app\n\nCOPY . ./\n\nRUN pip install -r requirements.txt\n\nCMD [\"python\", \"application.py\"]\n"
  },
  {
    "path": "sample-apps/flask/application.py",
    "content": "import boto3\nfrom flask import Flask\nfrom aws_xray_sdk.core import xray_recorder, patch_all\nfrom aws_xray_sdk.ext.flask.middleware import XRayMiddleware\nfrom aws_xray_sdk.ext.flask_sqlalchemy.query import XRayFlaskSqlAlchemy\nimport requests\nimport os\n\napplication = app = Flask(__name__)\napplication.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\napplication.config[\"SQLALCHEMY_DATABASE_URI\"] = \"sqlite:///db.sqlite3\"\n\nxray_recorder.configure(service='My Flask Web Application')\nXRayMiddleware(app, xray_recorder)\npatch_all()\n\ndb = XRayFlaskSqlAlchemy(app=application)\n\n\nclass User(db.Model):\n    __tablename__ = 'users'\n\n    id = db.Column(db.Integer, primary_key=True)\n    name = db.Column(db.String(255), nullable=False, unique=True)\n\n\n# test http instrumentation\n@app.route('/outgoing-http-call')\ndef callHTTP():\n    requests.get(\"https://aws.amazon.com\")\n    return \"Ok! tracing outgoing http call\"\n\n\n# test aws sdk instrumentation\n@app.route('/aws-sdk-call')\ndef callAWSSDK():\n    client = boto3.client('s3')\n    client.list_buckets()\n\n    return 'Ok! tracing aws sdk call'\n\n\n# test flask-sql alchemy instrumentation\n@app.route('/flask-sql-alchemy-call')\ndef callSQL():\n    name = 'sql-alchemy-model'\n    user = User(name=name)\n    db.create_all()\n    db.session.add(user)\n\n    return 'Ok! tracing sql call'\n\n\n@app.route('/')\ndef default():\n    return \"healthcheck\"\n\n\nif __name__ == \"__main__\":\n    address = os.environ.get('LISTEN_ADDRESS')\n\n    if address is None:\n        host = '127.0.0.1'\n        port = '5000'\n    else:\n        host, port = address.split(\":\")\n    app.run(host=host, port=int(port), debug=True)\n"
  },
  {
    "path": "sample-apps/flask/requirements.txt",
    "content": "boto3==1.34.26\ncertifi==2024.7.4\nchardet==5.2.0\nFlask==2.3.3\nidna==3.7\nrequests==2.32.0\nurllib3==1.26.19\nWerkzeug==3.0.6\nflask-sqlalchemy==2.5.1\nSQLAlchemy==1.4\naws_xray_sdk==2.6.0\n"
  },
  {
    "path": "setup.cfg",
    "content": "[bdist_wheel]\nuniversal=1"
  },
  {
    "path": "setup.py",
    "content": "from setuptools import setup, find_packages\nfrom os import path\nfrom aws_xray_sdk.version import VERSION\n\nCURRENT_DIR = path.abspath(path.dirname(__file__))\n\nwith open(path.join(CURRENT_DIR, 'README.md'), 'r') as f:\n    long_description = f.read()\n\nsetup(\n    name='aws-xray-sdk',\n    version=VERSION,\n\n    description='The AWS X-Ray SDK for Python (the SDK) enables Python developers to record'\n                ' and emit information from within their applications to the AWS X-Ray service.',\n    long_description=long_description,\n    long_description_content_type='text/markdown',\n\n    url='https://github.com/aws/aws-xray-sdk-python',\n\n    author='Amazon Web Services',\n\n    license=\"Apache License 2.0\",\n\n    classifiers=[\n        'Development Status :: 5 - Production/Stable',\n        'Intended Audience :: Developers',\n        'Natural Language :: English',\n        'License :: OSI Approved :: Apache Software License',\n        'Programming Language :: Python',\n        'Programming Language :: Python :: 3',\n        'Programming Language :: Python :: 3.7',\n        'Programming Language :: Python :: 3.8',\n        'Programming Language :: Python :: 3.9',\n        'Programming Language :: Python :: 3.10',\n        'Programming Language :: Python :: 3.11',\n    ],\n\n    python_requires=\">=3.7\",\n    install_requires=[\n        'wrapt',\n        'botocore>=1.11.3',\n    ],\n\n    keywords='aws xray sdk',\n\n    packages=find_packages(exclude=['tests*']),\n    include_package_data=True\n)\n"
  },
  {
    "path": "terraform/eb.tf",
    "content": "terraform {\n  required_providers {\n    aws = {\n      source  = \"hashicorp/aws\"\n      version = \"3.5.0\"\n    }\n  }\n}\n\nprovider \"aws\" {\n  profile = \"default\"\n  region  = var.region\n}\n\nresource \"aws_s3_bucket_public_access_block\" \"bucket_access\" {\n  bucket = aws_s3_bucket.eb_app_bucket.id\n\n  restrict_public_buckets   = true\n}\n\nresource \"aws_s3_bucket\" \"eb_app_bucket\" {\n  bucket = \"${var.resource_prefix}.eb.app.applicationversion\"\n\n  versioning {\n    enabled = true\n  }\n\n  server_side_encryption_configuration {\n    rule {\n      apply_server_side_encryption_by_default {\n        sse_algorithm     = \"AES256\"\n      }\n    }\n  }\n}\n\nresource \"aws_s3_bucket_object\" \"eb_app_package\" {\n  bucket = aws_s3_bucket.eb_app_bucket.id\n  key    = var.bucket_key\n  source = var.source_path\n}\n\nresource \"aws_elastic_beanstalk_application\" \"eb_app\" {\n  name        = \"${var.resource_prefix}-EB-App\"\n  description = \"Deployment of EB App for integration testing\"\n}\n\nresource \"aws_elastic_beanstalk_application_version\" \"eb_app_version\" {\n  name        = \"${var.resource_prefix}-EB-App-1\"\n  application = aws_elastic_beanstalk_application.eb_app.name\n  bucket      = aws_s3_bucket.eb_app_bucket.id\n  key         = aws_s3_bucket_object.eb_app_package.id\n}\n\nresource \"aws_elastic_beanstalk_environment\" \"eb_env\" {\n  name                = \"${var.resource_prefix}-EB-App-Env\"\n  application         = aws_elastic_beanstalk_application.eb_app.name\n  solution_stack_name = \"64bit Amazon Linux 2 v3.5.12 running Python 3.8\"\n  tier = \"WebServer\"\n  version_label = aws_elastic_beanstalk_application_version.eb_app_version.name\n  cname_prefix = \"${var.resource_prefix}-Eb-app-env\"\n\n  setting {\n    namespace = \"aws:autoscaling:launchconfiguration\"\n    name = \"IamInstanceProfile\"\n    value = \"aws-elasticbeanstalk-ec2-role\"\n  }\n\n  setting {\n    namespace = \"aws:elasticbeanstalk:xray\"\n  
  name = \"XRayEnabled\"\n    value = \"true\"\n  }\n  \n  setting {\n    namespace = \"aws:autoscaling:launchconfiguration\"\n    name = \"DisableIMDSv1\"\n    value = \"true\"\n  }\n}\n"
  },
  {
    "path": "terraform/fixtures.us-west-2.tfvars",
    "content": "region = \"us-west-2\"\n\nbucket_key = \"beanstalk/deploy.zip\"\n\nsource_path = \"deploy.zip\"\n"
  },
  {
    "path": "terraform/variables.tf",
    "content": "variable \"region\" {\n  type        = string\n  description = \"AWS region for deployment of resources\"\n}\n\nvariable \"bucket_key\" {\n  type        = string\n  description = \"AWS s3 object key\"\n}\n\nvariable \"source_path\" {\n  type        = string\n  description = \"local source zip path to upload on AWS s3 bucket\"\n}\n\nvariable \"resource_prefix\" {}\n\n"
  },
  {
    "path": "tests/__init__.py",
    "content": ""
  },
  {
    "path": "tests/distributioncheck/__init__.py",
    "content": ""
  },
  {
    "path": "tests/distributioncheck/test_sanity.py",
    "content": "from aws_xray_sdk.core.models.segment import Segment\n\ndef test_create_segment():\n    segment = Segment('test')\n    assert segment.name == 'test'\n"
  },
  {
    "path": "tests/ext/__init__.py",
    "content": "from aws_xray_sdk.core import xray_recorder\nfrom ..util import StubbedEmitter\n\n\nxray_recorder.configure(sampling=False)\nxray_recorder.emitter = StubbedEmitter()\n"
  },
  {
    "path": "tests/ext/aiobotocore/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/aiobotocore/test_aiobotocore.py",
    "content": "import pytest\n\nfrom aiobotocore.session import get_session\nfrom botocore.stub import Stubber, ANY\nfrom botocore.exceptions import ClientError\n\nfrom aws_xray_sdk.core import patch\nfrom aws_xray_sdk.core.async_context import AsyncContext\nfrom aws_xray_sdk.core import xray_recorder\n\npatch(('aiobotocore',))\n\n\n@pytest.fixture(scope='function')\ndef recorder(event_loop):\n    \"\"\"\n    Clean up before and after each test run\n    \"\"\"\n    xray_recorder.configure(\n        service='test', sampling=False, context=AsyncContext(loop=event_loop)\n    )\n    xray_recorder.clear_trace_entities()\n    yield xray_recorder\n    xray_recorder.clear_trace_entities()\n\n\nasync def test_describe_table(event_loop, recorder):\n    segment = recorder.begin_segment('name')\n\n    req_id = '1234'\n    response = {'ResponseMetadata': {'RequestId': req_id, 'HTTPStatusCode': 403}}\n\n    session = get_session()\n    async with session.create_client('dynamodb', region_name='eu-west-2') as client:\n        with Stubber(client) as stubber:\n            stubber.add_response('describe_table', response, {'TableName': 'mytable'})\n            await client.describe_table(TableName='mytable')\n\n    subsegment = segment.subsegments[0]\n    assert subsegment.error\n    assert subsegment.http['response']['status'] == 403\n\n    aws_meta = subsegment.aws\n    assert aws_meta['table_name'] == 'mytable'\n    assert aws_meta['request_id'] == req_id\n    assert aws_meta['region'] == 'eu-west-2'\n    assert aws_meta['operation'] == 'DescribeTable'\n\n\nasync def test_s3_parameter_capture(event_loop, recorder):\n    segment = recorder.begin_segment('name')\n\n    bucket_name = 'mybucket'\n    key = 'mykey'\n    version_id = 'myversionid'\n    response = {'ResponseMetadata': {'RequestId': '1234', 'HTTPStatusCode': 200}}\n\n    session = get_session()\n    async with session.create_client('s3', region_name='eu-west-2') as client:\n        with Stubber(client) as stubber:\n      
      stubber.add_response('get_object', response,\n                                 {'Bucket': bucket_name, 'Key': key, 'VersionId': version_id})\n            await client.get_object(Bucket=bucket_name, Key=key,\n                                    VersionId=version_id)\n\n    subsegment = segment.subsegments[0]\n    aws_meta = subsegment.aws\n\n    assert aws_meta['bucket_name'] == bucket_name\n    assert aws_meta['key'] == key\n    assert aws_meta['version_id'] == version_id\n    assert aws_meta['operation'] == 'GetObject'\n\n\nasync def test_list_parameter_counting(event_loop, recorder):\n    \"\"\"\n    Test special parameters that have shape of list are recorded\n    as count based on `para_whitelist.json`\n    \"\"\"\n    segment = recorder.begin_segment('name')\n\n    queue_urls = ['url1', 'url2']\n    queue_name_prefix = 'url'\n    response = {\n        'QueueUrls': queue_urls,\n        'ResponseMetadata': {\n            'RequestId': '1234',\n            'HTTPStatusCode': 200,\n        }\n    }\n\n    session = get_session()\n    async with session.create_client('sqs', region_name='eu-west-2') as client:\n        with Stubber(client) as stubber:\n            stubber.add_response('list_queues', response, {'QueueNamePrefix': queue_name_prefix})\n            await client.list_queues(QueueNamePrefix='url')\n\n    subsegment = segment.subsegments[0]\n    assert subsegment.http['response']['status'] == 200\n\n    aws_meta = subsegment.aws\n    assert aws_meta['queue_count'] == len(queue_urls)\n    # all whitelisted input parameters will be converted to snake case\n    # unless there is an explicit 'rename_to' attribute in json key\n    assert aws_meta['queue_name_prefix'] == queue_name_prefix\n\n\nasync def test_map_parameter_grouping(event_loop, recorder):\n    \"\"\"\n    Test special parameters that have shape of map are recorded\n    as a list of keys based on `para_whitelist.json`\n    \"\"\"\n    segment = recorder.begin_segment('name')\n\n    response = 
{\n        'ResponseMetadata': {\n            'RequestId': '1234',\n            'HTTPStatusCode': 500,\n        }\n    }\n\n    session = get_session()\n    async with session.create_client('dynamodb', region_name='eu-west-2') as client:\n        with Stubber(client) as stubber:\n            stubber.add_response('batch_write_item', response, {'RequestItems': ANY})\n            await client.batch_write_item(RequestItems={'table1': [{}], 'table2': [{}]})\n\n    subsegment = segment.subsegments[0]\n    assert subsegment.fault\n    assert subsegment.http['response']['status'] == 500\n\n    aws_meta = subsegment.aws\n    assert sorted(aws_meta['table_names']) == ['table1', 'table2']\n\n\nasync def test_context_missing_not_swallow_return(event_loop, recorder):\n    xray_recorder.configure(service='test', sampling=False,\n                            context=AsyncContext(loop=event_loop),\n                            context_missing='LOG_ERROR')\n\n    response = {'ResponseMetadata': {'RequestId': '1234', 'HTTPStatusCode': 403}}\n\n    session = get_session()\n    async with session.create_client('dynamodb', region_name='eu-west-2') as client:\n        with Stubber(client) as stubber:\n            stubber.add_response('describe_table', response, {'TableName': 'mytable'})\n            actual_resp = await client.describe_table(TableName='mytable')\n\n    assert actual_resp == response\n\n\nasync def test_context_missing_not_suppress_exception(event_loop, recorder):\n    xray_recorder.configure(service='test', sampling=False,\n                            context=AsyncContext(loop=event_loop),\n                            context_missing='LOG_ERROR')\n\n    session = get_session()\n    async with session.create_client('dynamodb', region_name='eu-west-2') as client:\n        with Stubber(client) as stubber:\n            stubber.add_client_error('describe_table', expected_params={'TableName': ANY})\n            with pytest.raises(ClientError):\n                await 
client.describe_table(TableName='mytable')\n"
  },
  {
    "path": "tests/ext/aiohttp/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/aiohttp/test_client.py",
    "content": "import logging\n\nimport pytest\nfrom aiohttp import ClientSession\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.async_context import AsyncContext\nfrom aws_xray_sdk.core.context import MISSING_SEGMENT_MSG\nfrom aws_xray_sdk.core.exceptions.exceptions import SegmentNotFoundException\nfrom aws_xray_sdk.ext.util import strip_url, get_hostname\nfrom aws_xray_sdk.ext.aiohttp.client import aws_xray_trace_config\nfrom aws_xray_sdk.ext.aiohttp.client import REMOTE_NAMESPACE, LOCAL_NAMESPACE\n\n\n# httpbin.org is created by the same author of requests to make testing http easy.\nBASE_URL = 'httpbin.org'\n\n\n@pytest.fixture(scope='function')\ndef recorder(loop):\n    \"\"\"\n    Initiate a recorder and clear it up once has been used.\n    \"\"\"\n    xray_recorder.configure(service='test', sampling=False, context=AsyncContext(loop=loop))\n    xray_recorder.clear_trace_entities()\n    yield recorder\n    xray_recorder.clear_trace_entities()\n\n\nasync def test_ok(loop, recorder):\n    xray_recorder.begin_segment('name')\n    trace_config = aws_xray_trace_config()\n    status_code = 200\n    url = 'http://{}/status/{}?foo=bar'.format(BASE_URL, status_code)\n    async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:\n        async with session.get(url):\n            pass\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == get_hostname(url)\n    assert subsegment.namespace == REMOTE_NAMESPACE\n\n    http_meta = subsegment.http\n    assert http_meta['request']['url'] == strip_url(url)\n    assert http_meta['request']['method'] == 'GET'\n    assert http_meta['response']['status'] == status_code\n\n\nasync def test_ok_name(loop, recorder):\n    xray_recorder.begin_segment('name')\n    trace_config = aws_xray_trace_config(name='test')\n    status_code = 200\n    url = 'http://{}/status/{}?foo=bar'.format(BASE_URL, status_code)\n    async with ClientSession(loop=loop, 
trace_configs=[trace_config]) as session:\n        async with session.get(url):\n            pass\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == 'test'\n\n\nasync def test_error(loop, recorder):\n    xray_recorder.begin_segment('name')\n    trace_config = aws_xray_trace_config()\n    status_code = 400\n    url = 'http://{}/status/{}'.format(BASE_URL, status_code)\n    async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:\n        async with session.post(url):\n            pass\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == get_hostname(url)\n    assert subsegment.error\n\n    http_meta = subsegment.http\n    assert http_meta['request']['url'] == strip_url(url)\n    assert http_meta['request']['method'] == 'POST'\n    assert http_meta['response']['status'] == status_code\n\n\nasync def test_throttle(loop, recorder):\n    xray_recorder.begin_segment('name')\n    trace_config = aws_xray_trace_config()\n    status_code = 429\n    url = 'http://{}/status/{}'.format(BASE_URL, status_code)\n    async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:\n        async with session.head(url):\n            pass\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == get_hostname(url)\n    assert subsegment.error\n    assert subsegment.throttle\n\n    http_meta = subsegment.http\n    assert http_meta['request']['url'] == strip_url(url)\n    assert http_meta['request']['method'] == 'HEAD'\n    assert http_meta['response']['status'] == status_code\n\n\nasync def test_fault(loop, recorder):\n    xray_recorder.begin_segment('name')\n    trace_config = aws_xray_trace_config()\n    status_code = 500\n    url = 'http://{}/status/{}'.format(BASE_URL, status_code)\n    async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:\n        async with session.put(url):\n            
pass\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == get_hostname(url)\n    assert subsegment.fault\n\n    http_meta = subsegment.http\n    assert http_meta['request']['url'] == strip_url(url)\n    assert http_meta['request']['method'] == 'PUT'\n    assert http_meta['response']['status'] == status_code\n\n\nasync def test_invalid_url(loop, recorder):\n    xray_recorder.begin_segment('name')\n    trace_config = aws_xray_trace_config()\n    async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:\n        try:\n            async with session.get('http://doesnt.exist'):\n                pass\n        except Exception:\n            # prevent uncatch exception from breaking test run\n            pass\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.namespace == LOCAL_NAMESPACE\n    assert subsegment.fault\n\n    exception = subsegment.cause['exceptions'][0]\n    assert exception.type == 'ClientConnectorError'\n\n\nasync def test_no_segment_raise(loop, recorder):\n    xray_recorder.configure(context_missing='RUNTIME_ERROR')\n    trace_config = aws_xray_trace_config()\n    status_code = 200\n    url = 'http://{}/status/{}?foo=bar'.format(BASE_URL, status_code)\n    with pytest.raises(SegmentNotFoundException):\n        async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:\n            async with session.get(url):\n                pass\n\n\nasync def test_no_segment_log_error(loop, recorder, caplog):\n    caplog.set_level(logging.ERROR)\n    xray_recorder.configure(context_missing='LOG_ERROR')\n    trace_config = aws_xray_trace_config()\n    status_code = 200\n    url = 'http://{}/status/{}?foo=bar'.format(BASE_URL, status_code)\n    async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:\n        async with session.get(url) as resp:\n            status_received = resp.status\n\n    # Just check that the request was 
done correctly\n    assert status_received == status_code\n    assert MISSING_SEGMENT_MSG in [rec.message for rec in caplog.records]\n\n\nasync def test_no_segment_ignore_error(loop, recorder, caplog):\n    caplog.set_level(logging.ERROR)\n    xray_recorder.configure(context_missing='IGNORE_ERROR')\n    trace_config = aws_xray_trace_config()\n    status_code = 200\n    url = 'http://{}/status/{}?foo=bar'.format(BASE_URL, status_code)\n    async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:\n        async with session.get(url) as resp:\n            status_received = resp.status\n\n    # Just check that the request was done correctly\n    assert status_received == status_code\n    assert MISSING_SEGMENT_MSG not in [rec.message for rec in caplog.records]\n"
  },
  {
    "path": "tests/ext/aiohttp/test_middleware.py",
    "content": "\"\"\"\nTests the middleware for aiohttp server\n\nExpects pytest-aiohttp\n\"\"\"\nimport asyncio\nimport sys\nfrom unittest.mock import patch\n\nimport pytest\nfrom aiohttp import web\nfrom aiohttp.web_exceptions import HTTPUnauthorized\n\nfrom aws_xray_sdk import global_sdk_config\nfrom aws_xray_sdk.core.async_context import AsyncContext\nfrom aws_xray_sdk.core.emitters.udp_emitter import UDPEmitter\nfrom aws_xray_sdk.core.models import http\nfrom aws_xray_sdk.ext.aiohttp.middleware import middleware\nfrom tests.util import get_new_stubbed_recorder\n\n\nclass CustomStubbedEmitter(UDPEmitter):\n    \"\"\"\n    Custom stubbed emitter which stores all segments instead of the last one\n    \"\"\"\n\n    def __init__(self, daemon_address='127.0.0.1:2000'):\n        super().__init__(daemon_address)\n        self.local = []\n\n    def send_entity(self, entity):\n        self.local.append(entity)\n\n    def pop(self):\n        try:\n            return self.local.pop(0)\n        except IndexError:\n            return None\n\n\nclass ServerTest:\n    \"\"\"\n    Simple class to hold a copy of the event loop\n    \"\"\"\n    __test__ = False\n\n    def __init__(self, loop):\n        self._loop = loop\n\n    async def handle_ok(self, request: web.Request) -> web.Response:\n        \"\"\"\n        Handle / request\n        \"\"\"\n        if \"content_length\" in request.query:\n            headers = {'Content-Length': request.query['content_length']}\n        else:\n            headers = None\n\n        return web.Response(text=\"ok\", headers=headers)\n\n    async def handle_error(self, request: web.Request) -> web.Response:\n        \"\"\"\n        Handle /error which returns a 404\n        \"\"\"\n        return web.Response(text=\"not found\", status=404)\n\n    async def handle_unauthorized(self, request: web.Request) -> web.Response:\n        \"\"\"\n        Handle /unauthorized which returns a 401\n        \"\"\"\n        raise HTTPUnauthorized()\n\n   
 async def handle_exception(self, request: web.Request) -> web.Response:\n        \"\"\"\n        Handle /exception which raises a CancelledError; this is important, as starting from python 3.8 CancelledError\n        extends BaseException instead of Exception\n        \"\"\"\n        raise asyncio.CancelledError()\n\n    async def handle_delay(self, request: web.Request) -> web.Response:\n        \"\"\"\n        Handle /delay request\n        \"\"\"\n        if sys.version_info >= (3, 8):\n            await asyncio.sleep(0.3)\n        else:\n            await asyncio.sleep(0.3, loop=self._loop)\n        return web.Response(text=\"ok\")\n\n    def get_app(self) -> web.Application:\n        app = web.Application(middlewares=[middleware])\n        app.router.add_get('/', self.handle_ok)\n        app.router.add_get('/error', self.handle_error)\n        app.router.add_get('/exception', self.handle_exception)\n        app.router.add_get('/unauthorized', self.handle_unauthorized)\n        app.router.add_get('/delay', self.handle_delay)\n\n        return app\n\n    @classmethod\n    def app(cls, loop=None) -> web.Application:\n        return cls(loop=loop).get_app()\n\n\n@pytest.fixture(scope='function')\ndef recorder(loop):\n    \"\"\"\n    Clean up context storage before and after each test run\n    \"\"\"\n    xray_recorder = get_new_stubbed_recorder()\n    xray_recorder.configure(service='test', sampling=False, context=AsyncContext(loop=loop))\n\n    patcher = patch('aws_xray_sdk.ext.aiohttp.middleware.xray_recorder', xray_recorder)\n    patcher.start()\n\n    xray_recorder.clear_trace_entities()\n    yield xray_recorder\n    global_sdk_config.set_sdk_enabled(True)\n    xray_recorder.clear_trace_entities()\n    patcher.stop()\n\n\nasync def test_ok(aiohttp_client, loop, recorder):\n    \"\"\"\n    Test a normal response\n\n    :param aiohttp_client: AioHttp test client fixture\n    :param loop: Eventloop fixture\n    :param recorder: X-Ray recorder fixture\n    
\"\"\"\n    client = await aiohttp_client(ServerTest.app(loop=loop))\n\n    resp = await client.get('/')\n    assert resp.status == 200\n\n    segment = recorder.emitter.pop()\n    assert not segment.in_progress\n\n    request = segment.http['request']\n    response = segment.http['response']\n\n    assert request['method'] == 'GET'\n    assert request['url'] == 'http://127.0.0.1:{port}/'.format(port=client.port)\n    assert response['status'] == 200\n\n\nasync def test_ok_x_forwarded_for(aiohttp_client, loop, recorder):\n    \"\"\"\n    Test a normal response with x_forwarded_for headers\n\n    :param aiohttp_client: AioHttp test client fixture\n    :param loop: Eventloop fixture\n    :param recorder: X-Ray recorder fixture\n    \"\"\"\n    client = await aiohttp_client(ServerTest.app(loop=loop))\n\n    resp = await client.get('/', headers={'X-Forwarded-For': 'foo'})\n    assert resp.status == 200\n\n    segment = recorder.emitter.pop()\n    assert segment.http['request']['client_ip'] == 'foo'\n    assert segment.http['request']['x_forwarded_for']\n\n\nasync def test_ok_content_length(aiohttp_client, loop, recorder):\n    \"\"\"\n    Test a normal response with content length as response header\n\n    :param aiohttp_client: AioHttp test client fixture\n    :param loop: Eventloop fixture\n    :param recorder: X-Ray recorder fixture\n    \"\"\"\n    client = await aiohttp_client(ServerTest.app(loop=loop))\n\n    resp = await client.get('/?content_length=100')\n    assert resp.status == 200\n\n    segment = recorder.emitter.pop()\n    assert segment.http['response']['content_length'] == 100\n\n\nasync def test_error(aiohttp_client, loop, recorder):\n    \"\"\"\n    Test a 4XX response\n\n    :param aiohttp_client: AioHttp test client fixture\n    :param loop: Eventloop fixture\n    :param recorder: X-Ray recorder fixture\n    \"\"\"\n    client = await aiohttp_client(ServerTest.app(loop=loop))\n\n    resp = await client.get('/error')\n    assert resp.status == 
404\n\n    segment = recorder.emitter.pop()\n    assert not segment.in_progress\n    assert segment.error\n\n    request = segment.http['request']\n    response = segment.http['response']\n    assert request['method'] == 'GET'\n    assert request['url'] == 'http://127.0.0.1:{port}/error'.format(port=client.port)\n    assert request['client_ip'] == '127.0.0.1'\n    assert response['status'] == 404\n\n\nasync def test_exception(aiohttp_client, loop, recorder):\n    \"\"\"\n    Test handling an exception\n\n    :param aiohttp_client: AioHttp test client fixture\n    :param loop: Eventloop fixture\n    :param recorder: X-Ray recorder fixture\n    \"\"\"\n    client = await aiohttp_client(ServerTest.app(loop=loop))\n\n    with pytest.raises(Exception):\n        await client.get('/exception')\n\n    segment = recorder.emitter.pop()\n    assert not segment.in_progress\n    assert segment.fault\n\n    request = segment.http['request']\n    response = segment.http['response']\n    exception = segment.cause['exceptions'][0]\n    assert request['method'] == 'GET'\n    assert request['url'] == 'http://127.0.0.1:{port}/exception'.format(port=client.port)\n    assert request['client_ip'] == '127.0.0.1'\n    assert response['status'] == 500\n    assert exception.type == 'CancelledError'\n\n\nasync def test_unhauthorized(aiohttp_client, loop, recorder):\n    \"\"\"\n    Test a 401 response\n\n    :param aiohttp_client: AioHttp test client fixture\n    :param loop: Eventloop fixture\n    :param recorder: X-Ray recorder fixture\n    \"\"\"\n    client = await aiohttp_client(ServerTest.app(loop=loop))\n\n    resp = await client.get('/unauthorized')\n    assert resp.status == 401\n\n    segment = recorder.emitter.pop()\n    assert not segment.in_progress\n    assert segment.error\n\n    request = segment.http['request']\n    response = segment.http['response']\n    assert request['method'] == 'GET'\n    assert request['url'] == 
'http://127.0.0.1:{port}/unauthorized'.format(port=client.port)\n    assert request['client_ip'] == '127.0.0.1'\n    assert response['status'] == 401\n\n\nasync def test_response_trace_header(aiohttp_client, loop, recorder):\n    client = await aiohttp_client(ServerTest.app(loop=loop))\n    resp = await client.get('/')\n    xray_header = resp.headers[http.XRAY_HEADER]\n    segment = recorder.emitter.pop()\n\n    expected = 'Root=%s' % segment.trace_id\n    assert expected in xray_header\n\n\nasync def test_concurrent(aiohttp_client, loop, recorder):\n    \"\"\"\n    Test multiple concurrent requests\n\n    :param aiohttp_client: AioHttp test client fixture\n    :param loop: Eventloop fixture\n    :param recorder: X-Ray recorder fixture\n    \"\"\"\n    client = await aiohttp_client(ServerTest.app(loop=loop))\n\n    recorder.emitter = CustomStubbedEmitter()\n\n    async def get_delay():\n        resp = await client.get('/delay')\n        assert resp.status == 200\n\n    if sys.version_info >= (3, 8):\n        await asyncio.wait([loop.create_task(get_delay()) for i in range(9)])\n    else:\n        await asyncio.wait([loop.create_task(get_delay()) for i in range(9)], loop=loop)\n\n    # Ensure all ID's are different\n    ids = [item.id for item in recorder.emitter.local]\n    assert len(ids) == len(set(ids))\n\n\nasync def test_disabled_sdk(aiohttp_client, loop, recorder):\n    \"\"\"\n    Test a normal response when the SDK is disabled.\n\n    :param aiohttp_client: AioHttp test client fixture\n    :param loop: Eventloop fixture\n    :param recorder: X-Ray recorder fixture\n    \"\"\"\n    global_sdk_config.set_sdk_enabled(False)\n    client = await aiohttp_client(ServerTest.app(loop=loop))\n\n    resp = await client.get('/')\n    assert resp.status == 200\n\n    segment = recorder.emitter.pop()\n    assert not segment\n"
  },
  {
    "path": "tests/ext/botocore/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/botocore/test_botocore.py",
    "content": "import pytest\nimport botocore.session\nfrom botocore.stub import Stubber, ANY\n\nfrom aws_xray_sdk.core import patch\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.context import Context\n\npatch(('botocore',))\nsession = botocore.session.get_session()\n\nREQUEST_ID = '1234'\n\n\n@pytest.fixture(autouse=True)\ndef construct_ctx():\n    \"\"\"\n    Clean up context storage on each test run and begin a segment\n    so that later subsegment can be attached. After each test run\n    it cleans up context storage again.\n    \"\"\"\n    xray_recorder.configure(service='test', sampling=False, context=Context())\n    xray_recorder.clear_trace_entities()\n    xray_recorder.begin_segment('name')\n    yield\n    xray_recorder.clear_trace_entities()\n\n\ndef test_ddb_table_name():\n    ddb = session.create_client('dynamodb', region_name='us-west-2')\n    response = {\n        'ResponseMetadata': {\n            'RequestId': REQUEST_ID,\n            'HTTPStatusCode': 403,\n        }\n    }\n\n    with Stubber(ddb) as stubber:\n        stubber.add_response('describe_table', response, {'TableName': 'mytable'})\n        ddb.describe_table(TableName='mytable')\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.error\n    assert subsegment.http['response']['status'] == 403\n\n    aws_meta = subsegment.aws\n    assert aws_meta['table_name'] == 'mytable'\n    assert aws_meta['request_id'] == REQUEST_ID\n    assert aws_meta['region'] == 'us-west-2'\n    assert aws_meta['operation'] == 'DescribeTable'\n\n\ndef test_s3_bucket_name_capture():\n    s3 = session.create_client('s3', region_name='us-west-2')\n    response = {\n        'ResponseMetadata': {\n            'RequestId': REQUEST_ID,\n            'HTTPStatusCode': 200,\n        }\n    }\n\n    bucket_name = 'mybucket'\n\n    with Stubber(s3) as stubber:\n        stubber.add_response('list_objects_v2', response, {'Bucket': bucket_name})\n        
s3.list_objects_v2(Bucket=bucket_name)\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    aws_meta = subsegment.aws\n\n    assert aws_meta['bucket_name'] == bucket_name\n    assert aws_meta['request_id'] == REQUEST_ID\n    assert aws_meta['region'] == 'us-west-2'\n    assert aws_meta['operation'] == 'ListObjectsV2'\n\n\ndef test_list_parameter_counting():\n    \"\"\"\n    Test special parameters that have shape of list are recorded\n    as count based on `para_whitelist.json`\n    \"\"\"\n    sqs = session.create_client('sqs', region_name='us-west-2')\n    queue_urls = ['url1', 'url2']\n    queue_name_prefix = 'url'\n    response = {\n        'QueueUrls': queue_urls,\n        'ResponseMetadata': {\n            'RequestId': '1234',\n            'HTTPStatusCode': 200,\n        }\n    }\n\n    with Stubber(sqs) as stubber:\n        stubber.add_response('list_queues', response, {'QueueNamePrefix': queue_name_prefix})\n        sqs.list_queues(QueueNamePrefix='url')\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.http['response']['status'] == 200\n\n    aws_meta = subsegment.aws\n    assert aws_meta['queue_count'] == len(queue_urls)\n    # all whitelisted input parameters will be converted to snake case\n    # unless there is an explicit 'rename_to' attribute in json key\n    assert aws_meta['queue_name_prefix'] == queue_name_prefix\n\n\ndef test_map_parameter_grouping():\n    \"\"\"\n    Test special parameters that have shape of map are recorded\n    as a list of keys based on `para_whitelist.json`\n    \"\"\"\n    ddb = session.create_client('dynamodb', region_name='us-west-2')\n    response = {\n        'ResponseMetadata': {\n            'RequestId': REQUEST_ID,\n            'HTTPStatusCode': 500,\n        }\n    }\n\n    with Stubber(ddb) as stubber:\n        stubber.add_response('batch_write_item', response, {'RequestItems': ANY})\n        ddb.batch_write_item(RequestItems={'table1': [{}], 'table2': 
[{}]})\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.fault\n    assert subsegment.http['response']['status'] == 500\n\n    aws_meta = subsegment.aws\n    assert sorted(aws_meta['table_names']) == ['table1', 'table2']\n\ndef test_pass_through_on_context_missing():\n    \"\"\"\n    The built-in patcher or subsegment capture logic should not throw\n    any error when a `None` subsegment created from `LOG_ERROR` missing context.\n    \"\"\"\n    xray_recorder.configure(context_missing='LOG_ERROR')\n    xray_recorder.clear_trace_entities()\n\n    ddb = session.create_client('dynamodb', region_name='us-west-2')\n    response = {\n        'ResponseMetadata': {\n            'RequestId': REQUEST_ID,\n            'HTTPStatusCode': 200,\n        }\n    }\n\n    with Stubber(ddb) as stubber:\n        stubber.add_response('describe_table', response, {'TableName': 'mytable'})\n        result = ddb.describe_table(TableName='mytable')\n    assert result is not None\n\n    xray_recorder.configure(context_missing='RUNTIME_ERROR')\n\n\ndef test_sns_publish_parameters():\n    sns = session.create_client('sns', region_name='us-west-2')\n    response = {\n        'ResponseMetadata': {\n            'RequestId': REQUEST_ID,\n            'HTTPStatusCode': 200,\n        }\n    }\n\n    with Stubber(sns) as stubber:\n        stubber.add_response('publish', response, {'TopicArn': 'myAmazingTopic', 'Message': 'myBodaciousMessage'})\n        sns.publish(TopicArn='myAmazingTopic', Message='myBodaciousMessage')\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.http['response']['status'] == 200\n\n    aws_meta = subsegment.aws\n    assert aws_meta['topic_arn'] == 'myAmazingTopic'\n    assert aws_meta['request_id'] == REQUEST_ID\n    assert aws_meta['region'] == 'us-west-2'\n    assert aws_meta['operation'] == 'Publish'\n"
  },
  {
    "path": "tests/ext/bottle/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/bottle/test_bottle.py",
    "content": "import pytest\nfrom bottle import Bottle, request, response, template, view, HTTPError, TEMPLATE_PATH\nfrom webtest import TestApp as WebApp\n\nfrom aws_xray_sdk import global_sdk_config\nfrom aws_xray_sdk.ext.bottle.middleware import XRayMiddleware\nfrom aws_xray_sdk.core.context import Context\nfrom aws_xray_sdk.core import lambda_launcher\nfrom aws_xray_sdk.core.models import http, facade_segment, segment as segment_model\nfrom tests.util import get_new_stubbed_recorder\nimport os\n\n\n# define Bottle app for testing purpose\nTEMPLATE_PATH.insert(0, os.path.dirname(__file__) + '/views')\napp = Bottle()\n\n\n@app.route('/ok')\ndef ok():\n    response_data = 'ok'\n    # Bottle not always set Content-Length header\n    response.content_length = len(response_data)\n    return response_data\n\n\n@app.route('/error')\ndef error():\n    response.status = 404\n    return 'Not Found'\n\n\n@app.route('/client_error')\ndef faulty_client():\n    class CustomError(Exception):\n        def __init__(self, description=None, status_code=None):\n            self.description = description\n            self.status_code = status_code\n\n    raise CustomError(description='Bad request', status_code=400)\n\n\n@app.route('/server_error')\ndef faulty_server():\n    raise HTTPError(status=503, body='Service Unavailable')\n\n\n@app.route('/fault')\ndef fault():\n    return {}['key']\n\n\n@app.route('/template')\ndef template_():\n    return template('Hello {{name}}!', name='World')\n\n\n@app.route('/view')\n@view('index')\ndef view_(name='bottle'):\n    return dict(name=name)\n\n\n# add X-Ray plugin to Bottle app\nrecorder = get_new_stubbed_recorder()\nrecorder.configure(service='test', sampling=False, context=Context())\napp.install(XRayMiddleware(recorder))\n\napp = WebApp(app)\n\nBASE_URL = 'http://localhost:80{}'\n\n\n@pytest.fixture(autouse=True)\ndef cleanup():\n    \"\"\"\n    Clean up context storage before and after each test run\n    \"\"\"\n    
recorder.clear_trace_entities()\n    yield\n    recorder.clear_trace_entities()\n    global_sdk_config.set_sdk_enabled(True)\n\n\ndef test_ok():\n    path = '/ok'\n    app.get(path, extra_environ={'REMOTE_ADDR': '127.0.0.1'})\n    segment = recorder.emitter.pop()\n    assert not segment.in_progress\n\n    request = segment.http['request']\n    response = segment.http['response']\n\n    assert request['method'] == 'GET'\n    assert request['url'] == BASE_URL.format(path)\n    assert request['client_ip'] == '127.0.0.1'\n    assert response['status'] == 200\n    assert response['content_length'] == 2\n\n\ndef test_error():\n    path = '/error'\n    try:\n        app.get(path, extra_environ={'HTTP_X_FORWARDED_FOR': '192.168.0.0'})\n    except Exception:\n        pass\n    segment = recorder.emitter.pop()\n    assert not segment.in_progress\n    assert segment.error\n\n    request = segment.http['request']\n    response = segment.http['response']\n    assert request['method'] == 'GET'\n    assert request['url'] == BASE_URL.format(path)\n    assert request['client_ip'] == '192.168.0.0'\n    assert response['status'] == 404\n\n\ndef test_custom_client_error():\n    path = '/client_error'\n    try:\n        app.get(path)\n    except Exception:\n        pass\n    segment = recorder.emitter.pop()\n    assert not segment.in_progress\n    assert segment.error\n\n    response = segment.http['response']\n    assert response['status'] == 400\n    exception = segment.cause['exceptions'][0]\n    assert exception.type == 'CustomError'\n\n    request = segment.http['request']\n    assert request['method'] == 'GET'\n    assert request['url'] == BASE_URL.format(path)\n\n\ndef test_server_error():\n    path = '/server_error'\n    try:\n        app.get(path)\n    except Exception as e:\n        pass\n    segment = recorder.emitter.pop()\n    assert not segment.in_progress\n    assert segment.fault\n\n    response = segment.http['response']\n    assert response['status'] == 503\n\n    
exception = segment.cause['exceptions'][0]\n    assert exception.type == 'HTTPError'\n\n\ndef test_fault():\n    path = '/fault'\n    try:\n        app.get(path)\n    except Exception:\n        pass\n    segment = recorder.emitter.pop()\n    assert not segment.in_progress\n    assert segment.fault\n\n    response = segment.http['response']\n    assert response['status'] == 500\n\n    exception = segment.cause['exceptions'][0]\n    assert exception.type == 'KeyError'\n\n\ndef test_render_template():\n    path = '/template'\n    app.get(path)\n    segment = recorder.emitter.pop()\n    assert not segment.in_progress\n    # segment should contain a template render subsegment\n    assert segment.subsegments\n\n    subsegment = segment.subsegments[0]\n    assert subsegment.name\n    assert subsegment.namespace == 'local'\n    assert not subsegment.in_progress\n\n\ndef test_render_view():\n    path = '/view'\n    response = app.get(path)\n    assert response.text == \"<h1>Hello Bottle!</h1>\\n<p>How are you?</p>\\n\"\n    segment = recorder.emitter.pop()\n    assert not segment.in_progress\n    # segment should contain a template render subsegment\n    assert segment.subsegments\n\n    subsegment = segment.subsegments[0]\n    assert subsegment.name\n    assert subsegment.namespace == 'local'\n    assert not subsegment.in_progress\n\n\ndef test_incoming_sampling_decision_respected():\n    path = '/ok'\n    # resp = app.get(path, headers={http.XRAY_HEADER: 'Sampled=0'})\n    resp = app.get(path, headers={http.XRAY_HEADER: 'Sampled=0'})\n    resp_header = resp.headers[http.XRAY_HEADER]\n    segment = recorder.emitter.pop()\n\n    assert not segment\n    # The SDK should still send the headers back regardless of sampling decision\n    assert 'Root' in resp_header\n\n\ndef test_trace_header_data_perservation():\n    path = '/ok'\n    app.get(path, headers={http.XRAY_HEADER: 'k1=v1'})\n    segment = recorder.emitter.pop()\n    header = segment.get_origin_trace_header()\n\n    
assert header.data['k1'] == 'v1'\n\n\ndef test_sampled_response_header():\n    path = '/ok'\n    app.get(path, headers={http.XRAY_HEADER: 'Sampled=?;k1=v1'})\n    segment = recorder.emitter.pop()\n\n    resp_header = response.headers.get(http.XRAY_HEADER)\n    assert segment.trace_id in resp_header\n    assert 'Sampled=1' in resp_header\n\n\ndef test_disabled_sdk():\n    global_sdk_config.set_sdk_enabled(False)\n    path = '/ok'\n    app.get(path)\n    segment = recorder.emitter.pop()\n    assert not segment\n\n\ndef test_lambda_serverless():\n    TRACE_ID = '1-5759e988-bd862e3fe1be46a994272793'\n    PARENT_ID = '53995c3f42cd8ad8'\n    HEADER_VAR = 'Root=%s;Parent=%s;Sampled=1' % (TRACE_ID, PARENT_ID)\n\n    os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY] = HEADER_VAR\n    lambda_context = lambda_launcher.LambdaContext()\n\n    new_recorder = get_new_stubbed_recorder()\n    new_recorder.configure(service='test', sampling=False, context=lambda_context)\n    new_app = Bottle()\n\n    @new_app.route('/subsegment')\n    def subsegment_():\n        # Test in between request and make sure Serverless creates a subsegment instead of a segment.\n        # Ensure that the parent segment is a facade segment.\n        assert new_recorder.current_subsegment()\n        assert type(new_recorder.current_segment()) == facade_segment.FacadeSegment\n        return 'ok'\n\n    @new_app.route('/trace_header')\n    def trace_header():\n        # Ensure trace header is preserved.\n        subsegment = new_recorder.current_subsegment()\n        header = subsegment.get_origin_trace_header()\n        assert header.data['k1'] == 'v1'\n        return 'ok'\n\n    plugin = XRayMiddleware(new_recorder)\n    plugin._in_lambda_ctx = True\n    new_app.install(plugin)\n\n    app_client = WebApp(new_app)\n\n    path = '/subsegment'\n    app_client.get(path)\n    new_app.get(path)\n    segment = recorder.emitter.pop()\n    assert not segment  # Segment should be none because it's created and ended 
by the plugin\n\n    path2 = '/trace_header'\n    app_client.get(path2, headers={http.XRAY_HEADER: 'k1=v1'})\n\n\ndef test_lambda_default_ctx():\n    # Track to make sure that Bottle will default to generating segments if context is not the lambda context\n    new_recorder = get_new_stubbed_recorder()\n    new_recorder.configure(service='test', sampling=False)\n    new_app = Bottle()\n\n    @new_app.route('/segment')\n    def segment_():\n        # Test in between request and make sure Lambda that uses default context generates a segment.\n        assert new_recorder.current_segment()\n        assert type(new_recorder.current_segment()) == segment_model.Segment\n        return 'ok'\n\n    new_app.install(XRayMiddleware(new_recorder))\n    app_client = WebApp(new_app)\n\n    path = '/segment'\n    app_client.get(path)\n    segment = recorder.emitter.pop()\n    assert not segment  # Segment should be none because it's created and ended by the plugin\n"
  },
  {
    "path": "tests/ext/bottle/views/index.tpl",
    "content": "<h1>Hello {{name.title()}}!</h1>\n<p>How are you?</p>\n"
  },
  {
    "path": "tests/ext/django/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/django/app/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/django/app/settings.py",
    "content": "\"\"\"\nConfig file for a django app used by django testing client\n\"\"\"\nimport os\nfrom aws_xray_sdk.core.sampling.sampler import LocalSampler\n\n\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n\nDEBUG = True\n\nDATABASES = {\n    'default': {\n        'ENGINE': 'django.db.backends.sqlite3',\n        'NAME': ':memory:'\n    }\n}\n\nALLOWED_HOSTS = ['testserver']\n\nSECRET_KEY = 'doesntreallymatter'\n\nROOT_URLCONF = 'tests.ext.django.app.views'\n\nTEMPLATES = [\n    {\n        'BACKEND': 'django.template.backends.django.DjangoTemplates',\n        'DIRS': [\n            os.path.join(BASE_DIR, 'app', 'templates'),\n        ],\n        'APP_DIRS': True,\n        'OPTIONS': {\n            'context_processors': [\n                'django.template.context_processors.debug',\n                'django.template.context_processors.request',\n            ],\n        },\n    },\n]\n\nMIDDLEWARE = [\n    # X-Ray middleware for django\n    'aws_xray_sdk.ext.django.middleware.XRayMiddleware',\n    'django.contrib.sessions.middleware.SessionMiddleware',\n    'django.middleware.common.CommonMiddleware',\n    'django.middleware.csrf.CsrfViewMiddleware',\n    'django.contrib.auth.middleware.AuthenticationMiddleware',\n    'django.contrib.messages.middleware.MessageMiddleware',\n    'django.middleware.clickjacking.XFrameOptionsMiddleware',\n    'django.middleware.security.SecurityMiddleware',\n]\n\nINSTALLED_APPS = [\n    'django.contrib.admin',\n    'django.contrib.auth',\n    'django.contrib.contenttypes',\n    'django.contrib.sessions',\n    'aws_xray_sdk.ext.django',\n]\n\nXRAY_RECORDER = {\n    'AWS_XRAY_TRACING_NAME': 'django',\n    'SAMPLING': False,\n    'SAMPLER': LocalSampler(),\n}\n\nLANGUAGE_CODE = 'en-us'\n\nTIME_ZONE = 'UTC'\n\nUSE_I18N = True\n\nUSE_L10N = True\n\nUSE_TZ = True\n\nSTATIC_URL = '/static/'\n"
  },
  {
    "path": "tests/ext/django/app/templates/block.html",
    "content": "<p>Hello World</p>\n"
  },
  {
    "path": "tests/ext/django/app/templates/block_user.html",
    "content": "<!DOCTYPE html>\n<html>\n<body>\n\n<h1>Django Test App</h1>\n\n{% include \"block.html\" %}\n\n</body>\n</html>\n"
  },
  {
    "path": "tests/ext/django/app/templates/index.html",
    "content": "<!DOCTYPE html>\n<html>\n<body>\n\n<h1>Django Test App</h1>\n\n<p>Hello World</p>\n\n</body>\n</html>"
  },
  {
    "path": "tests/ext/django/app/views.py",
    "content": "import sqlite3\n\nfrom django.http import HttpResponse\nfrom django.urls import path\nfrom django.views.generic import TemplateView\n\n\nclass IndexView(TemplateView):\n    template_name = 'index.html'\n\n\nclass TemplateBlockView(TemplateView):\n    template_name = 'block_user.html'\n\n\ndef ok(request):\n    return HttpResponse(status=200)\n\n\ndef fault(request):\n    {}['key']\n\n\ndef call_db(request):\n    conn = sqlite3.connect(':memory:')\n    q = 'SELECT name FROM sqlite_master'\n    conn.execute(q)\n    return HttpResponse(status=201)\n\n\n# def template(request):\n\n\nurlpatterns = [\n    path('200ok/', ok, name='200ok'),\n    path('500fault/', fault, name='500fault'),\n    path('call_db/', call_db, name='call_db'),\n    path('template/', IndexView.as_view(), name='template'),\n    path('template_block/', TemplateBlockView.as_view(), name='template_block'),\n]\n"
  },
  {
    "path": "tests/ext/django/test_db.py",
    "content": "import django\n\nimport pytest\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.context import Context\nfrom aws_xray_sdk.ext.django.db import patch_db\n\n\n@pytest.fixture(scope='module', autouse=True)\ndef setup():\n    django.setup()\n    xray_recorder.configure(context=Context())\n    patch_db()\n\n\n@pytest.fixture(scope='module')\ndef user_class(setup):\n    from django.db import models\n    from django_fake_model import models as f\n\n    class User(f.FakeModel):\n        name = models.CharField(max_length=255)\n        password = models.CharField(max_length=255)\n\n    return User\n\n\n@pytest.fixture(\n    autouse=True,\n    params=[\n        False,\n        True,\n    ]\n)\n@pytest.mark.django_db\ndef func_setup(request, user_class):\n    xray_recorder.stream_sql = request.param\n    xray_recorder.clear_trace_entities()\n    xray_recorder.begin_segment('name')\n    try:\n        user_class.create_table()\n        yield\n    finally:\n        xray_recorder.clear_trace_entities()\n        try:\n            user_class.delete_table()\n        finally:\n            xray_recorder.end_segment()\n\n\ndef _assert_query(sql_meta):\n    if xray_recorder.stream_sql:\n        assert 'sanitized_query' in sql_meta\n        assert sql_meta['sanitized_query']\n        assert sql_meta['sanitized_query'].startswith('SELECT')\n    else:\n        if 'sanitized_query' in sql_meta:\n            assert sql_meta['sanitized_query']\n            # Django internally executes queries for table checks, ignore those\n            assert not sql_meta['sanitized_query'].startswith('SELECT')\n\n\ndef test_all(user_class):\n    \"\"\" Test calling all() on get all records.\n    Verify we run the query and return the SQL as metadata\"\"\"\n    # Materialising the query executes the SQL\n    list(user_class.objects.all())\n    subsegment = xray_recorder.current_segment().subsegments[-1]\n    sql = subsegment.sql\n    assert sql['database_type'] == 
'sqlite'\n    _assert_query(sql)\n\n\ndef test_filter(user_class):\n    \"\"\" Test calling filter() to get filtered records.\n    Verify we run the query and return the SQL as metadata\"\"\"\n    # Materialising the query executes the SQL\n    list(user_class.objects.filter(password='mypassword!').all())\n    subsegment = xray_recorder.current_segment().subsegments[-1]\n    sql = subsegment.sql\n    assert sql['database_type'] == 'sqlite'\n    _assert_query(sql)\n    if xray_recorder.stream_sql:\n        assert 'mypassword!' not in sql['sanitized_query']\n        assert '\"password\" = %s' in sql['sanitized_query']\n"
  },
  {
    "path": "tests/ext/django/test_middleware.py",
    "content": "import django\nfrom aws_xray_sdk import global_sdk_config\nfrom django.urls import reverse\nfrom django.test import TestCase\n\nfrom aws_xray_sdk.core import xray_recorder, lambda_launcher\nfrom aws_xray_sdk.core.context import Context\nfrom aws_xray_sdk.core.models import http, facade_segment, segment\nfrom aws_xray_sdk.core import patch\nfrom tests.util import get_new_stubbed_recorder\nimport os\n\n\nclass XRayTestCase(TestCase):\n\n    def setUp(self):\n        django.setup()\n        xray_recorder.configure(context=Context())\n        xray_recorder.clear_trace_entities()\n        global_sdk_config.set_sdk_enabled(True)\n\n    def tearDown(self):\n        xray_recorder.clear_trace_entities()\n\n    def test_ok(self):\n        url = reverse('200ok')\n        self.client.get(url)\n        segment = xray_recorder.emitter.pop()\n\n        request = segment.http['request']\n        response = segment.http['response']\n\n        assert request['method'] == 'GET'\n        assert request['client_ip'] == '127.0.0.1'\n        assert response['status'] == 200\n\n    def test_error(self):\n        self.client.get('/notfound/')\n        segment = xray_recorder.emitter.pop()\n        assert segment.error\n\n        request = segment.http['request']\n        response = segment.http['response']\n\n        assert request['method'] == 'GET'\n        assert request['client_ip'] == '127.0.0.1'\n        assert response['status'] == 404\n\n    def test_fault(self):\n        url = reverse('500fault')\n        try:\n            self.client.get(url)\n        except Exception:\n            pass\n        segment = xray_recorder.emitter.pop()\n        assert segment.fault\n\n        request = segment.http['request']\n        response = segment.http['response']\n\n        assert request['method'] == 'GET'\n        assert request['client_ip'] == '127.0.0.1'\n        assert response['status'] == 500\n\n        exception = segment.cause['exceptions'][0]\n        assert 
exception.type == 'KeyError'\n\n    def test_db(self):\n        patch(('sqlite3',))\n        url = reverse('call_db')\n        self.client.get(url)\n        segment = xray_recorder.emitter.pop()\n        assert len(segment.subsegments) == 1\n\n        subsegment = segment.subsegments[0]\n        assert subsegment.name == ':memory:'\n        assert not subsegment.in_progress\n\n        sql = subsegment.sql\n        assert sql['database_type'] == 'sqlite3'\n        assert sql['database_version']\n\n    def test_template(self):\n        url = reverse('template')\n        self.client.get(url)\n        segment = xray_recorder.emitter.pop()\n        assert len(segment.subsegments) == 1\n\n        subsegment = segment.subsegments[0]\n        assert subsegment.name == 'index.html'\n        assert not subsegment.in_progress\n        assert subsegment.namespace == 'local'\n\n    def test_template_block(self):\n        url = reverse('template_block')\n        self.client.get(url)\n        segment = xray_recorder.emitter.pop()\n        assert len(segment.subsegments) == 1\n\n        subsegment = segment.subsegments[0]\n        assert subsegment.name == 'block_user.html'\n        assert not subsegment.in_progress\n        assert subsegment.namespace == 'local'\n\n    def test_trace_header_data_perservation(self):\n        url = reverse('200ok')\n        self.client.get(url, HTTP_X_AMZN_TRACE_ID='k1=v1')\n        segment = xray_recorder.emitter.pop()\n        header = segment.get_origin_trace_header()\n\n        assert header.data['k1'] == 'v1'\n\n    def test_response_header(self):\n        url = reverse('200ok')\n        resp = self.client.get(url, HTTP_X_AMZN_TRACE_ID='Sampled=?')\n        segment = xray_recorder.emitter.pop()\n        trace_header = resp[http.XRAY_HEADER]\n\n        assert 'Sampled=1' in trace_header\n        assert segment.trace_id in trace_header\n\n    def test_disabled_sdk(self):\n        global_sdk_config.set_sdk_enabled(False)\n        url = 
reverse('200ok')\n        self.client.get(url)\n        segment = xray_recorder.emitter.pop()\n        assert not segment\n\n    def test_lambda_serverless(self):\n        TRACE_ID = '1-5759e988-bd862e3fe1be46a994272793'\n        PARENT_ID = '53995c3f42cd8ad8'\n        HEADER_VAR = \"Root=%s;Parent=%s;Sampled=1\" % (TRACE_ID, PARENT_ID)\n\n        os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY] = HEADER_VAR\n        lambda_context = lambda_launcher.LambdaContext()\n\n        new_recorder = get_new_stubbed_recorder()\n        new_recorder.configure(service='test', sampling=False, context=lambda_context)\n        subsegment = new_recorder.begin_subsegment(\"subsegment\")\n        assert type(subsegment.parent_segment) == facade_segment.FacadeSegment\n        new_recorder.end_subsegment()\n\n        url = reverse('200ok')\n        self.client.get(url)\n        segment = new_recorder.emitter.pop()\n        assert not segment\n\n        # Test Fault in Lambda\n        url = reverse('500fault')\n        try:\n            self.client.get(url)\n        except Exception:\n            pass\n        segment = xray_recorder.emitter.pop()\n        assert segment.fault\n\n        request = segment.http['request']\n        response = segment.http['response']\n\n        assert request['method'] == 'GET'\n        assert request['client_ip'] == '127.0.0.1'\n        assert response['status'] == 500\n\n        exception = segment.cause['exceptions'][0]\n        assert exception.type == 'KeyError'\n\n    def test_lambda_default_ctx(self):\n        # Track to make sure that Django will default to generating segments if context is not the lambda context\n        url = reverse('200ok')\n        self.client.get(url)\n        cur_segment = xray_recorder.emitter.pop()\n        assert type(cur_segment) == segment.Segment\n"
  },
  {
    "path": "tests/ext/django/test_settings.py",
    "content": "from unittest import mock\n\nimport django\nfrom django.apps import apps\nfrom django.conf import settings\nfrom django.test import TestCase, override_settings\n\nfrom aws_xray_sdk import global_sdk_config\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.sampling.sampler import LocalSampler\n\n\nclass XRayConfigurationTestCase(TestCase):\n    def test_sampler_can_be_configured(self):\n        assert isinstance(settings.XRAY_RECORDER['SAMPLER'], LocalSampler)\n        assert isinstance(xray_recorder.sampler, LocalSampler)\n"
  },
  {
    "path": "tests/ext/flask/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/flask/test_flask.py",
    "content": "import pytest\nfrom flask import Flask, render_template_string\n\nfrom aws_xray_sdk import global_sdk_config\nfrom aws_xray_sdk.ext.flask.middleware import XRayMiddleware\nfrom aws_xray_sdk.core.context import Context\nfrom aws_xray_sdk.core import lambda_launcher\nfrom aws_xray_sdk.core.models import http, facade_segment, segment\nfrom tests.util import get_new_stubbed_recorder\nimport os\n\n# define a flask app for testing purpose\napp = Flask(__name__)\n\n\n@app.route('/ok')\ndef ok():\n    return 'ok'\n\n\n@app.route('/error')\ndef error():\n    return 'Not Found', 404\n\n\n@app.route('/fault')\ndef fault():\n    return {}['key']\n\n\n@app.route('/fault_no_exception')\ndef fault_no_exception():\n    return \"SomeException\", 500\n\n\n@app.route('/template')\ndef template():\n    return render_template_string('hello template')\n\n\n# add X-Ray middleware to flask app\nrecorder = get_new_stubbed_recorder()\nrecorder.configure(service='test', sampling=False, context=Context())\nXRayMiddleware(app, recorder)\n\n# We don't need to enable testing mode by doing app.config['TESTING'] = True\n# because what it does is disable error catching during request handling,\n# so that you get better error reports when performing test requests against the application.\n# But this also results in `after_request` method not getting invoked during unhandled exception which we want\n# since it is the actual application behavior in our use case.\napp = app.test_client()\n\nBASE_URL = 'http://localhost{}'\n\n\n@pytest.fixture(autouse=True)\ndef cleanup():\n    \"\"\"\n    Clean up context storage before and after each test run\n    \"\"\"\n    recorder.clear_trace_entities()\n    yield\n    recorder.clear_trace_entities()\n    global_sdk_config.set_sdk_enabled(True)\n\n\ndef test_ok():\n    path = '/ok'\n    app.get(path)\n    segment = recorder.emitter.pop()\n    assert not segment.in_progress\n\n    request = segment.http['request']\n    response = 
segment.http['response']\n\n    assert request['method'] == 'GET'\n    assert request['url'] == BASE_URL.format(path)\n    assert request['client_ip'] == '127.0.0.1'\n    assert response['status'] == 200\n    assert response['content_length'] == 2\n\n\ndef test_error():\n    path = '/error'\n    app.get(path)\n    segment = recorder.emitter.pop()\n    assert not segment.in_progress\n    assert segment.error\n\n    request = segment.http['request']\n    response = segment.http['response']\n    assert request['method'] == 'GET'\n    assert request['url'] == BASE_URL.format(path)\n    assert request['client_ip'] == '127.0.0.1'\n    assert response['status'] == 404\n\n\ndef test_fault():\n    path = '/fault'\n    try:\n        app.get(path)\n    except Exception:\n        pass\n    segment = recorder.emitter.pop()\n    assert not segment.in_progress\n    assert segment.fault\n\n    response = segment.http['response']\n    assert response['status'] == 500\n\n    exception = segment.cause['exceptions'][0]\n    assert exception.type == 'KeyError'\n\n\ndef test_fault_no_exception():\n    path = '/fault_no_exception'\n    app.get(path)\n    segment = recorder.emitter.pop()\n    assert not segment.in_progress\n    assert segment.fault\n\n    response = segment.http['response']\n    assert response['status'] == 500\n    assert segment.cause == {}\n\n\ndef test_render_template():\n    path = '/template'\n    app.get(path)\n    segment = recorder.emitter.pop()\n    assert not segment.in_progress\n    # segment should contain a template render subsegment\n    assert segment.subsegments\n\n    subsegment = segment.subsegments[0]\n    assert subsegment.name\n    assert subsegment.namespace == 'local'\n    assert not subsegment.in_progress\n\n\ndef test_incoming_sampling_decision_respected():\n    path = '/ok'\n    resp = app.get(path, headers={http.XRAY_HEADER: 'Sampled=0'})\n    resp_header = resp.headers[http.XRAY_HEADER]\n    segment = recorder.emitter.pop()\n\n    assert not 
segment\n    # The SDK should still send the headers back regardless of sampling decision\n    assert 'Root' in resp_header\n\n\ndef test_trace_header_data_perservation():\n    path = '/ok'\n    app.get(path, headers={http.XRAY_HEADER: 'k1=v1'})\n    segment = recorder.emitter.pop()\n    header = segment.get_origin_trace_header()\n\n    assert header.data['k1'] == 'v1'\n\n\ndef test_sampled_response_header():\n    path = '/ok'\n    resp = app.get(path, headers={http.XRAY_HEADER: 'Sampled=?;k1=v1'})\n    segment = recorder.emitter.pop()\n\n    resp_header = resp.headers[http.XRAY_HEADER]\n    assert segment.trace_id in resp_header\n    assert 'Sampled=1' in resp_header\n\n\ndef test_disabled_sdk():\n    global_sdk_config.set_sdk_enabled(False)\n    path = '/ok'\n    app.get(path)\n    segment = recorder.emitter.pop()\n    assert not segment\n\n\ndef test_lambda_serverless():\n    TRACE_ID = '1-5759e988-bd862e3fe1be46a994272793'\n    PARENT_ID = '53995c3f42cd8ad8'\n    HEADER_VAR = \"Root=%s;Parent=%s;Sampled=1\" % (TRACE_ID, PARENT_ID)\n\n    os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY] = HEADER_VAR\n    lambda_context = lambda_launcher.LambdaContext()\n\n    new_recorder = get_new_stubbed_recorder()\n    new_recorder.configure(service='test', sampling=False, context=lambda_context)\n    new_app = Flask(__name__)\n\n    @new_app.route('/subsegment')\n    def subsegment():\n        # Test in between request and make sure Serverless creates a subsegment instead of a segment.\n        # Ensure that the parent segment is a facade segment.\n        assert new_recorder.current_subsegment()\n        assert type(new_recorder.current_segment()) == facade_segment.FacadeSegment\n        return 'ok'\n\n    @new_app.route('/trace_header')\n    def trace_header():\n        # Ensure trace header is preserved.\n        subsegment = new_recorder.current_subsegment()\n        header = subsegment.get_origin_trace_header()\n        assert header.data['k1'] == 'v1'\n        
return 'ok'\n\n    middleware = XRayMiddleware(new_app, new_recorder)\n    middleware.in_lambda_ctx = True\n\n    app_client = new_app.test_client()\n\n    path = '/subsegment'\n    app_client.get(path)\n    segment = recorder.emitter.pop()\n    assert not segment  # Segment should be none because it's created and ended by the middleware\n\n    path2 = '/trace_header'\n    app_client.get(path2, headers={http.XRAY_HEADER: 'k1=v1'})\n\n\ndef test_lambda_default_ctx():\n    # Track to make sure that Flask will default to generating segments if context is not the lambda context\n    new_recorder = get_new_stubbed_recorder()\n    new_recorder.configure(service='test', sampling=False)\n    new_app = Flask(__name__)\n\n    @new_app.route('/segment')\n    def subsegment():\n        # Test in between request and make sure Lambda that uses default context generates a segment.\n        assert new_recorder.current_segment()\n        assert type(new_recorder.current_segment()) == segment.Segment\n        return 'ok'\n\n    XRayMiddleware(new_app, new_recorder)\n    app_client = new_app.test_client()\n\n    path = '/segment'\n    app_client.get(path)\n    segment = recorder.emitter.pop()\n    assert not segment  # Segment should be none because it's created and ended by the middleware\n"
  },
  {
    "path": "tests/ext/flask_sqlalchemy/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/flask_sqlalchemy/test_query.py",
    "content": "import pytest\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.context import Context\nfrom aws_xray_sdk.ext.flask_sqlalchemy.query import XRayFlaskSqlAlchemy\nfrom flask import Flask\nfrom ...util import find_subsegment_by_annotation\n\n\napp = Flask(__name__)\napp.config[\"SQLALCHEMY_TRACK_MODIFICATIONS\"] = False\napp.config[\"SQLALCHEMY_DATABASE_URI\"] = \"sqlite:///:memory:\"\ndb = XRayFlaskSqlAlchemy(app)\n\n\nclass User(db.Model):\n    __tablename__ = \"users\"\n\n    id = db.Column(db.Integer, primary_key=True)\n    name = db.Column(db.String(255), nullable=False, unique=True)\n    fullname = db.Column(db.String(255), nullable=False)\n    password = db.Column(db.String(255), nullable=False)\n\n\n@pytest.fixture(\n    params=[\n        False,\n        True,\n    ],\n)\ndef session(request):\n    \"\"\"Test Fixture to Create DataBase Tables and start a trace segment\"\"\"\n    xray_recorder.configure(service='test', sampling=False, context=Context(), stream_sql=request.param)\n    xray_recorder.clear_trace_entities()\n    xray_recorder.begin_segment('SQLAlchemyTest')\n    db.create_all()\n    yield\n    xray_recorder.end_segment()\n    xray_recorder.clear_trace_entities()\n\n\ndef test_all(capsys, session):\n    \"\"\" Test calling all() on get all records.\n    Verify that we capture trace of query and return the SQL as metdata\"\"\"\n    # with capsys.disabled():\n    User.query.all()\n    subsegment = find_subsegment_by_annotation(xray_recorder.current_segment(), 'sqlalchemy', 'sqlalchemy.orm.query.all')\n    assert subsegment['annotations']['sqlalchemy'] == 'sqlalchemy.orm.query.all'\n    assert subsegment['sql']['url']\n    assert bool(subsegment['sql'].get('sanitized_query', None)) is xray_recorder.stream_sql\n\n\ndef test_add(capsys, session):\n    \"\"\" Test calling add() on insert a row.\n    Verify we that we capture trace for the add\"\"\"\n    # with capsys.disabled():\n    john = User(name='John', 
fullname=\"John Doe\", password=\"password\")\n    db.session.add(john)\n    subsegment = find_subsegment_by_annotation(xray_recorder.current_segment(), 'sqlalchemy', 'sqlalchemy.orm.session.add')\n    assert subsegment['annotations']['sqlalchemy'] == 'sqlalchemy.orm.session.add'\n    assert subsegment['sql']['url']\n"
  },
  {
    "path": "tests/ext/httplib/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/httplib/test_httplib.py",
    "content": "import http.client as httplib\nfrom urllib.parse import urlparse\n\nimport pytest\n\nfrom aws_xray_sdk.core import patch, xray_recorder\nfrom aws_xray_sdk.core.context import Context\nfrom aws_xray_sdk.ext.util import get_hostname, strip_url\n\n# httpbin.org is created by the same author of requests to make testing http easy.\nBASE_URL = 'httpbin.org'\n\n\n@pytest.fixture(autouse=True)\ndef construct_ctx():\n    \"\"\"\n    Clean up context storage on each test run and begin a segment\n    so that later subsegment can be attached. After each test run\n    it cleans up context storage again.\n    \"\"\"\n    from aws_xray_sdk.ext.httplib import reset_ignored, unpatch\n\n    patch(('httplib',))\n    xray_recorder.configure(service='test', sampling=False, context=Context())\n    xray_recorder.clear_trace_entities()\n    xray_recorder.begin_segment('name')\n\n    yield\n    xray_recorder.clear_trace_entities()\n    unpatch()\n    reset_ignored()\n\n\ndef _do_req(url, method='GET', use_https=True):\n    parts = urlparse(url)\n    host, _, port = parts.netloc.partition(':')\n    if port == '':\n        port = None\n    if use_https:\n        conn = httplib.HTTPSConnection(parts.netloc, port)\n    else:\n        conn = httplib.HTTPConnection(parts.netloc, port)\n\n    path = '{}?{}'.format(parts.path, parts.query) if parts.query else parts.path\n    conn.request(method, path)\n    resp = conn.getresponse()\n\n\ndef test_ok():\n    status_code = 200\n    url = 'https://{}/status/{}?foo=bar&baz=foo'.format(BASE_URL, status_code)\n    _do_req(url)\n    subsegment = xray_recorder.current_segment().subsegments[1]\n    assert subsegment.name == get_hostname(url)\n\n    http_meta = subsegment.http\n    assert http_meta['request']['url'] == strip_url(url)\n    assert http_meta['request']['method'].upper() == 'GET'\n    assert http_meta['response']['status'] == status_code\n\n\ndef test_error():\n    status_code = 400\n    url = 
'https://{}/status/{}'.format(BASE_URL, status_code)\n    _do_req(url, 'POST')\n    subsegment = xray_recorder.current_segment().subsegments[1]\n    assert subsegment.name == get_hostname(url)\n    assert subsegment.error\n\n    http_meta = subsegment.http\n    assert http_meta['request']['url'] == strip_url(url)\n    assert http_meta['request']['method'].upper() == 'POST'\n    assert http_meta['response']['status'] == status_code\n\n\ndef test_throttle():\n    status_code = 429\n    url = 'https://{}/status/{}'.format(BASE_URL, status_code)\n    _do_req(url, 'HEAD')\n    subsegment = xray_recorder.current_segment().subsegments[1]\n    assert subsegment.name == get_hostname(url)\n    assert subsegment.error\n    assert subsegment.throttle\n\n    http_meta = subsegment.http\n    assert http_meta['request']['url'] == strip_url(url)\n    assert http_meta['request']['method'].upper() == 'HEAD'\n    assert http_meta['response']['status'] == status_code\n\n\ndef test_fault():\n    status_code = 500\n    url = 'https://{}/status/{}'.format(BASE_URL, status_code)\n    _do_req(url, 'PUT')\n    subsegment = xray_recorder.current_segment().subsegments[1]\n    assert subsegment.name == get_hostname(url)\n    assert subsegment.fault\n\n    http_meta = subsegment.http\n    assert http_meta['request']['url'] == strip_url(url)\n    assert http_meta['request']['method'].upper() == 'PUT'\n    assert http_meta['response']['status'] == status_code\n\n\ndef test_invalid_url():\n    try:\n        _do_req('http://doesnt.exist')\n    except Exception:\n        # prevent uncatch exception from breaking test run\n        pass\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.fault\n\n    exception = subsegment.cause['exceptions'][0]\n    assert exception.type == 'gaierror'\n\n\ndef test_correct_identify_http():\n    status_code = 200\n    url = 'http://{}/status/{}?foo=bar&baz=foo'.format(BASE_URL, status_code)\n    _do_req(url, use_https=False)\n    
subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == get_hostname(url)\n\n    http_meta = subsegment.http\n    assert http_meta['request']['url'].split(\":\")[0] == 'http'\n\n\ndef test_correct_identify_https():\n    status_code = 200\n    url = 'https://{}/status/{}?foo=bar&baz=foo'.format(BASE_URL, status_code)\n    _do_req(url, use_https=True)\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == get_hostname(url)\n\n    https_meta = subsegment.http\n    assert https_meta['request']['url'].split(\":\")[0] == 'https'\n\n\ndef test_ignore_url():\n    from aws_xray_sdk.ext.httplib import add_ignored\n    path = '/status/200'\n    url = 'https://{}{}'.format(BASE_URL, path)\n    add_ignored(urls=[path])\n    _do_req(url, use_https=True)\n    assert len(xray_recorder.current_segment().subsegments) == 0\n\n\ndef test_ignore_hostname():\n    from aws_xray_sdk.ext.httplib import add_ignored\n    path = '/status/200'\n    url = 'https://{}{}'.format(BASE_URL, path)\n    add_ignored(hostname=BASE_URL)\n    _do_req(url, use_https=True)\n    assert len(xray_recorder.current_segment().subsegments) == 0\n\n\ndef test_ignore_hostname_glob():\n    from aws_xray_sdk.ext.httplib import add_ignored\n    path = '/status/200'\n    url = 'https://{}{}'.format(BASE_URL, path)\n    add_ignored(hostname='http*.org')\n    _do_req(url, use_https=True)\n    assert len(xray_recorder.current_segment().subsegments) == 0\n\n\nclass CustomHttpsConnection(httplib.HTTPSConnection):\n    pass\n\n\ndef test_ignore_subclass():\n    from aws_xray_sdk.ext.httplib import add_ignored\n    path = '/status/200'\n    subclass = 'tests.ext.httplib.test_httplib.CustomHttpsConnection'\n    add_ignored(subclass=subclass)\n    conn = CustomHttpsConnection(BASE_URL)\n    conn.request('GET', path)\n    conn.getresponse()\n    assert len(xray_recorder.current_segment().subsegments) == 0\n\n\ndef test_ignore_multiple_match():\n    from 
aws_xray_sdk.ext.httplib import add_ignored\n    path = '/status/200'\n    subclass = 'tests.ext.httplib.test_httplib.CustomHttpsConnection'\n    add_ignored(subclass=subclass, hostname=BASE_URL)\n    conn = CustomHttpsConnection(BASE_URL)\n    conn.request('GET', path)\n    conn.getresponse()\n    assert len(xray_recorder.current_segment().subsegments) == 0\n\n\ndef test_ignore_multiple_no_match():\n    from aws_xray_sdk.ext.httplib import add_ignored\n    path = '/status/200'\n    subclass = 'tests.ext.httplib.test_httplib.CustomHttpsConnection'\n    add_ignored(subclass=subclass, hostname='fake.host')\n    conn = CustomHttpsConnection(BASE_URL)\n    conn.request('GET', path)\n    conn.getresponse()\n    assert len(xray_recorder.current_segment().subsegments) > 0\n"
  },
  {
    "path": "tests/ext/httpx/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/httpx/test_httpx.py",
    "content": "import pytest\n\nimport httpx\nfrom aws_xray_sdk.core import patch\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.context import Context\nfrom aws_xray_sdk.ext.util import strip_url, get_hostname\n\n\npatch((\"httpx\",))\n\n# httpbin.org is created by the same author of requests to make testing http easy.\nBASE_URL = \"httpbin.org\"\n\n\n@pytest.fixture(autouse=True)\ndef construct_ctx():\n    \"\"\"\n    Clean up context storage on each test run and begin a segment\n    so that later subsegment can be attached. After each test run\n    it cleans up context storage again.\n    \"\"\"\n    xray_recorder.configure(service=\"test\", sampling=False, context=Context())\n    xray_recorder.clear_trace_entities()\n    xray_recorder.begin_segment(\"name\")\n    yield\n    xray_recorder.clear_trace_entities()\n\n\n@pytest.mark.parametrize(\"use_client\", (True, False))\ndef test_ok(use_client):\n    status_code = 200\n    url = \"http://{}/status/{}?foo=bar\".format(BASE_URL, status_code)\n    if use_client:\n        with httpx.Client() as client:\n            response = client.get(url)\n    else:\n        response = httpx.get(url)\n    assert \"x-amzn-trace-id\" in response._request.headers\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert get_hostname(url) == BASE_URL\n    assert subsegment.namespace == \"remote\"\n    assert subsegment.name == get_hostname(url)\n\n    http_meta = subsegment.http\n    assert http_meta[\"request\"][\"url\"] == strip_url(url)\n    assert http_meta[\"request\"][\"method\"].upper() == \"GET\"\n    assert http_meta[\"response\"][\"status\"] == status_code\n\n\n@pytest.mark.parametrize(\"use_client\", (True, False))\ndef test_error(use_client):\n    status_code = 400\n    url = \"http://{}/status/{}\".format(BASE_URL, status_code)\n    if use_client:\n        with httpx.Client() as client:\n            response = client.post(url)\n    else:\n        response = httpx.post(url)\n   
 assert \"x-amzn-trace-id\" in response._request.headers\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.namespace == \"remote\"\n    assert subsegment.name == get_hostname(url)\n    assert subsegment.error\n\n    http_meta = subsegment.http\n    assert http_meta[\"request\"][\"url\"] == strip_url(url)\n    assert http_meta[\"request\"][\"method\"].upper() == \"POST\"\n    assert http_meta[\"response\"][\"status\"] == status_code\n\n\n@pytest.mark.parametrize(\"use_client\", (True, False))\ndef test_throttle(use_client):\n    status_code = 429\n    url = \"http://{}/status/{}\".format(BASE_URL, status_code)\n    if use_client:\n        with httpx.Client() as client:\n            response = client.head(url)\n    else:\n        response = httpx.head(url)\n    assert \"x-amzn-trace-id\" in response._request.headers\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.namespace == \"remote\"\n    assert subsegment.name == get_hostname(url)\n    assert subsegment.error\n    assert subsegment.throttle\n\n    http_meta = subsegment.http\n    assert http_meta[\"request\"][\"url\"] == strip_url(url)\n    assert http_meta[\"request\"][\"method\"].upper() == \"HEAD\"\n    assert http_meta[\"response\"][\"status\"] == status_code\n\n\n@pytest.mark.parametrize(\"use_client\", (True, False))\ndef test_fault(use_client):\n    status_code = 500\n    url = \"http://{}/status/{}\".format(BASE_URL, status_code)\n    if use_client:\n        with httpx.Client() as client:\n            response = client.put(url)\n    else:\n        response = httpx.put(url)\n    assert \"x-amzn-trace-id\" in response._request.headers\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.namespace == \"remote\"\n    assert subsegment.name == get_hostname(url)\n    assert subsegment.fault\n\n    http_meta = subsegment.http\n    assert http_meta[\"request\"][\"url\"] == strip_url(url)\n    
assert http_meta[\"request\"][\"method\"].upper() == \"PUT\"\n    assert http_meta[\"response\"][\"status\"] == status_code\n\n\n@pytest.mark.parametrize(\"use_client\", (True, False))\ndef test_nonexistent_domain(use_client):\n    with pytest.raises(httpx.ConnectError):\n        if use_client:\n            with httpx.Client() as client:\n                client.get(\"http://doesnt.exist\")\n        else:\n            httpx.get(\"http://doesnt.exist\")\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.namespace == \"remote\"\n    assert subsegment.fault\n\n    exception = subsegment.cause[\"exceptions\"][0]\n    assert exception.type == \"ConnectError\"\n\n\n@pytest.mark.parametrize(\"use_client\", (True, False))\ndef test_invalid_url(use_client):\n    url = \"KLSDFJKLSDFJKLSDJF\"\n    with pytest.raises(httpx.UnsupportedProtocol):\n        if use_client:\n            with httpx.Client() as client:\n                client.get(url)\n        else:\n            httpx.get(url)\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.namespace == \"remote\"\n    assert subsegment.name == get_hostname(url)\n    assert subsegment.fault\n\n    http_meta = subsegment.http\n    assert http_meta[\"request\"][\"url\"] == \"/{}\".format(strip_url(url))\n\n    exception = subsegment.cause[\"exceptions\"][0]\n    assert exception.type == \"UnsupportedProtocol\"\n\n\n@pytest.mark.parametrize(\"use_client\", (True, False))\ndef test_name_uses_hostname(use_client):\n    if use_client:\n        client = httpx.Client()\n    else:\n        client = httpx\n\n    try:\n        url1 = \"http://{}/fakepath/stuff/koo/lai/ahh\".format(BASE_URL)\n        client.get(url1)\n        subsegment = xray_recorder.current_segment().subsegments[-1]\n        assert subsegment.namespace == \"remote\"\n        assert subsegment.name == BASE_URL\n        http_meta1 = subsegment.http\n        assert http_meta1[\"request\"][\"url\"] == 
strip_url(url1)\n        assert http_meta1[\"request\"][\"method\"].upper() == \"GET\"\n\n        url2 = \"http://{}/\".format(BASE_URL)\n        client.get(url2, params={\"some\": \"payload\", \"not\": \"toBeIncluded\"})\n        subsegment = xray_recorder.current_segment().subsegments[-1]\n        assert subsegment.namespace == \"remote\"\n        assert subsegment.name == BASE_URL\n        http_meta2 = subsegment.http\n        assert http_meta2[\"request\"][\"url\"] == strip_url(url2)\n        assert http_meta2[\"request\"][\"method\"].upper() == \"GET\"\n\n        url3 = \"http://subdomain.{}/fakepath/stuff/koo/lai/ahh\".format(BASE_URL)\n        try:\n            client.get(url3)\n        except httpx.ConnectError:\n            pass\n        subsegment = xray_recorder.current_segment().subsegments[-1]\n        assert subsegment.namespace == \"remote\"\n        assert subsegment.name == \"subdomain.\" + BASE_URL\n        http_meta3 = subsegment.http\n        assert http_meta3[\"request\"][\"url\"] == strip_url(url3)\n        assert http_meta3[\"request\"][\"method\"].upper() == \"GET\"\n    finally:\n        if use_client:\n            client.close()\n\n\n@pytest.mark.parametrize(\"use_client\", (True, False))\ndef test_strip_http_url(use_client):\n    status_code = 200\n    url = \"http://{}/get?foo=bar\".format(BASE_URL)\n    if use_client:\n        with httpx.Client() as client:\n            response = client.get(url)\n    else:\n        response = httpx.get(url)\n    assert \"x-amzn-trace-id\" in response._request.headers\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.namespace == \"remote\"\n    assert subsegment.name == get_hostname(url)\n\n    http_meta = subsegment.http\n    assert http_meta[\"request\"][\"url\"] == strip_url(url)\n    assert http_meta[\"request\"][\"method\"].upper() == \"GET\"\n    assert http_meta[\"response\"][\"status\"] == status_code\n"
  },
  {
    "path": "tests/ext/httpx/test_httpx_async.py",
    "content": "import pytest\n\nimport httpx\nfrom aws_xray_sdk.core import patch\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.context import Context\nfrom aws_xray_sdk.ext.util import strip_url, get_hostname\n\n\npatch((\"httpx\",))\n\n# httpbin.org is created by the same author of requests to make testing http easy.\nBASE_URL = \"httpbin.org\"\n\n\n@pytest.fixture(autouse=True)\ndef construct_ctx():\n    \"\"\"\n    Clean up context storage on each test run and begin a segment\n    so that later subsegment can be attached. After each test run\n    it cleans up context storage again.\n    \"\"\"\n    xray_recorder.configure(service=\"test\", sampling=False, context=Context())\n    xray_recorder.clear_trace_entities()\n    xray_recorder.begin_segment(\"name\")\n    yield\n    xray_recorder.clear_trace_entities()\n\n\n@pytest.mark.asyncio\nasync def test_ok_async():\n    status_code = 200\n    url = \"http://{}/status/{}?foo=bar\".format(BASE_URL, status_code)\n    async with httpx.AsyncClient() as client:\n        response = await client.get(url)\n    assert \"x-amzn-trace-id\" in response._request.headers\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert get_hostname(url) == BASE_URL\n    assert subsegment.namespace == \"remote\"\n    assert subsegment.name == get_hostname(url)\n\n    http_meta = subsegment.http\n    assert http_meta[\"request\"][\"url\"] == strip_url(url)\n    assert http_meta[\"request\"][\"method\"].upper() == \"GET\"\n    assert http_meta[\"response\"][\"status\"] == status_code\n\n\n@pytest.mark.asyncio\nasync def test_error_async():\n    status_code = 400\n    url = \"http://{}/status/{}\".format(BASE_URL, status_code)\n    async with httpx.AsyncClient() as client:\n        response = await client.post(url)\n    assert \"x-amzn-trace-id\" in response._request.headers\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.namespace == \"remote\"\n    
assert subsegment.name == get_hostname(url)\n    assert subsegment.error\n\n    http_meta = subsegment.http\n    assert http_meta[\"request\"][\"url\"] == strip_url(url)\n    assert http_meta[\"request\"][\"method\"].upper() == \"POST\"\n    assert http_meta[\"response\"][\"status\"] == status_code\n\n\n@pytest.mark.asyncio\nasync def test_throttle_async():\n    status_code = 429\n    url = \"http://{}/status/{}\".format(BASE_URL, status_code)\n    async with httpx.AsyncClient() as client:\n        response = await client.head(url)\n    assert \"x-amzn-trace-id\" in response._request.headers\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.namespace == \"remote\"\n    assert subsegment.name == get_hostname(url)\n    assert subsegment.error\n    assert subsegment.throttle\n\n    http_meta = subsegment.http\n    assert http_meta[\"request\"][\"url\"] == strip_url(url)\n    assert http_meta[\"request\"][\"method\"].upper() == \"HEAD\"\n    assert http_meta[\"response\"][\"status\"] == status_code\n\n\n@pytest.mark.asyncio\nasync def test_fault_async():\n    status_code = 500\n    url = \"http://{}/status/{}\".format(BASE_URL, status_code)\n    async with httpx.AsyncClient() as client:\n        response = await client.put(url)\n    assert \"x-amzn-trace-id\" in response._request.headers\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.namespace == \"remote\"\n    assert subsegment.name == get_hostname(url)\n    assert subsegment.fault\n\n    http_meta = subsegment.http\n    assert http_meta[\"request\"][\"url\"] == strip_url(url)\n    assert http_meta[\"request\"][\"method\"].upper() == \"PUT\"\n    assert http_meta[\"response\"][\"status\"] == status_code\n\n\n@pytest.mark.asyncio\nasync def test_nonexistent_domain_async():\n    with pytest.raises(httpx.ConnectError):\n        async with httpx.AsyncClient() as client:\n            await client.get(\"http://doesnt.exist\")\n\n    
subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.namespace == \"remote\"\n    assert subsegment.fault\n\n    exception = subsegment.cause[\"exceptions\"][0]\n    assert exception.type == \"ConnectError\"\n\n\n@pytest.mark.asyncio\nasync def test_invalid_url_async():\n    url = \"KLSDFJKLSDFJKLSDJF\"\n    with pytest.raises(httpx.UnsupportedProtocol):\n        async with httpx.AsyncClient() as client:\n            await client.get(url)\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.namespace == \"remote\"\n    assert subsegment.name == get_hostname(url)\n    assert subsegment.fault\n\n    http_meta = subsegment.http\n    assert http_meta[\"request\"][\"url\"] == \"/{}\".format(strip_url(url))\n\n    exception = subsegment.cause[\"exceptions\"][0]\n    assert exception.type == \"UnsupportedProtocol\"\n\n\n@pytest.mark.asyncio\nasync def test_name_uses_hostname_async():\n    async with httpx.AsyncClient() as client:\n        url1 = \"http://{}/fakepath/stuff/koo/lai/ahh\".format(BASE_URL)\n        await client.get(url1)\n        subsegment = xray_recorder.current_segment().subsegments[-1]\n        assert subsegment.namespace == \"remote\"\n        assert subsegment.name == BASE_URL\n        http_meta1 = subsegment.http\n        assert http_meta1[\"request\"][\"url\"] == strip_url(url1)\n        assert http_meta1[\"request\"][\"method\"].upper() == \"GET\"\n\n        url2 = \"http://{}/\".format(BASE_URL)\n        await client.get(url2, params={\"some\": \"payload\", \"not\": \"toBeIncluded\"})\n        subsegment = xray_recorder.current_segment().subsegments[-1]\n        assert subsegment.namespace == \"remote\"\n        assert subsegment.name == BASE_URL\n        http_meta2 = subsegment.http\n        assert http_meta2[\"request\"][\"url\"] == strip_url(url2)\n        assert http_meta2[\"request\"][\"method\"].upper() == \"GET\"\n\n        url3 = 
\"http://subdomain.{}/fakepath/stuff/koo/lai/ahh\".format(BASE_URL)\n        try:\n            await client.get(url3)\n        except Exception:\n            # This is an invalid url so we dont want to break the test\n            pass\n        subsegment = xray_recorder.current_segment().subsegments[-1]\n        assert subsegment.namespace == \"remote\"\n        assert subsegment.name == \"subdomain.\" + BASE_URL\n        http_meta3 = subsegment.http\n        assert http_meta3[\"request\"][\"url\"] == strip_url(url3)\n        assert http_meta3[\"request\"][\"method\"].upper() == \"GET\"\n\n\n@pytest.mark.asyncio\nasync def test_strip_http_url_async():\n    status_code = 200\n    url = \"http://{}/get?foo=bar\".format(BASE_URL)\n    async with httpx.AsyncClient() as client:\n        response = await client.get(url)\n    assert \"x-amzn-trace-id\" in response._request.headers\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.namespace == \"remote\"\n    assert subsegment.name == get_hostname(url)\n\n    http_meta = subsegment.http\n    assert http_meta[\"request\"][\"url\"] == strip_url(url)\n    assert http_meta[\"request\"][\"method\"].upper() == \"GET\"\n    assert http_meta[\"response\"][\"status\"] == status_code\n"
  },
  {
    "path": "tests/ext/pg8000/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/pg8000/test_pg8000.py",
    "content": "import pg8000\n\nimport pytest\nimport testing.postgresql\n\nfrom aws_xray_sdk.core import patch\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.context import Context\nfrom aws_xray_sdk.ext.pg8000 import unpatch\n\n\n@pytest.fixture(scope='module', autouse=True)\ndef patch_module():\n    patch(('pg8000',))\n    yield\n    unpatch()\n\n\n@pytest.fixture(autouse=True)\ndef construct_ctx():\n    \"\"\"\n    Clean up context storage on each test run and begin a segment\n    so that later subsegment can be attached. After each test run\n    it cleans up context storage again.\n    \"\"\"\n    xray_recorder.configure(service='test', sampling=False, context=Context())\n    xray_recorder.clear_trace_entities()\n    xray_recorder.begin_segment('name')\n    yield\n    xray_recorder.clear_trace_entities()\n\n\ndef test_execute_dsn_kwargs():\n    q = 'SELECT 1'\n    with testing.postgresql.Postgresql() as postgresql:\n        dsn = postgresql.dsn()\n        conn = pg8000.connect(database=dsn['database'],\n                              user=dsn['user'],\n                              password='',\n                              host=dsn['host'],\n                              port=dsn['port'])\n        cur = conn.cursor()\n        cur.execute(q)\n\n    subsegment = xray_recorder.current_segment().subsegments[-1]\n    assert subsegment.name == 'execute'\n    sql = subsegment.sql\n    assert sql['database_type'] == 'PostgreSQL'\n    assert sql['user'] == dsn['user']\n    assert sql['database_version']\n\n\ndef test_execute_bad_query():\n    q = 'SELECT blarg'\n    with testing.postgresql.Postgresql() as postgresql:\n        dsn = postgresql.dsn()\n        conn = pg8000.connect(database=dsn['database'],\n                              user=dsn['user'],\n                              password='',\n                              host=dsn['host'],\n                              port=dsn['port'])\n        cur = conn.cursor()\n        try:\n           
 cur.execute(q)\n        except Exception:\n            pass\n\n    subsegment = xray_recorder.current_segment().subsegments[-1]\n    assert subsegment.name == 'execute'\n    sql = subsegment.sql\n    assert sql['database_type'] == 'PostgreSQL'\n    assert sql['user'] == dsn['user']\n    assert sql['database_version']\n\n    exception = subsegment.cause['exceptions'][0]\n    assert exception.type == 'ProgrammingError'\n"
  },
  {
    "path": "tests/ext/psycopg/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/psycopg/test_psycopg.py",
    "content": "import psycopg\nimport psycopg.sql\nimport psycopg_pool\n\nimport pytest\nimport testing.postgresql\n\nfrom aws_xray_sdk.core import patch\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.context import Context\n\npatch(('psycopg',))\n\n\n@pytest.fixture(autouse=True)\ndef construct_ctx():\n    \"\"\"\n    Clean up context storage on each test run and begin a segment\n    so that later subsegment can be attached. After each test run\n    it cleans up context storage again.\n    \"\"\"\n    xray_recorder.configure(service='test', sampling=False, context=Context())\n    xray_recorder.clear_trace_entities()\n    xray_recorder.begin_segment('name')\n    yield\n    xray_recorder.clear_trace_entities()\n\n\ndef test_execute_dsn_kwargs():\n    q = 'SELECT 1'\n    with testing.postgresql.Postgresql() as postgresql:\n        url = postgresql.url()\n        dsn = postgresql.dsn()\n        conn = psycopg.connect(dbname=dsn['database'],\n                                user=dsn['user'],\n                                password='',\n                                host=dsn['host'],\n                                port=dsn['port'])\n        cur = conn.cursor()\n        cur.execute(q)\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == 'execute'\n    sql = subsegment.sql\n    assert sql['database_type'] == 'PostgreSQL'\n    assert sql['user'] == dsn['user']\n    assert sql['url'] == url\n    assert sql['database_version']\n\n\ndef test_execute_dsn_string():\n    q = 'SELECT 1'\n    with testing.postgresql.Postgresql() as postgresql:\n        url = postgresql.url()\n        dsn = postgresql.dsn()\n        conn = psycopg.connect('dbname=' + dsn['database'] +\n                                ' password=mypassword' +\n                                ' host=' + dsn['host'] +\n                                ' port=' + str(dsn['port']) +\n                                ' user=' + dsn['user'])\n        
cur = conn.cursor()\n        cur.execute(q)\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == 'execute'\n    sql = subsegment.sql\n    assert sql['database_type'] == 'PostgreSQL'\n    assert sql['user'] == dsn['user']\n    assert sql['url'] == url\n    assert sql['database_version']\n\n\ndef test_execute_in_pool():\n    q = 'SELECT 1'\n    with testing.postgresql.Postgresql() as postgresql:\n        url = postgresql.url()\n        dsn = postgresql.dsn()\n        pool = psycopg_pool.ConnectionPool('dbname=' + dsn['database'] +\n                                            ' password=mypassword' +\n                                            ' host=' + dsn['host'] +\n                                            ' port=' + str(dsn['port']) +\n                                            ' user=' + dsn['user'],\n                                            min_size=1,\n                                            max_size=1)\n        with pool.connection() as conn:\n            cur = conn.cursor()\n            cur.execute(q)\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == 'execute'\n    sql = subsegment.sql\n    assert sql['database_type'] == 'PostgreSQL'\n    assert sql['user'] == dsn['user']\n    assert sql['url'] == url\n    assert sql['database_version']\n\n\ndef test_execute_bad_query():\n    q = 'SELECT blarg'\n    with testing.postgresql.Postgresql() as postgresql:\n        url = postgresql.url()\n        dsn = postgresql.dsn()\n        conn = psycopg.connect(dbname=dsn['database'],\n                                user=dsn['user'],\n                                password='',\n                                host=dsn['host'],\n                                port=dsn['port'])\n        cur = conn.cursor()\n        try:\n            cur.execute(q)\n        except Exception:\n            pass\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert 
subsegment.name == 'execute'\n    sql = subsegment.sql\n    assert sql['database_type'] == 'PostgreSQL'\n    assert sql['user'] == dsn['user']\n    assert sql['url'] == url\n    assert sql['database_version']\n\n    exception = subsegment.cause['exceptions'][0]\n    assert exception.type == 'UndefinedColumn'\n\ndef test_query_as_string():\n    with testing.postgresql.Postgresql() as postgresql:\n        url = postgresql.url()\n        dsn = postgresql.dsn()\n        conn = psycopg.connect('dbname=' + dsn['database'] +\n                                ' password=mypassword' +\n                                ' host=' + dsn['host'] +\n                                ' port=' + str(dsn['port']) +\n                                ' user=' + dsn['user'])\n        test_sql = psycopg.sql.Identifier('test')\n        assert test_sql.as_string(conn)\n        assert test_sql.as_string(conn.cursor())\n"
  },
  {
    "path": "tests/ext/psycopg2/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/psycopg2/test_psycopg2.py",
    "content": "import psycopg2\nimport psycopg2.extras\nimport psycopg2.pool\nimport psycopg2.sql\n\nimport pytest\nimport testing.postgresql\n\nfrom aws_xray_sdk.core import patch\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.context import Context\n\npatch(('psycopg2',))\n\n\n@pytest.fixture(autouse=True)\ndef construct_ctx():\n    \"\"\"\n    Clean up context storage on each test run and begin a segment\n    so that later subsegment can be attached. After each test run\n    it cleans up context storage again.\n    \"\"\"\n    xray_recorder.configure(service='test', sampling=False, context=Context())\n    xray_recorder.clear_trace_entities()\n    xray_recorder.begin_segment('name')\n    yield\n    xray_recorder.clear_trace_entities()\n\n\ndef test_execute_dsn_kwargs():\n    q = 'SELECT 1'\n    with testing.postgresql.Postgresql() as postgresql:\n        url = postgresql.url()\n        dsn = postgresql.dsn()\n        conn = psycopg2.connect(dbname=dsn['database'],\n                                user=dsn['user'],\n                                password='',\n                                host=dsn['host'],\n                                port=dsn['port'])\n        cur = conn.cursor()\n        cur.execute(q)\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == 'execute'\n    sql = subsegment.sql\n    assert sql['database_type'] == 'PostgreSQL'\n    assert sql['user'] == dsn['user']\n    assert sql['url'] == url\n    assert sql['database_version']\n\n\ndef test_execute_dsn_kwargs_alt_dbname():\n    \"\"\"\n    Psycopg supports database to be passed as `database` or `dbname`\n    \"\"\"\n    q = 'SELECT 1'\n\n    with testing.postgresql.Postgresql() as postgresql:\n        url = postgresql.url()\n        dsn = postgresql.dsn()\n        conn = psycopg2.connect(database=dsn['database'],\n                                user=dsn['user'],\n                                password='',\n               
                 host=dsn['host'],\n                                port=dsn['port'])\n        cur = conn.cursor()\n        cur.execute(q)\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == 'execute'\n    sql = subsegment.sql\n    assert sql['database_type'] == 'PostgreSQL'\n    assert sql['user'] == dsn['user']\n    assert sql['url'] == url\n    assert sql['database_version']\n\n\ndef test_execute_dsn_string():\n    q = 'SELECT 1'\n    with testing.postgresql.Postgresql() as postgresql:\n        url = postgresql.url()\n        dsn = postgresql.dsn()\n        conn = psycopg2.connect('dbname=' + dsn['database'] +\n                                ' password=mypassword' +\n                                ' host=' + dsn['host'] +\n                                ' port=' + str(dsn['port']) +\n                                ' user=' + dsn['user'])\n        cur = conn.cursor()\n        cur.execute(q)\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == 'execute'\n    sql = subsegment.sql\n    assert sql['database_type'] == 'PostgreSQL'\n    assert sql['user'] == dsn['user']\n    assert sql['url'] == url\n    assert sql['database_version']\n\n\ndef test_execute_in_pool():\n    q = 'SELECT 1'\n    with testing.postgresql.Postgresql() as postgresql:\n        url = postgresql.url()\n        dsn = postgresql.dsn()\n        pool = psycopg2.pool.SimpleConnectionPool(1, 1,\n                                                  dbname=dsn['database'],\n                                                  user=dsn['user'],\n                                                  password='',\n                                                  host=dsn['host'],\n                                                  port=dsn['port'])\n        cur = pool.getconn(key=dsn['user']).cursor()\n        cur.execute(q)\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == 
'execute'\n    sql = subsegment.sql\n    assert sql['database_type'] == 'PostgreSQL'\n    assert sql['user'] == dsn['user']\n    assert sql['url'] == url\n    assert sql['database_version']\n\n\ndef test_execute_bad_query():\n    q = 'SELECT blarg'\n    with testing.postgresql.Postgresql() as postgresql:\n        url = postgresql.url()\n        dsn = postgresql.dsn()\n        conn = psycopg2.connect(dbname=dsn['database'],\n                                user=dsn['user'],\n                                password='',\n                                host=dsn['host'],\n                                port=dsn['port'])\n        cur = conn.cursor()\n        try:\n            cur.execute(q)\n        except Exception:\n            pass\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == 'execute'\n    sql = subsegment.sql\n    assert sql['database_type'] == 'PostgreSQL'\n    assert sql['user'] == dsn['user']\n    assert sql['url'] == url\n    assert sql['database_version']\n\n    exception = subsegment.cause['exceptions'][0]\n    assert exception.type == 'UndefinedColumn'\n\n\ndef test_register_extensions():\n    with testing.postgresql.Postgresql() as postgresql:\n        url = postgresql.url()\n        dsn = postgresql.dsn()\n        conn = psycopg2.connect('dbname=' + dsn['database'] +\n                                ' password=mypassword' +\n                                ' host=' + dsn['host'] +\n                                ' port=' + str(dsn['port']) +\n                                ' user=' + dsn['user'])\n        assert psycopg2.extras.register_uuid(None, conn)\n        assert psycopg2.extras.register_uuid(None, conn.cursor())\n\n\ndef test_query_as_string():\n    with testing.postgresql.Postgresql() as postgresql:\n        url = postgresql.url()\n        dsn = postgresql.dsn()\n        conn = psycopg2.connect('dbname=' + dsn['database'] +\n                                ' password=mypassword' +\n           
                     ' host=' + dsn['host'] +\n                                ' port=' + str(dsn['port']) +\n                                ' user=' + dsn['user'])\n        test_sql = psycopg2.sql.Identifier('test')\n        assert test_sql.as_string(conn)\n        assert test_sql.as_string(conn.cursor())\n\n\ndef test_register_default_jsonb():\n    with testing.postgresql.Postgresql() as postgresql:\n        url = postgresql.url()\n        dsn = postgresql.dsn()\n        conn = psycopg2.connect('dbname=' + dsn['database'] +\n                                ' password=mypassword' +\n                                ' host=' + dsn['host'] +\n                                ' port=' + str(dsn['port']) +\n                                ' user=' + dsn['user'])\n\n        assert psycopg2.extras.register_default_jsonb(conn_or_curs=conn, loads=lambda x: x)\n        assert psycopg2.extras.register_default_jsonb(conn_or_curs=conn.cursor(), loads=lambda x: x)\n"
  },
  {
    "path": "tests/ext/pymysql/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/pymysql/test_pymysql.py",
    "content": "import pymysql\n\nimport pytest\n\nfrom aws_xray_sdk.core import patch\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.context import Context\nfrom aws_xray_sdk.ext.pymysql import unpatch\n\nMYSQL_USER = \"root\"\nMYSQL_PASSWORD = \"root\"\nMYSQL_HOST = \"localhost\"\nMYSQL_PORT = 3306\nMYSQL_DB_NAME = \"test_db\"\n\n@pytest.fixture(scope='module', autouse=True)\ndef patch_module():\n    patch(('pymysql',))\n    yield\n    unpatch()\n\n\n@pytest.fixture(autouse=True)\ndef construct_ctx():\n    \"\"\"\n    Clean up context storage on each test run and begin a segment\n    so that later subsegment can be attached. After each test run\n    it cleans up context storage again.\n    \"\"\"\n    xray_recorder.configure(service='test', sampling=False, context=Context())\n    xray_recorder.clear_trace_entities()\n    xray_recorder.begin_segment('name')\n    yield\n    xray_recorder.clear_trace_entities()\n\n\ndef test_execute_dsn_kwargs():\n    q = 'SELECT 1'\n    conn = pymysql.connect(database=MYSQL_DB_NAME,\n                          user=MYSQL_USER,\n                          password=MYSQL_PASSWORD,\n                          host=MYSQL_HOST,\n                          port=MYSQL_PORT)\n    cur = conn.cursor()\n    cur.execute(q)\n\n    subsegment = xray_recorder.current_segment().subsegments[-1]\n    assert subsegment.name == 'execute'\n    sql = subsegment.sql\n    assert sql['database_type'] == 'MySQL'\n    assert sql['user'] == MYSQL_USER\n    assert sql['driver_version'] == 'PyMySQL'\n    assert sql['database_version']\n\n\ndef test_execute_bad_query():\n    q = \"SELECT blarg\"\n    conn = pymysql.connect(database=MYSQL_DB_NAME,\n                          user=MYSQL_USER,\n                          password=MYSQL_PASSWORD,\n                          host=MYSQL_HOST,\n                          port=MYSQL_PORT)\n    cur = conn.cursor()\n    try:\n        cur.execute(q)\n    except Exception:\n        pass\n    \n    subsegment = 
xray_recorder.current_segment().subsegments[-1]\n    assert subsegment.name == \"execute\"\n    sql = subsegment.sql\n    assert sql['database_type'] == 'MySQL'\n    assert sql['user'] == MYSQL_USER\n    assert sql['driver_version'] == 'PyMySQL'\n    assert sql['database_version']\n\n    exception = subsegment.cause['exceptions'][0]\n    assert exception.type is not None\n"
  },
  {
    "path": "tests/ext/pynamodb/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/pynamodb/test_pynamodb.py",
    "content": "import pytest\n\nimport botocore.session\nfrom botocore import UNSIGNED\nfrom botocore.client import Config\nfrom botocore.exceptions import ClientError\nfrom pynamodb.attributes import UnicodeAttribute\nfrom pynamodb.models import Model\n\nfrom aws_xray_sdk.core import patch\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.context import Context\n\npatch(('pynamodb',))\n\n\n@pytest.fixture(autouse=True)\ndef construct_ctx():\n    \"\"\"\n    Clean up context storage on each test run and begin a segment\n    so that later subsegment can be attached. After each test run\n    it cleans up context storage again.\n    \"\"\"\n    xray_recorder.configure(service='test', sampling=False, context=Context())\n    xray_recorder.clear_trace_entities()\n    xray_recorder.begin_segment('name')\n    yield\n    xray_recorder.clear_trace_entities()\n\n\ndef test_exception():\n    class SampleModel(Model):\n        class Meta:\n            region = 'us-west-2'\n            table_name = 'mytable'\n\n        sample_attribute = UnicodeAttribute(hash_key=True)\n\n    try:\n        SampleModel.describe_table()\n    except Exception:\n        pass\n\n    subsegments = xray_recorder.current_segment().subsegments\n    assert len(subsegments) == 1\n    subsegment = subsegments[0]\n    assert subsegment.name == 'dynamodb'\n    assert len(subsegment.subsegments) == 0\n    assert subsegment.error\n\n    aws_meta = subsegment.aws\n    assert aws_meta['region'] == 'us-west-2'\n    assert aws_meta['operation'] == 'DescribeTable'\n    assert aws_meta['table_name'] == 'mytable'\n\n\ndef test_empty_response():\n    from aws_xray_sdk.ext.pynamodb.patch import pynamodb_meta_processor\n    subsegment = xray_recorder.begin_subsegment('test')\n\n    class TempReq:\n        def __init__(self):\n            self.headers = {'X-Amz-Target': 'ddb.ListTables'.encode('utf-8')}\n            self.url = 'ddb.us-west-2'\n            self.body = '{}'.encode('utf-8')\n\n    
prepared_request = TempReq()\n    args = [prepared_request]\n\n    pynamodb_meta_processor(wrapped=None, instance=None, args=args,\n                            kwargs=None, return_value=None,\n                            exception=None, subsegment=subsegment,\n                            stack=None)\n\n    aws_meta = subsegment.aws\n    assert aws_meta['region'] == 'us-west-2'\n    assert aws_meta['operation'] == 'ListTables'\n\n\ndef test_only_dynamodb_calls_are_traced():\n    \"\"\"Test only a single subsegment is created for other AWS services.\n\n    As the pynamodb patch applies the botocore patch as well, we need\n    to ensure that only one subsegment is created for all calls not\n    made by PynamoDB. As PynamoDB calls botocore differently than the\n    botocore patch expects we also just get a single subsegment per\n    PynamoDB call.\n    \"\"\"\n    session = botocore.session.get_session()\n    s3 = session.create_client('s3', region_name='us-west-2',\n                               config=Config(signature_version=UNSIGNED))\n    try:\n        s3.get_bucket_location(Bucket='mybucket')\n    except ClientError:\n        pass\n\n    subsegments = xray_recorder.current_segment().subsegments\n    assert len(subsegments) == 1\n    assert subsegments[0].name == 's3'\n    assert len(subsegments[0].subsegments) == 0\n"
  },
  {
    "path": "tests/ext/requests/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/requests/test_requests.py",
    "content": "import pytest\nimport requests\n\nfrom aws_xray_sdk.core import patch\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.context import Context\nfrom aws_xray_sdk.ext.util import strip_url, get_hostname\n\n\npatch(('requests',))\n\n# httpbin.org is created by the same author of requests to make testing http easy.\nBASE_URL = 'httpbin.org'\n\n\n@pytest.fixture(autouse=True)\ndef construct_ctx():\n    \"\"\"\n    Clean up context storage on each test run and begin a segment\n    so that later subsegment can be attached. After each test run\n    it cleans up context storage again.\n    \"\"\"\n    xray_recorder.configure(service='test', sampling=False, context=Context())\n    xray_recorder.clear_trace_entities()\n    xray_recorder.begin_segment('name')\n    yield\n    xray_recorder.clear_trace_entities()\n\n\ndef test_ok():\n    status_code = 200\n    url = 'http://{}/status/{}?foo=bar'.format(BASE_URL, status_code)\n    requests.get(url)\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert get_hostname(url) == BASE_URL\n    assert subsegment.name == get_hostname(url)\n\n    http_meta = subsegment.http\n    assert http_meta['request']['url'] == strip_url(url)\n    assert http_meta['request']['method'].upper() == 'GET'\n    assert http_meta['response']['status'] == status_code\n\n\ndef test_error():\n    status_code = 400\n    url = 'http://{}/status/{}'.format(BASE_URL, status_code)\n    requests.post(url)\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == get_hostname(url)\n    assert subsegment.error\n\n    http_meta = subsegment.http\n    assert http_meta['request']['url'] == strip_url(url)\n    assert http_meta['request']['method'].upper() == 'POST'\n    assert http_meta['response']['status'] == status_code\n\n\ndef test_throttle():\n    status_code = 429\n    url = 'http://{}/status/{}'.format(BASE_URL, status_code)\n    requests.head(url)\n    subsegment = 
xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == get_hostname(url)\n    assert subsegment.error\n    assert subsegment.throttle\n\n    http_meta = subsegment.http\n    assert http_meta['request']['url'] == strip_url(url)\n    assert http_meta['request']['method'].upper() == 'HEAD'\n    assert http_meta['response']['status'] == status_code\n\n\ndef test_fault():\n    status_code = 500\n    url = 'http://{}/status/{}'.format(BASE_URL, status_code)\n    requests.put(url)\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == get_hostname(url)\n    assert subsegment.fault\n\n    http_meta = subsegment.http\n    assert http_meta['request']['url'] == strip_url(url)\n    assert http_meta['request']['method'].upper() == 'PUT'\n    assert http_meta['response']['status'] == status_code\n\n\ndef test_nonexistent_domain():\n    try:\n        requests.get('http://doesnt.exist')\n    except Exception:\n        # prevent uncatch exception from breaking test run\n        pass\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.fault\n\n    exception = subsegment.cause['exceptions'][0]\n    assert exception.type == 'ConnectionError'\n\n\ndef test_invalid_url():\n    url = 'KLSDFJKLSDFJKLSDJF'\n    try:\n        requests.get(url)\n    except Exception:\n        # prevent uncatch exception from breaking test run\n        pass\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == get_hostname(url)\n    assert subsegment.fault\n\n    http_meta = subsegment.http\n    assert http_meta['request']['url'] == strip_url(url)\n\n    exception = subsegment.cause['exceptions'][0]\n    assert exception.type == 'MissingSchema'\n\n\ndef test_name_uses_hostname():\n    url1 = 'http://{}/fakepath/stuff/koo/lai/ahh'.format(BASE_URL)\n    requests.get(url1)\n    subsegment = xray_recorder.current_segment().subsegments[-1]\n    assert subsegment.name == 
BASE_URL\n    http_meta1 = subsegment.http\n    assert http_meta1['request']['url'] == strip_url(url1)\n    assert http_meta1['request']['method'].upper() == 'GET'\n\n    url2 = 'http://{}/'.format(BASE_URL)\n    requests.get(url2, params={\"some\": \"payload\", \"not\": \"toBeIncluded\"})\n    subsegment = xray_recorder.current_segment().subsegments[-1]\n    assert subsegment.name == BASE_URL\n    http_meta2 = subsegment.http\n    assert http_meta2['request']['url'] == strip_url(url2)\n    assert http_meta2['request']['method'].upper() == 'GET'\n\n    url3 = 'http://subdomain.{}/fakepath/stuff/koo/lai/ahh'.format(BASE_URL)\n    try:\n        requests.get(url3)\n    except Exception:\n        # This is an invalid url so we dont want to break the test\n        pass\n    subsegment = xray_recorder.current_segment().subsegments[-1]\n    assert subsegment.name == \"subdomain.\" + BASE_URL\n    http_meta3 = subsegment.http\n    assert http_meta3['request']['url'] == strip_url(url3)\n    assert http_meta3['request']['method'].upper() == 'GET'\n\n\ndef test_strip_http_url():\n    status_code = 200\n    url = 'http://{}/get?foo=bar'.format(BASE_URL)\n    requests.get(url)\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == get_hostname(url)\n\n    http_meta = subsegment.http\n    assert http_meta['request']['url'] == strip_url(url)\n    assert http_meta['request']['method'].upper() == 'GET'\n    assert http_meta['response']['status'] == status_code\n\n"
  },
  {
    "path": "tests/ext/sqlalchemy/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/sqlalchemy/test_query.py",
    "content": "import pytest\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.context import Context\nfrom aws_xray_sdk.ext.sqlalchemy.query import XRaySessionMaker\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy import create_engine, Column, Integer, String\nfrom ...util import find_subsegment_by_annotation\n\n\nBase = declarative_base()\n\n\nclass User(Base):\n        __tablename__ = 'users'\n\n        id = Column(Integer, primary_key=True)\n        name = Column(String)\n        fullname = Column(String)\n        password = Column(String)\n\n\n@pytest.fixture()\ndef engine():\n    return create_engine('sqlite:///:memory:')\n\n\n@pytest.fixture()\ndef session(engine):\n    \"\"\"Test Fixture to Create DataBase Tables and start a trace segment\"\"\"\n    engine = create_engine('sqlite:///:memory:')\n    xray_recorder.configure(service='test', sampling=False, context=Context())\n    xray_recorder.clear_trace_entities()\n    xray_recorder.begin_segment('SQLAlchemyTest')\n    Session = XRaySessionMaker(bind=engine)\n    Base.metadata.create_all(engine)\n    session = Session()\n    yield session\n    xray_recorder.end_segment()\n    xray_recorder.clear_trace_entities()\n\n\n@pytest.fixture()\ndef connection(engine):\n    conn = engine.connect()\n    xray_recorder.configure(service='test', sampling=False, context=Context())\n    xray_recorder.clear_trace_entities()\n    xray_recorder.begin_segment('SQLAlchemyTest')\n    Session = XRaySessionMaker(bind=conn)\n    Base.metadata.create_all(engine)\n    session = Session()\n    yield session\n    xray_recorder.end_segment()\n    xray_recorder.clear_trace_entities()\n\n\n\ndef test_all(capsys, session):\n    \"\"\" Test calling all() on get all records.\n    Verify we run the query and return the SQL as metdata\"\"\"\n    # with capsys.disabled():\n    session.query(User).all()\n    subsegment = find_subsegment_by_annotation(xray_recorder.current_segment(), 'sqlalchemy', 
'sqlalchemy.orm.query.all')\n    assert subsegment['annotations']['sqlalchemy'] == 'sqlalchemy.orm.query.all'\n    assert subsegment['sql']['sanitized_query']\n    assert subsegment['sql']['url']\n\n\ndef test_supports_connection(capsys, connection):\n    \"\"\" Test that XRaySessionMaker supports connection as well as engine\"\"\"\n    connection.query(User).all()\n    subsegment = find_subsegment_by_annotation(xray_recorder.current_segment(), 'sqlalchemy',\n                                               'sqlalchemy.orm.query.all')\n    assert subsegment['annotations']['sqlalchemy'] == 'sqlalchemy.orm.query.all'\n\n\ndef test_add(capsys, session):\n    \"\"\" Test calling add() on insert a row.\n    Verify we that we capture trace for the add\"\"\"\n    # with capsys.disabled():\n    john = User(name='John', fullname=\"John Doe\", password=\"password\")\n    session.add(john)\n    subsegment = find_subsegment_by_annotation(xray_recorder.current_segment(), 'sqlalchemy', 'sqlalchemy.orm.session.add')\n    assert subsegment['annotations']['sqlalchemy'] == 'sqlalchemy.orm.session.add'\n    assert subsegment['sql']['url']\n\n\ndef test_filter_first(capsys, session):\n    \"\"\" Test calling filter().first() on get first filtered records.\n    Verify we run the query and return the SQL as metdata\"\"\"\n    # with capsys.disabled():\n    session.query(User).filter(User.password==\"mypassword!\").first()\n    subsegment = find_subsegment_by_annotation(xray_recorder.current_segment(), 'sqlalchemy', 'sqlalchemy.orm.query.first')\n    assert subsegment['annotations']['sqlalchemy'] == 'sqlalchemy.orm.query.first'\n    assert subsegment['sql']['sanitized_query']\n    assert \"mypassword!\" not in subsegment['sql']['sanitized_query']\n    assert \"users.password = ?\" in  subsegment['sql']['sanitized_query']\n    assert subsegment['sql']['url']\n"
  },
  {
    "path": "tests/ext/sqlalchemy_core/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/sqlalchemy_core/test_base.py",
    "content": "import pytest\nfrom sqlalchemy import create_engine, Column, Integer, String\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy.orm import sessionmaker\n\nfrom aws_xray_sdk.core import xray_recorder, patch\nfrom aws_xray_sdk.core.context import Context\n\nBase = declarative_base()\n\n\nclass User(Base):\n        __tablename__ = 'users'\n\n        id = Column(Integer, primary_key=True)\n        name = Column(String)\n        fullname = Column(String)\n        password = Column(String)\n\n\n@pytest.fixture()\ndef db_url():\n    return 'sqlite:///:memory:'\n\n\n@pytest.fixture()\ndef engine(db_url):\n    \"\"\"\n    Clean up context storage on each test run and begin a segment\n    so that later subsegment can be attached. After each test run\n    it cleans up context storage again.\n    \"\"\"\n    from aws_xray_sdk.ext.sqlalchemy_core import unpatch\n    patch(('sqlalchemy_core',))\n    engine = create_engine(db_url)\n    xray_recorder.configure(service='test', sampling=False, context=Context())\n    xray_recorder.begin_segment('name')\n    Base.metadata.create_all(engine)\n    xray_recorder.clear_trace_entities()\n    xray_recorder.begin_segment('name')\n    yield engine\n    xray_recorder.clear_trace_entities()\n    unpatch()\n\n\n@pytest.fixture()\ndef connection(engine):\n    return engine.connect()\n\n\n@pytest.fixture()\ndef session(engine):\n    Session = sessionmaker(bind=engine)\n    return Session()\n"
  },
  {
    "path": "tests/ext/sqlalchemy_core/test_dburl.py",
    "content": "from sqlalchemy import create_engine\nimport urllib\nimport pytest\n\nfrom aws_xray_sdk.core import xray_recorder, patch\nfrom aws_xray_sdk.ext.sqlalchemy_core import unpatch\nfrom aws_xray_sdk.core.context import Context\n\nMYSQL_USER = \"test_dburl_user\"\nMYSQL_PASSWORD = \"test]password\"\nMYSQL_HOST = \"localhost\"\nMYSQL_PORT = 3306\nMYSQL_DB_NAME = \"test_dburl\"\n\npatch(('sqlalchemy_core',))\n\n@pytest.fixture(autouse=True)\ndef construct_ctx():\n    \"\"\"\n    Clean up context storage on each test run and begin a segment\n    so that later subsegment can be attached. After each test run\n    it cleans up context storage again.\n    \"\"\"\n    xray_recorder.configure(service='test', sampling=False, context=Context())\n    xray_recorder.clear_trace_entities()\n    xray_recorder.begin_segment('name')\n    yield\n    xray_recorder.clear_trace_entities()\n\n\ndef test_db_url_with_special_char():\n    password = urllib.parse.quote_plus(MYSQL_PASSWORD)\n    db_url = f\"mysql+pymysql://{MYSQL_USER}:{password}@{MYSQL_HOST}:{MYSQL_PORT}/{MYSQL_DB_NAME}\"\n\n    engine = create_engine(db_url)\n\n    conn = engine.connect()\n\n    conn.execute(\"select 1\")\n\n    subsegment = xray_recorder.current_segment().subsegments[-1]\n\n    assert subsegment.name == f\"{MYSQL_HOST}:{MYSQL_PORT}\"\n    sql = subsegment.sql\n    assert sql['database_type'] == 'mysql'\n    assert sql['user'] == MYSQL_USER\n    assert sql['driver_version'] == 'pymysql'\n    assert sql['database_version']\n"
  },
  {
    "path": "tests/ext/sqlalchemy_core/test_postgres.py",
    "content": "import pytest\n\nfrom .test_base import connection, engine, session, User\n\nfrom sqlalchemy import create_engine\nfrom sqlalchemy.dialects.postgresql import insert as pg_insert\n\nfrom aws_xray_sdk.core import xray_recorder, patch\nfrom aws_xray_sdk.core.context import Context\n\nimport testing.postgresql\n\n\n@pytest.fixture()\ndef postgres_db():\n    with testing.postgresql.Postgresql() as postgresql:\n        yield postgresql\n\n\n@pytest.fixture()\ndef db_url(postgres_db):\n    return postgres_db.url()\n\n\n@pytest.fixture()\ndef sanitized_db_url(postgres_db):\n    dsn = postgres_db.dsn()\n    return 'postgresql://{user}@{host}:{port}/{db}'.format(\n        user=dsn['user'],\n        host=dsn['host'],\n        port=dsn['port'],\n        db=dsn['database'],\n    )\n\n\ndef test_all(session, sanitized_db_url):\n    \"\"\" Test calling all() on get all records.\n    Verify we run the query and return the SQL as metdata\"\"\"\n    session.query(User).all()\n    assert len(xray_recorder.current_segment().subsegments) == 1\n    sql_meta = xray_recorder.current_segment().subsegments[0].sql\n    assert sql_meta['url'] == sanitized_db_url\n    assert sql_meta['sanitized_query'].startswith('SELECT')\n    assert sql_meta['sanitized_query'].endswith('FROM users')\n\n\ndef test_insert_on_conflict_renders(connection):\n    statement = pg_insert(User).values(name='John', fullname=\"John Doe\", password='123456')\n    statement = statement.on_conflict_do_nothing()\n\n    connection.execute(statement)\n\n    assert len(xray_recorder.current_segment().subsegments) == 1\n    sql_meta = xray_recorder.current_segment().subsegments[0].sql\n\n    assert sql_meta['sanitized_query'].startswith('INSERT INTO users')\n    assert 'ON CONFLICT DO NOTHING' in sql_meta['sanitized_query']\n"
  },
  {
    "path": "tests/ext/sqlalchemy_core/test_sqlalchemy_core.py",
    "content": "from .test_base import User, session, db_url, engine, connection\nfrom sqlalchemy.sql.expression import Insert, Delete\nfrom aws_xray_sdk.core import xray_recorder\n\ndef test_all(session):\n    \"\"\" Test calling all() on get all records.\n    Verify we run the query and return the SQL as metadata\"\"\"\n    session.query(User).all()\n    assert len(xray_recorder.current_segment().subsegments) == 1\n    sql_meta = xray_recorder.current_segment().subsegments[0].sql\n    assert sql_meta['url'] == 'sqlite:///:memory:'\n    assert sql_meta['sanitized_query'].startswith('SELECT')\n    assert sql_meta['sanitized_query'].endswith('FROM users')\n\n\ndef test_filter_first(session):\n    \"\"\" Test calling filter().first() on get first filtered records.\n    Verify we run the query and return the SQL as metadata\"\"\"\n    session.query(User).filter(User.password==\"mypassword!\").first()\n    assert len(xray_recorder.current_segment().subsegments) == 1\n    sql_meta = xray_recorder.current_segment().subsegments[0].sql\n    assert sql_meta['sanitized_query'].startswith('SELECT')\n    assert 'FROM users' in sql_meta['sanitized_query']\n    assert \"mypassword!\" not in sql_meta['sanitized_query']\n\n\ndef test_connection_add(connection):\n    password = \"123456\"\n    statement = Insert(User).values(name='John', fullname=\"John Doe\", password=password)\n    connection.execute(statement)\n    assert len(xray_recorder.current_segment().subsegments) == 1\n    sql_meta = xray_recorder.current_segment().subsegments[0].sql\n    assert sql_meta['sanitized_query'].startswith('INSERT INTO users')\n    assert sql_meta['url'] == 'sqlite:///:memory:'\n    assert password not in sql_meta['sanitized_query']\n\n\ndef test_connection_query(connection):\n    password = \"123456\"\n    statement = Delete(User).where(User.name == 'John').where(User.password == password)\n    connection.execute(statement)\n    assert len(xray_recorder.current_segment().subsegments) == 1\n    sql_meta = xray_recorder.current_segment().subsegments[0].sql\n    assert sql_meta['sanitized_query'].startswith('DELETE FROM users')\n    assert sql_meta['url'] == 'sqlite:///:memory:'\n    assert password not in sql_meta['sanitized_query']\n"
  },
  {
    "path": "tests/ext/sqlalchemy_core/test_sqlalchemy_core_2.py",
    "content": "from .test_base import User, session, db_url, engine, connection\nfrom sqlalchemy.sql.expression import select\nfrom aws_xray_sdk.core import xray_recorder\n\n# 2.0 style execution test. see https://docs.sqlalchemy.org/en/14/changelog/migration_14.html#orm-query-is-internally\n# -unified-with-select-update-delete-2-0-style-execution-available\ndef test_orm_style_select_execution(session):\n    statement = select(User).where(\n        User.name == 'John'\n    )\n    session.execute(statement)\n    assert len(xray_recorder.current_segment().subsegments) == 1\n    sql_meta = xray_recorder.current_segment().subsegments[0].sql\n    assert sql_meta['sanitized_query'].startswith('SELECT')\n    assert 'FROM users' in sql_meta['sanitized_query']\n"
  },
  {
    "path": "tests/ext/sqlite3/__init__.py",
    "content": ""
  },
  {
    "path": "tests/ext/sqlite3/test_sqlite3.py",
    "content": "import sqlite3\n\nimport pytest\n\nfrom aws_xray_sdk.core import patch\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.context import Context\n\n\n@pytest.fixture(scope=\"module\")\ndef db():\n    patch(('sqlite3',))\n    return sqlite3.connect(\":memory:\")\n\n\n@pytest.fixture(autouse=True)\ndef construct_ctx():\n    \"\"\"\n    Clean up context storage on each test run and begin a segment\n    so that later subsegment can be attached. After each test run\n    it cleans up context storage again.\n    \"\"\"\n    xray_recorder.configure(service='test', sampling=False, context=Context())\n    xray_recorder.clear_trace_entities()\n    xray_recorder.begin_segment('name')\n    yield\n    xray_recorder.clear_trace_entities()\n\n\ndef test_execute(db):\n\n    q = 'SELECT name FROM sqlite_master'\n    db.execute(q)\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == ':memory:'\n    sql = subsegment.sql\n    assert sql['database_type'] == 'sqlite3'\n    assert sql['database_version']\n\n\ndef test_invalid_syntax(db):\n    q = 'some_query'\n    try:\n        db.execute(q)\n    except Exception:\n        pass\n\n    subsegment = xray_recorder.current_segment().subsegments[0]\n    assert subsegment.name == ':memory:'\n    sql = subsegment.sql\n    assert sql['database_type'] == 'sqlite3'\n    assert sql['database_version']\n\n    exception = subsegment.cause['exceptions'][0]\n    assert exception.type == 'OperationalError'\n"
  },
  {
    "path": "tests/mock_module/__init__.py",
    "content": "def mock_init():\n    pass\n"
  },
  {
    "path": "tests/mock_module/mock_file.py",
    "content": "def mock_func():\n    pass\n"
  },
  {
    "path": "tests/mock_module/mock_submodule/__init__.py",
    "content": "def mock_subinit():\n    pass\n"
  },
  {
    "path": "tests/mock_module/mock_submodule/mock_subfile.py",
    "content": "from aws_xray_sdk.core import xray_recorder\n\n\ndef mock_subfunc():\n    pass\n\n\n@xray_recorder.capture()\ndef mock_no_doublepatch():\n    pass\n\n\nclass MockClass:\n    def __init__(self):\n        pass\n\n    def mock_method(self):\n        pass\n\n    @classmethod\n    def mock_classmethod(cls):\n        # Should not be automatically patched\n        pass\n\n    @staticmethod\n    def mock_staticmethod():\n        pass\n\n\nclass MockSubclass(MockClass):\n    def __init__(self):\n        super().__init__()\n\n    def mock_submethod(self):\n        pass\n"
  },
  {
    "path": "tests/mock_sampling_rule.json",
    "content": "{\n    \"version\": 2,\n    \"default\": {\n      \"fixed_target\": 1,\n      \"rate\": 0.05\n    },\n    \"rules\": [\n    ]\n  }"
  },
  {
    "path": "tests/test_async_local_storage.py",
    "content": "import asyncio\nimport random\nimport sys\n\nfrom aws_xray_sdk.core.async_context import TaskLocalStorage\n\n\ndef test_localstorage_isolation(event_loop):\n    local_storage = TaskLocalStorage(loop=event_loop)\n\n    async def _test():\n        \"\"\"\n        Compute a random number\n        Store it in task local storage\n        Suspend task so another can run\n        Retrieve random number from task local storage\n        Compare that to the local variable\n        \"\"\"\n        try:\n            random_int = random.random()\n            local_storage.randint = random_int\n\n            if sys.version_info >= (3, 8):\n                await asyncio.sleep(0.0)\n            else:\n                await asyncio.sleep(0.0, loop=event_loop)\n\n            current_random_int = local_storage.randint\n            assert random_int == current_random_int\n\n            return True\n        except:\n            return False\n\n    # Run loads of concurrent tasks\n    if sys.version_info >= (3, 8):\n        results = event_loop.run_until_complete(\n            asyncio.wait([event_loop.create_task(_test()) for _ in range(0, 100)])\n        )\n    else:\n        results = event_loop.run_until_complete(\n            asyncio.wait(\n                [event_loop.create_task(_test()) for _ in range(0, 100)],\n                loop=event_loop,\n            )\n        )\n    results = [item.result() for item in results[0]]\n\n    # Double check all is good\n    assert all(results)\n"
  },
  {
    "path": "tests/test_async_recorder.py",
    "content": "import platform\n\nfrom .util import get_new_stubbed_recorder\nfrom aws_xray_sdk.version import VERSION\nfrom aws_xray_sdk.core.async_context import AsyncContext\nimport asyncio\n\n\nxray_recorder = get_new_stubbed_recorder()\n\n\n@xray_recorder.capture_async('test_2')\nasync def async_method2():\n    pass\n\n\n@xray_recorder.capture_async('test_1')\nasync def async_method():\n    await async_method2()\n\n\nasync def test_capture(event_loop):\n    xray_recorder.configure(\n        service='test', sampling=False, context=AsyncContext(loop=event_loop)\n    )\n\n    segment = xray_recorder.begin_segment('name')\n\n    await async_method()\n\n    # Check subsegment is created from async_method\n    assert len(segment.subsegments) == 1\n    assert segment.subsegments[0].name == 'test_1'\n\n    # Check nested subsegment is created from async_method2\n    subsegment = segment.subsegments[0]\n    assert len(subsegment.subsegments) == 1\n    assert subsegment.subsegments[0].name == 'test_2'\n\n    # Check runtime context is correctly attached\n    xray_meta = segment.aws.get('xray')\n    assert 'X-Ray for Python' == xray_meta.get('sdk')\n    assert VERSION == xray_meta.get('sdk_version')\n\n    service = segment.service\n    assert platform.python_implementation() == service.get('runtime')\n    assert platform.python_version() == service.get('runtime_version')\n\nasync def test_concurrent_calls(event_loop):\n    xray_recorder.configure(\n        service='test', sampling=False, context=AsyncContext(loop=event_loop)\n    )\n    async with xray_recorder.in_segment_async('segment') as segment:\n        global counter\n        counter = 0\n        total_tasks = 10\n        flag = asyncio.Event()\n        async def assert_task():\n            async with xray_recorder.in_subsegment_async('segment') as subsegment:\n                global counter\n                counter += 1\n                # Begin all subsegments before closing any to ensure they overlap\n         
       if counter < total_tasks:\n                    await flag.wait()\n                else:\n                    flag.set()\n                return subsegment.parent_id\n        tasks = [assert_task() for task in range(total_tasks)]\n        subsegs_parent_ids = await asyncio.gather(*tasks)\n        for subseg_parent_id in subsegs_parent_ids:\n            assert subseg_parent_id == segment.id\n\n\nasync def test_async_context_managers(event_loop):\n    xray_recorder.configure(\n        service='test', sampling=False, context=AsyncContext(loop=event_loop)\n    )\n\n    async with xray_recorder.in_segment_async('segment') as segment:\n        async with xray_recorder.capture_async('aio_capture') as subsegment:\n            assert segment.subsegments[0].name == 'aio_capture'\n        assert subsegment.in_progress is  False\n        async with xray_recorder.in_subsegment_async('in_sub') as subsegment:\n            assert segment.subsegments[1].name == 'in_sub'\n            assert subsegment.in_progress is  True\n        assert subsegment.in_progress is  False\n"
  },
  {
    "path": "tests/test_daemon_config.py",
    "content": "import pytest\n\nfrom aws_xray_sdk.core.daemon_config import DaemonConfig\nfrom aws_xray_sdk.core.exceptions.exceptions import InvalidDaemonAddressException\n\n\nDEFAULT_IP = '127.0.0.1'\nDEFAULT_PORT = 2000\n\n\ndef test_default_config():\n    config = DaemonConfig()\n\n    assert config.udp_ip == DEFAULT_IP\n    assert config.tcp_ip == DEFAULT_IP\n    assert config.udp_port == 2000\n    assert config.tcp_port == 2000\n\n\ndef test_single_address():\n\n    config = DaemonConfig('192.168.0.1:3000')\n\n    assert config.udp_ip == '192.168.0.1'\n    assert config.tcp_ip == '192.168.0.1'\n    assert config.udp_port == 3000\n    assert config.tcp_port == 3000\n\n\ndef test_set_tcp_udp_separately():\n\n    config = DaemonConfig('tcp:192.168.0.1:3000 udp:127.0.0.2:8080')\n\n    assert config.udp_ip == '127.0.0.2'\n    assert config.tcp_ip == '192.168.0.1'\n    assert config.udp_port == 8080\n    assert config.tcp_port == 3000\n\n    # order can be reversed\n    config = DaemonConfig('udp:127.0.0.2:8080 tcp:192.168.0.1:3000')\n\n    assert config.udp_ip == '127.0.0.2'\n    assert config.tcp_ip == '192.168.0.1'\n    assert config.udp_port == 8080\n    assert config.tcp_port == 3000\n\n\ndef test_invalid_address():\n    with pytest.raises(InvalidDaemonAddressException):\n        DaemonConfig('192.168.0.1')\n\n    with pytest.raises(InvalidDaemonAddressException):\n        DaemonConfig('tcp:192.168.0.1:3000')\n\n    with pytest.raises(InvalidDaemonAddressException):\n        DaemonConfig('127.0.0.2:8080 192.168.0.1:3000')\n\n    with pytest.raises(InvalidDaemonAddressException):\n        DaemonConfig('udp:127.0.0.2:8080 192.168.0.1:3000')\n"
  },
  {
    "path": "tests/test_dummy_entites.py",
    "content": "from aws_xray_sdk.core.models.dummy_entities import DummySegment, DummySubsegment\nfrom aws_xray_sdk.core.models import http\n\n\ndef test_not_sampled():\n    segment = DummySegment()\n    subsegment = DummySubsegment(segment)\n\n    assert not segment.sampled\n    assert not subsegment.sampled\n\n\ndef test_no_ops():\n    segment = DummySegment()\n    segment.put_metadata('key', 'value')\n    segment.put_annotation('key', 'value')\n    segment.put_http_meta(http.URL, 'url')\n    segment.set_user('user')\n\n    assert not segment.metadata\n    assert not segment.annotations\n    assert not segment.http\n    assert not segment.user\n\n    subsegment = DummySubsegment(segment)\n    subsegment.put_metadata('key', 'value')\n    subsegment.put_annotation('key', 'value')\n    subsegment.put_http_meta(http.URL, 'url')\n    subsegment.set_aws({'key': 'value'})\n    subsegment.set_sql({'key': 'value'})\n\n    assert not subsegment.metadata\n    assert not subsegment.annotations\n    assert not subsegment.http\n    assert not subsegment.aws\n    assert not subsegment.sql\n\n    assert not segment.serialize()\n    assert not subsegment.serialize()\n\n\ndef test_structure_intact():\n    segment = DummySegment()\n    subsegment = DummySubsegment(segment)\n    subsegment2 = DummySubsegment(segment)\n    subsegment.add_subsegment(subsegment2)\n    segment.add_subsegment(subsegment)\n\n    assert segment.subsegments[0] is subsegment\n    assert subsegment.subsegments[0] is subsegment2\n\n    subsegment2.close()\n    subsegment.close()\n    segment.close()\n    assert segment.ready_to_send()\n\n\ndef test_invalid_entity_name():\n    segment = DummySegment('DummySegment() Test?')\n    subsegment = DummySubsegment(segment, 'Dummy*Sub!segment$')\n\n    assert segment.name == 'DummySegment Test'\n    assert subsegment.name == 'DummySubsegment'\n\n\ndef test_dummy_segment_trace_id():\n    segment = DummySegment()\n\n    assert segment.trace_id != 'dummy'\n    assert '-' 
in segment.trace_id\n    # checking version of trace id\n    assert segment.trace_id[:1] == '1'\n"
  },
  {
    "path": "tests/test_facade_segment.py",
    "content": "import pytest\n\nfrom aws_xray_sdk.core.models.facade_segment import FacadeSegment\nfrom aws_xray_sdk.core.models.subsegment import Subsegment\nfrom aws_xray_sdk.core.exceptions.exceptions import FacadeSegmentMutationException\nfrom aws_xray_sdk.core.models import http\n\n\ndef test_not_ready():\n\n    segment = FacadeSegment('name', 'id', 'id', True)\n    segment.in_progress = False\n    assert not segment.ready_to_send()\n\n\ndef test_initializing():\n\n    segment = FacadeSegment('name', 'id', 'id', False)\n    assert not segment.initializing\n\n    segment2 = FacadeSegment('name', None, 'id', True)\n    assert segment2.initializing\n\n\ndef test_unsupported_operations():\n\n    segment = FacadeSegment('name', 'id', 'id', False)\n\n    with pytest.raises(FacadeSegmentMutationException):\n        segment.put_annotation('key', 'value')\n\n    with pytest.raises(FacadeSegmentMutationException):\n        segment.put_metadata('key', 'value')\n\n    with pytest.raises(FacadeSegmentMutationException):\n        segment.set_user('user')\n\n    with pytest.raises(FacadeSegmentMutationException):\n        segment.close()\n\n    with pytest.raises(FacadeSegmentMutationException):\n        segment.serialize()\n\n    with pytest.raises(FacadeSegmentMutationException):\n        segment.put_http_meta(http.URL, 'value')\n\n\ndef test_structure_intact():\n\n    segment = FacadeSegment('name', 'id', 'id', True)\n    subsegment = Subsegment('name', 'local', segment)\n    subsegment2 = Subsegment('name', 'local', segment)\n    segment.add_subsegment(subsegment)\n    subsegment.add_subsegment(subsegment2)\n\n    assert segment.subsegments[0] is subsegment\n    assert subsegment.subsegments[0] is subsegment2\n\ndef test_adding_unsampled_subsegment():\n\n    segment = FacadeSegment('name', 'id', 'id', True)\n    subsegment = Subsegment('sampled', 'local', segment)\n    subsegment2 = Subsegment('unsampled', 'local', segment)\n    subsegment2.sampled = False\n\n    
segment.add_subsegment(subsegment)\n    subsegment.add_subsegment(subsegment2)\n\n\n    assert segment.subsegments[0] is subsegment\n    assert subsegment.subsegments[0] is subsegment2\n    assert subsegment2.sampled == False\n"
  },
  {
    "path": "tests/test_lambda_context.py",
    "content": "import os\n\nfrom aws_xray_sdk import global_sdk_config\nimport pytest\nfrom aws_xray_sdk.core import lambda_launcher\nfrom aws_xray_sdk.core.models.dummy_entities import DummySegment\nfrom aws_xray_sdk.core.models.subsegment import Subsegment\n\n\nTRACE_ID = '1-5759e988-bd862e3fe1be46a994272793'\nPARENT_ID = '53995c3f42cd8ad8'\nDATA = 'Foo=Bar'\nHEADER_VAR = \"Root=%s;Parent=%s;Sampled=1;%s\" % (TRACE_ID, PARENT_ID, DATA)\n\nos.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY] = HEADER_VAR\ncontext = lambda_launcher.LambdaContext()\n\n\n@pytest.fixture(autouse=True)\ndef setup():\n    yield\n    global_sdk_config.set_sdk_enabled(True)\n\n\ndef test_facade_segment_generation():\n\n    segment = context.get_trace_entity()\n    assert segment.id == PARENT_ID\n    assert segment.trace_id == TRACE_ID\n    assert segment.sampled\n    assert DATA in segment.get_origin_trace_header().to_header_str()\n\n\ndef test_put_subsegment():\n\n    segment = context.get_trace_entity()\n    subsegment = Subsegment('name', 'local', segment)\n    context.put_subsegment(subsegment)\n    assert context.get_trace_entity().id == subsegment.id\n\n    subsegment2 = Subsegment('name', 'local', segment)\n    context.put_subsegment(subsegment2)\n    assert context.get_trace_entity().id == subsegment2.id\n\n    assert subsegment.subsegments[0] is subsegment2\n    assert subsegment2.parent_id == subsegment.id\n    assert subsegment.parent_id == segment.id\n    assert subsegment2.parent_segment is segment\n    assert DATA in subsegment2.parent_segment.get_origin_trace_header().to_header_str()\n\n    context.end_subsegment()\n    assert context.get_trace_entity().id == subsegment.id\n\n    context.end_subsegment()\n    assert context.get_trace_entity().id == segment.id\n\n\ndef test_disable():\n    context.clear_trace_entities()\n    segment = context.get_trace_entity()\n    assert segment.sampled\n\n    context.clear_trace_entities()\n    global_sdk_config.set_sdk_enabled(False)\n   
 segment = context.get_trace_entity()\n    assert not segment.sampled\n    assert DATA in segment.get_origin_trace_header().to_header_str()\n\n\ndef test_non_initialized():\n    # Context that hasn't been initialized by lambda container should not add subsegments to the dummy segment.\n    temp_header_var = os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY]\n    del os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY]\n\n    temp_context = lambda_launcher.LambdaContext()\n    dummy_segment = temp_context.get_trace_entity()\n    subsegment = Subsegment(\"TestSubsegment\", \"local\", dummy_segment)\n    temp_context.put_subsegment(subsegment)\n\n    assert temp_context.get_trace_entity() == dummy_segment\n\n    # \"Lambda\" container added metadata now. Should see subsegment now.\n    # The following put_segment call will overwrite the dummy segment in the context with an initialized facade segment that accepts a subsegment.\n    os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY] = temp_header_var\n    temp_context.put_subsegment(subsegment)\n\n    assert temp_context.get_trace_entity() == subsegment\n\ndef test_lambda_passthrough():\n    # Hold previous environment value\n    temp_header_var = os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY]\n    del os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY]\n\n    # Set header to lambda passthrough style header\n    os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY] = \"Root=%s;Lineage=10:1234abcd:3\" % TRACE_ID\n\n    temp_context = lambda_launcher.LambdaContext()\n    dummy_segment = temp_context.get_trace_entity()\n    subsegment = Subsegment(\"TestSubsegment\", \"local\", dummy_segment)\n    temp_context.put_subsegment(subsegment)\n\n    # Resulting entity is not the same dummy segment, so simply check that it is a dummy segment\n    assert isinstance(temp_context.get_trace_entity(), DummySegment)\n\n    # Reset header value and ensure behaviour returns to normal\n    del 
os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY]\n    os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY] = temp_header_var\n    temp_context.put_subsegment(subsegment)\n\n    assert temp_context.get_trace_entity() == subsegment\n\n\n\ndef test_set_trace_entity():\n    segment = context.get_trace_entity()\n    subsegment = Subsegment('name', 'local', segment)\n\n    context. clear_trace_entities()\n\n    # should set the parent segment in thread local\n    context.set_trace_entity(subsegment)\n    tl = context._local\n    assert tl.__getattribute__('segment') == segment\n    assert context.get_trace_entity() == subsegment\n\n    context.clear_trace_entities()\n\n    # should set the segment in thread local\n    context.set_trace_entity(segment)\n    tl = context._local\n    assert tl.__getattribute__('segment') == segment\n    assert context.get_trace_entity() == segment\n"
  },
  {
    "path": "tests/test_local_sampling.py",
    "content": "import copy\nimport pytest\n\nfrom aws_xray_sdk.core.sampling.local.sampling_rule import SamplingRule\nfrom aws_xray_sdk.core.sampling.local.sampler import LocalSampler\nfrom aws_xray_sdk.core.exceptions.exceptions import InvalidSamplingManifestError\n\n\nRULE = {\"description\": \"Player moves.\",\n        \"host\": \"*\",\n        \"http_method\": \"*\",\n        \"url_path\": \"/api/move/*\",\n        \"fixed_target\": 0,\n        \"rate\": 0.05\n        }\n\n\nRULE_MANIFEST = {\n    \"version\": 2,\n    \"rules\": [{\n        \"description\": \"Player moves.\",\n        \"host\": \"*\",\n        \"http_method\": \"*\",\n        \"url_path\": \"/api/move/*\",\n        \"fixed_target\": 0,\n        \"rate\": 0\n    }],\n    \"default\": {\n        \"fixed_target\": 1,\n        \"rate\": 1\n    }\n}\n\n\ndef test_should_trace():\n\n    sampler = LocalSampler(RULE_MANIFEST)\n    assert sampler.should_trace({'method': 'GET', 'path': '/view'})\n    assert not sampler.should_trace({'host': 'name', 'method': 'method',\n                                    'path': '/api/move/left'})\n\n\ndef test_missing_version_num():\n\n    rule = copy.deepcopy(RULE_MANIFEST)\n    del rule['version']\n    with pytest.raises(InvalidSamplingManifestError):\n        LocalSampler(rule)\n\n\ndef test_default_matching():\n    sampler = LocalSampler(RULE_MANIFEST)\n    assert sampler.should_trace()\n\n\ndef test_path_matching():\n\n    rule = SamplingRule(RULE)\n    assert rule.applies('name', 'GET', '/api/move/up')\n    assert rule.applies(None, 'POST', '/api/move/up')\n    assert rule.applies('name', None, '/api/move/up')\n    assert rule.applies('name', 'PUT', None)\n    assert not rule.applies(None, 'GET', '/root')\n\n\ndef test_negative_rate():\n\n    rule = copy.deepcopy(RULE)\n    rule['rate'] = -1\n    with pytest.raises(InvalidSamplingManifestError):\n        SamplingRule(rule)\n\n\ndef test_negative_fixed_target():\n\n    rule = copy.deepcopy(RULE)\n    
rule['fixed_target'] = -1\n    with pytest.raises(InvalidSamplingManifestError):\n        SamplingRule(rule)\n\n\ndef test_invalid_default():\n\n    with pytest.raises(InvalidSamplingManifestError):\n        SamplingRule(RULE, 2, default=True)\n\n\ndef test_incomplete_path_rule():\n\n    rule = copy.deepcopy(RULE)\n    del rule['url_path']\n    with pytest.raises(InvalidSamplingManifestError):\n        SamplingRule(rule)\n"
  },
  {
    "path": "tests/test_local_sampling_benchmark.py",
    "content": "import json\nimport pkgutil\nfrom pathlib import Path\n\n# Faster\ndef test_pkgutil_static_read(benchmark):\n    def get_sampling_rule():\n        return json.loads(pkgutil.get_data(__name__, 'mock_sampling_rule.json').decode('utf-8'))\n    benchmark(get_sampling_rule)\n\n# Slower\ndef test_pathlib_static_read(benchmark):\n    def get_sampling_rule():\n        with open(Path(__file__).parent / 'mock_sampling_rule.json') as f:\n            return json.load(f)\n    benchmark(get_sampling_rule)\n"
  },
  {
    "path": "tests/test_patcher.py",
    "content": "import inspect\nimport pytest\nimport sys\nimport wrapt\ntry:\n    # Python versions >= 3.4\n    from importlib import reload\nexcept ImportError:\n    # Python versions 3 <= x < 3.4 have reload in the imp module\n    try:\n        from imp import reload\n    except ImportError:\n        # Python versions < 3 have reload built-in\n        pass\n\nfrom aws_xray_sdk import global_sdk_config\nfrom aws_xray_sdk.core import patcher, xray_recorder\nfrom aws_xray_sdk.core.context import Context\n\n\nTEST_MODULES = (\n    'tests.mock_module',\n    'tests.mock_module.mock_file',\n    'tests.mock_module.mock_submodule',\n    'tests.mock_module.mock_submodule.mock_subfile',\n)\n\n\n@pytest.fixture(autouse=True)\ndef construct_ctx():\n    \"\"\"\n    Clean up context storage on each test run and begin a segment\n    so that later subsegment can be attached. After each test run\n    it cleans up context storage again.\n    \"\"\"\n    pre_run_modules = set(module for module in sys.modules.keys())\n\n    xray_recorder.configure(service='test', sampling=False, context=Context())\n    xray_recorder.clear_trace_entities()\n    xray_recorder.begin_segment('name')\n    yield\n    xray_recorder.end_segment()\n    xray_recorder.clear_trace_entities()\n    global_sdk_config.set_sdk_enabled(True)\n\n    # Reload wrapt.importer references to modules to start off clean\n    reload(wrapt)\n    reload(wrapt.importer)\n    # Reload patcher references to already patched modules\n    reload(patcher)\n    # Cleanup the already imported module references in the system\n    for module_name, module in sorted(sys.modules.items(), key=lambda m: len(m[0]), reverse=True):\n        if module_name not in pre_run_modules and inspect.ismodule(module):\n            reload(module)\n\n    for module_name in sorted(sys.modules.keys(), key=lambda m: len(m), reverse=True):\n        if module_name not in pre_run_modules:\n            del sys.modules[module_name]\n\n\ndef 
_call_all_mock_functions():\n    from .mock_module import mock_file, mock_init\n    from .mock_module.mock_submodule import mock_subfile, mock_subinit\n\n    mock_init()\n    mock_subinit()\n    mock_file.mock_func()\n    mock_subfile.mock_subfunc()\n    mock_subfile.mock_no_doublepatch()\n    mock_subfile.MockClass.mock_classmethod()\n    mock_subfile.MockClass.mock_staticmethod()\n    mock_subfile.MockClass().mock_method()\n    mock_subfile.MockSubclass().mock_submethod()\n\n\n@pytest.mark.parametrize('modules', [\n    ('nonexisting.module',),\n    ('psycopg2', 'nonexisting.module',),\n    ('nonexisting.module', 'psycopg2',),\n])\ndef test_incorrect_import_fails(modules):\n    with pytest.raises(Exception) as e:\n        patcher.patch(modules)\n    assert str(e.value) == 'modules nonexisting.module are currently not supported for patching'\n\n\ndef test_external_file():\n    patcher.patch(['tests.mock_module.mock_file'])\n    assert len(xray_recorder.current_segment().subsegments) == 0\n    # We want to make sure patching does not load any of the patched modules\n    imported_modules = [module for module in TEST_MODULES if module in sys.modules]\n    assert not imported_modules\n\n    _call_all_mock_functions()\n\n    assert len(xray_recorder.current_segment().subsegments) == 2\n    assert xray_recorder.current_segment().subsegments[0].name == 'mock_func'\n    assert xray_recorder.current_segment().subsegments[1].name == 'mock_no_doublepatch'  # It is patched with decorator\n\n\ndef test_external_module():\n    patcher.patch(['tests.mock_module.mock_submodule'])\n    assert len(xray_recorder.current_segment().subsegments) == 0\n    # We want to make sure patching does not load any of the patched modules\n    imported_modules = [module for module in TEST_MODULES if module in sys.modules]\n    assert not imported_modules\n\n    _call_all_mock_functions()\n\n    assert len(xray_recorder.current_segment().subsegments) == 8\n    assert 
xray_recorder.current_segment().subsegments[0].name == 'mock_subinit'\n    assert xray_recorder.current_segment().subsegments[1].name == 'mock_subfunc'\n    assert xray_recorder.current_segment().subsegments[2].name == 'mock_no_doublepatch'  # Should appear only once\n    assert xray_recorder.current_segment().subsegments[3].name == 'mock_staticmethod'\n    assert xray_recorder.current_segment().subsegments[4].name == 'MockClass.__init__'\n    assert xray_recorder.current_segment().subsegments[5].name == 'mock_method'\n    assert xray_recorder.current_segment().subsegments[6].name == 'MockSubclass.__init__'\n    assert xray_recorder.current_segment().subsegments[7].name == 'mock_submethod'\n\n\ndef test_external_submodules_full():\n    patcher.patch(['tests.mock_module'])\n    assert len(xray_recorder.current_segment().subsegments) == 0\n    # We want to make sure patching does not load any of the patched modules\n    imported_modules = [module for module in TEST_MODULES if module in sys.modules]\n    assert not imported_modules\n\n    _call_all_mock_functions()\n\n    assert len(xray_recorder.current_segment().subsegments) == 10\n    assert xray_recorder.current_segment().subsegments[0].name == 'mock_init'\n    assert xray_recorder.current_segment().subsegments[1].name == 'mock_subinit'\n    assert xray_recorder.current_segment().subsegments[2].name == 'mock_func'\n    assert xray_recorder.current_segment().subsegments[3].name == 'mock_subfunc'\n    assert xray_recorder.current_segment().subsegments[4].name == 'mock_no_doublepatch'\n    assert xray_recorder.current_segment().subsegments[5].name == 'mock_staticmethod'\n    assert xray_recorder.current_segment().subsegments[6].name == 'MockClass.__init__'\n    assert xray_recorder.current_segment().subsegments[7].name == 'mock_method'\n    assert xray_recorder.current_segment().subsegments[8].name == 'MockSubclass.__init__'\n    assert xray_recorder.current_segment().subsegments[9].name == 'mock_submethod'\n\n\ndef 
test_external_submodules_ignores_file():\n    patcher.patch(['tests.mock_module'], ignore_module_patterns=['tests.mock_module.mock_file'])\n    assert len(xray_recorder.current_segment().subsegments) == 0\n    # We want to make sure patching does not load any of the patched modules\n    imported_modules = [module for module in TEST_MODULES if module in sys.modules]\n    assert not imported_modules\n\n    _call_all_mock_functions()\n\n    assert len(xray_recorder.current_segment().subsegments) == 9\n    assert xray_recorder.current_segment().subsegments[0].name == 'mock_init'\n    assert xray_recorder.current_segment().subsegments[1].name == 'mock_subinit'\n    assert xray_recorder.current_segment().subsegments[2].name == 'mock_subfunc'\n    assert xray_recorder.current_segment().subsegments[3].name == 'mock_no_doublepatch'\n    assert xray_recorder.current_segment().subsegments[4].name == 'mock_staticmethod'\n    assert xray_recorder.current_segment().subsegments[5].name == 'MockClass.__init__'\n    assert xray_recorder.current_segment().subsegments[6].name == 'mock_method'\n    assert xray_recorder.current_segment().subsegments[7].name == 'MockSubclass.__init__'\n    assert xray_recorder.current_segment().subsegments[8].name == 'mock_submethod'\n\n\ndef test_external_submodules_ignores_module():\n    patcher.patch(['tests.mock_module'], ignore_module_patterns=['tests.mock_module.mock_submodule'])\n    assert len(xray_recorder.current_segment().subsegments) == 0\n    # We want to make sure patching does not load any of the patched modules\n    imported_modules = [module for module in TEST_MODULES if module in sys.modules]\n    assert not imported_modules\n\n    _call_all_mock_functions()\n\n    assert len(xray_recorder.current_segment().subsegments) == 3\n    assert xray_recorder.current_segment().subsegments[0].name == 'mock_init'\n    assert xray_recorder.current_segment().subsegments[1].name == 'mock_func'\n    assert 
xray_recorder.current_segment().subsegments[2].name == 'mock_no_doublepatch'  # It is patched with decorator\n\n\ndef test_disable_sdk_disables_patching():\n    global_sdk_config.set_sdk_enabled(False)\n    patcher.patch(['tests.mock_module'])\n    imported_modules = [module for module in TEST_MODULES if module in sys.modules]\n    assert not imported_modules\n    assert len(xray_recorder.current_segment().subsegments) == 0\n"
  },
  {
    "path": "tests/test_plugins.py",
    "content": "from unittest.mock import patch\n\nfrom aws_xray_sdk.core.plugins.utils import get_plugin_modules\n\nsupported_plugins = (\n    'ec2_plugin',\n    'ecs_plugin',\n    'elasticbeanstalk_plugin',\n)\n\n\ndef test_runtime_context_available():\n    plugins = get_plugin_modules(supported_plugins)\n\n    for plugin in plugins:\n        plugin.initialize()\n        assert hasattr(plugin, 'runtime_context')\n\n\n@patch('aws_xray_sdk.core.plugins.ec2_plugin.do_request')\ndef test_ec2_plugin_imdsv2_success(mock_do_request):\n    v2_json_str = \"{\\\"availabilityZone\\\" : \\\"us-east-2a\\\", \\\"imageId\\\" : \\\"ami-03cca83dd001d4666\\\",\" \\\n               \" \\\"instanceId\\\" : \\\"i-07a181803de94c666\\\", \\\"instanceType\\\" : \\\"t3.xlarge\\\"}\"\n\n    mock_do_request.side_effect = ['token', v2_json_str]\n\n    ec2_plugin = get_plugin_modules(('ec2_plugin',))[0]\n    ec2_plugin.initialize()\n    assert hasattr(ec2_plugin, 'runtime_context')\n    r_c = getattr(ec2_plugin, 'runtime_context')\n    assert r_c['instance_id'] == 'i-07a181803de94c666'\n    assert r_c['availability_zone'] == 'us-east-2a'\n    assert r_c['instance_type'] == 't3.xlarge'\n    assert r_c['ami_id'] == 'ami-03cca83dd001d4666'\n\n\n@patch('aws_xray_sdk.core.plugins.ec2_plugin.do_request')\ndef test_ec2_plugin_v2_fail_v1_success(mock_do_request):\n    v1_json_str = \"{\\\"availabilityZone\\\" : \\\"cn-north-1a\\\", \\\"imageId\\\" : \\\"ami-03cca83dd001d4111\\\",\" \\\n                  \" \\\"instanceId\\\" : \\\"i-07a181803de94c111\\\", \\\"instanceType\\\" : \\\"t2.xlarge\\\"}\"\n\n    mock_do_request.side_effect = [Exception(\"Boom!\"), v1_json_str]\n\n    ec2_plugin = get_plugin_modules(('ec2_plugin',))[0]\n    ec2_plugin.initialize()\n    assert hasattr(ec2_plugin, 'runtime_context')\n    r_c = getattr(ec2_plugin, 'runtime_context')\n    assert r_c['instance_id'] == 'i-07a181803de94c111'\n    assert r_c['availability_zone'] == 'cn-north-1a'\n    assert r_c['instance_type'] == 
't2.xlarge'\n    assert r_c['ami_id'] == 'ami-03cca83dd001d4111'\n\n\n@patch('aws_xray_sdk.core.plugins.ec2_plugin.do_request')\ndef test_ec2_plugin_v2_fail_v1_fail(mock_do_request):\n    mock_do_request.side_effect = [Exception(\"Boom v2!\"), Exception(\"Boom v1!\")]\n\n    ec2_plugin = get_plugin_modules(('ec2_plugin',))[0]\n    ec2_plugin.initialize()\n    assert hasattr(ec2_plugin, 'runtime_context')\n    r_c = getattr(ec2_plugin, 'runtime_context')\n    assert r_c == {}\n"
  },
  {
    "path": "tests/test_recorder.py",
    "content": "import platform\nimport time\n\nimport pytest\n\nfrom aws_xray_sdk.core.sampling.sampling_rule import SamplingRule\nfrom aws_xray_sdk.core.sampling.rule_cache import RuleCache\nfrom aws_xray_sdk.core.sampling.sampler import DefaultSampler\nfrom aws_xray_sdk.version import VERSION\nfrom .util import get_new_stubbed_recorder\n\nfrom aws_xray_sdk import global_sdk_config\nfrom aws_xray_sdk.core.models.segment import Segment\nfrom aws_xray_sdk.core.models.subsegment import Subsegment\nfrom aws_xray_sdk.core.models.dummy_entities import DummySegment, DummySubsegment\nfrom aws_xray_sdk.core.exceptions.exceptions import SegmentNotFoundException\n\nxray_recorder = get_new_stubbed_recorder()\n\n\n@pytest.fixture(autouse=True)\ndef construct_ctx(monkeypatch):\n    \"\"\"\n    Clean up context storage before and after each test run.\n    \"\"\"\n    monkeypatch.delattr(\"botocore.session.Session.get_credentials\")\n    xray_recorder.configure(sampling=False)\n    xray_recorder.clear_trace_entities()\n    yield\n    xray_recorder.clear_trace_entities()\n    global_sdk_config.set_sdk_enabled(True)\n\n\ndef test_default_runtime_context():\n    segment = xray_recorder.begin_segment('name')\n    xray_meta = segment.aws.get('xray')\n    assert 'X-Ray for Python' == xray_meta.get('sdk')\n    assert VERSION == xray_meta.get('sdk_version')\n\n    service = segment.service\n    assert platform.python_implementation() == service.get('runtime')\n    assert platform.python_version() == service.get('runtime_version')\n\n\ndef test_subsegment_parenting():\n    segment = xray_recorder.begin_segment('name')\n    subsegment = xray_recorder.begin_subsegment('name')\n    xray_recorder.end_subsegment('name')\n    assert xray_recorder.get_trace_entity() is segment\n\n    subsegment1 = xray_recorder.begin_subsegment('name1')\n    subsegment2 = xray_recorder.begin_subsegment('name2')\n\n    assert subsegment2.parent_id == subsegment1.id\n    assert subsegment1.parent_id == 
segment.id\n    assert subsegment.parent_id == xray_recorder.current_segment().id\n\n    xray_recorder.end_subsegment()\n    assert not subsegment2.in_progress\n    assert subsegment1.in_progress\n    assert xray_recorder.current_subsegment().id == subsegment1.id\n\n    xray_recorder.end_subsegment()\n    assert not subsegment1.in_progress\n    assert xray_recorder.get_trace_entity() is segment\n\n\ndef test_subsegments_streaming():\n    xray_recorder.configure(streaming_threshold=10)\n    segment = xray_recorder.begin_segment('name')\n    for i in range(0, 11):\n        xray_recorder.begin_subsegment(name=str(i))\n    for i in range(0, 1):\n        # subsegment '10' will be streamed out upon close\n        xray_recorder.end_subsegment()\n\n    assert segment.get_total_subsegments_size() == 10\n    assert xray_recorder.current_subsegment().name == '9'\n\n\ndef test_subsegment_streaming_set_zero():\n    xray_recorder.configure(streaming_threshold=0)\n    segment = xray_recorder.begin_segment('name')\n    xray_recorder.begin_subsegment(name='sub')\n    xray_recorder.end_subsegment()\n\n    assert xray_recorder.streaming.streaming_threshold == 0\n    assert segment.get_total_subsegments_size() == 0\n\n\ndef test_put_annotation_metadata():\n    segment = xray_recorder.begin_segment('name')\n    xray_recorder.put_annotation('key1', 'value1')\n    subsegment = xray_recorder.begin_subsegment('name')\n    xray_recorder.put_metadata('key2', 'value2')\n\n    assert 'value1' == segment.annotations['key1']\n    assert not segment.annotations.get('key2')\n    assert 'value2' == subsegment.metadata['default']['key2']\n    assert not subsegment.metadata['default'].get('key1')\n\n\ndef test_default_pass_through_with_missing_context():\n    xray_recorder = get_new_stubbed_recorder()\n    xray_recorder.configure(sampling=False) # default context_missing = 'LOG_ERROR'\n    assert not xray_recorder.is_sampled()\n\n    xray_recorder.put_annotation('key', 'value')\n    
xray_recorder.put_metadata('key', 'value')\n    xray_recorder.end_segment()\n\ndef test_raise_runtime_error_with_missing_context():\n    xray_recorder = get_new_stubbed_recorder()\n    xray_recorder.configure(sampling=False, context_missing='RUNTIME_ERROR')\n\n    with pytest.raises(SegmentNotFoundException):\n        assert not xray_recorder.is_sampled()\n        xray_recorder.end_segment()\n\ndef test_capture_not_suppress_exception():\n    xray_recorder = get_new_stubbed_recorder()\n    xray_recorder.configure(sampling=False)\n\n    @xray_recorder.capture()\n    def buggy_func():\n        return 1 / 0\n\n    with pytest.raises(ZeroDivisionError):\n        buggy_func()\n\n\ndef test_capture_not_swallow_return():\n    xray_recorder = get_new_stubbed_recorder()\n    xray_recorder.configure(sampling=False)\n    value = 1\n\n    @xray_recorder.capture()\n    def my_func():\n        return value\n\n    actual = my_func()\n    assert actual == value\n\n\ndef test_first_begin_segment_sampled():\n    xray_recorder = get_new_stubbed_recorder()\n    xray_recorder.configure(sampling=True)\n    segment = xray_recorder.begin_segment('name')\n\n    assert segment.sampled\n\ndef test_unsampled_subsegment_of_sampled_parent():\n    xray_recorder = get_new_stubbed_recorder()\n    xray_recorder.configure(sampling=True)\n    segment = xray_recorder.begin_segment('name', sampling=True)\n    subsegment = xray_recorder.begin_subsegment_without_sampling('unsampled')\n\n    assert segment.sampled == True\n    assert subsegment.sampled == False\n\ndef test_begin_subsegment_unsampled():\n    xray_recorder = get_new_stubbed_recorder()\n    xray_recorder.configure(sampling=False)\n    segment = xray_recorder.begin_segment('name', sampling=False)\n    subsegment = xray_recorder.begin_subsegment_without_sampling('unsampled')\n\n    assert segment.sampled == False\n    assert subsegment.sampled == False\n\n\ndef test_in_segment_closing():\n    xray_recorder = get_new_stubbed_recorder()\n    
xray_recorder.configure(sampling=False)\n\n    with xray_recorder.in_segment('name') as segment:\n        assert segment.in_progress is True\n        segment.put_metadata('key1', 'value1')\n        segment.put_annotation('key2', 'value2')\n        with xray_recorder.in_subsegment('subsegment') as subsegment:\n            assert subsegment.in_progress is True\n\n        with xray_recorder.capture('capture') as subsegment:\n            assert subsegment.in_progress is True\n            assert subsegment.name == 'capture'\n\n    assert subsegment.in_progress is False\n    assert segment.in_progress is False\n    assert segment.annotations['key2'] == 'value2'\n    assert segment.metadata['default']['key1'] == 'value1'\n\n\ndef test_in_segment_exception():\n    xray_recorder = get_new_stubbed_recorder()\n    xray_recorder.configure(sampling=False)\n\n    with pytest.raises(Exception):\n        with xray_recorder.in_segment('name') as segment:\n            assert segment.in_progress is True\n            assert 'exceptions' not in segment.cause\n            raise Exception('test exception')\n\n    assert segment.in_progress is False\n    assert segment.fault is True\n    assert len(segment.cause['exceptions']) == 1\n\n    with pytest.raises(Exception):\n        with xray_recorder.in_segment('name') as segment:\n            with xray_recorder.in_subsegment('name') as subsegment:\n                    assert subsegment.in_progress is True\n                    raise Exception('test exception')\n\n    assert len(subsegment.cause['exceptions']) == 1\n\n\ndef test_default_enabled():\n    assert global_sdk_config.sdk_enabled()\n    segment = xray_recorder.begin_segment('name')\n    subsegment = xray_recorder.begin_subsegment('name')\n    assert type(xray_recorder.current_segment()) is Segment\n    assert type(xray_recorder.current_subsegment()) is Subsegment\n\n\ndef test_disable_is_dummy():\n    global_sdk_config.set_sdk_enabled(False)\n    segment = 
xray_recorder.begin_segment('name')\n    subsegment = xray_recorder.begin_subsegment('name')\n    assert type(xray_recorder.current_segment()) is DummySegment\n    assert type(xray_recorder.current_subsegment()) is DummySubsegment\n\ndef test_unsampled_subsegment_is_dummy():\n    assert global_sdk_config.sdk_enabled()\n    segment = xray_recorder.begin_segment('name')\n    subsegment = xray_recorder.begin_subsegment_without_sampling('name')\n    \n    assert type(xray_recorder.current_subsegment()) is DummySubsegment\n\ndef test_subsegment_respects_parent_sampling_decision():\n    assert global_sdk_config.sdk_enabled()\n    segment = xray_recorder.begin_segment('name')\n    subsegment = xray_recorder.begin_subsegment_without_sampling('name2')\n    subsegment2 = xray_recorder.begin_subsegment('unsampled-subsegment')\n\n    assert type(xray_recorder.current_subsegment()) is DummySubsegment\n    assert subsegment.sampled == False\n    assert subsegment2.sampled == False\n\n\ndef test_disabled_empty_context_current_calls():\n    global_sdk_config.set_sdk_enabled(False)\n    assert type(xray_recorder.current_segment()) is DummySegment\n    assert type(xray_recorder.current_subsegment()) is DummySubsegment\n\n\ndef test_disabled_out_of_order_begins():\n    global_sdk_config.set_sdk_enabled(False)\n    xray_recorder.begin_subsegment(\"Test\")\n    xray_recorder.begin_segment(\"Test\")\n    xray_recorder.begin_subsegment(\"Test1\")\n    xray_recorder.begin_subsegment(\"Test2\")\n    assert type(xray_recorder.begin_subsegment(\"Test3\")) is DummySubsegment\n    assert type(xray_recorder.begin_segment(\"Test4\")) is DummySegment\n\n\ndef test_disabled_put_methods():\n    global_sdk_config.set_sdk_enabled(False)\n    xray_recorder.put_annotation(\"Test\", \"Value\")\n    xray_recorder.put_metadata(\"Test\", \"Value\", \"Namespace\")\n\n\n# Test for random end segments/subsegments without any entities in context.\n# Should not throw any exceptions\ndef test_disabled_ends():\n  
  global_sdk_config.set_sdk_enabled(False)\n    xray_recorder.end_segment()\n    xray_recorder.end_subsegment()\n    xray_recorder.end_segment()\n    xray_recorder.end_segment()\n    xray_recorder.end_subsegment()\n    xray_recorder.end_subsegment()\n\n\n# Begin subsegment should not fail on its own.\ndef test_disabled_begin_subsegment():\n    global_sdk_config.set_sdk_enabled(False)\n    subsegment_entity = xray_recorder.begin_subsegment(\"Test\")\n    assert type(subsegment_entity) is DummySubsegment\n\n\n# When disabled, force sampling should still return dummy entities.\ndef test_disabled_force_sampling():\n    global_sdk_config.set_sdk_enabled(False)\n    xray_recorder.configure(sampling=True)\n    segment_entity = xray_recorder.begin_segment(\"Test1\")\n    subsegment_entity = xray_recorder.begin_subsegment(\"Test2\")\n    assert type(segment_entity) is DummySegment\n    assert type(subsegment_entity) is DummySubsegment\n\n\n# When disabled, get_trace_entity should return DummySegment if an entity is not present in the context\ndef test_disabled_get_context_entity():\n    global_sdk_config.set_sdk_enabled(False)\n    entity = xray_recorder.get_trace_entity()\n    assert type(entity) is DummySegment\n\n\ndef test_max_stack_trace_zero():\n    xray_recorder.configure(max_trace_back=1)\n    with pytest.raises(Exception):\n        with xray_recorder.in_segment('name') as segment_with_stack:\n            assert segment_with_stack.in_progress is True\n            assert 'exceptions' not in segment_with_stack.cause.__dict__\n            raise Exception('Test Exception')\n    assert len(segment_with_stack.cause['exceptions']) == 1\n\n    xray_recorder.configure(max_trace_back=0)\n    with pytest.raises(Exception):\n        with xray_recorder.in_segment('name') as segment_no_stack:\n            assert segment_no_stack.in_progress is True\n            assert 'exceptions' not in segment_no_stack.cause.__dict__\n            raise Exception('Test Exception')\n    assert 
len(segment_no_stack.cause['exceptions']) == 1\n\n    assert len(segment_with_stack.cause['exceptions'][0].stack) == 1\n    assert len(segment_no_stack.cause['exceptions'][0].stack) == 0\n\n\n# CustomSampler to mimic the DefaultSampler,\n# but without the rule and target polling logic.\nclass CustomSampler(DefaultSampler):\n    def start(self):\n        pass\n\n    def should_trace(self, sampling_req=None):\n        rule_cache = RuleCache()\n        rule_cache.last_updated = int(time.time())\n        sampling_rule_a = SamplingRule(name='rule_a',\n                                       priority=2,\n                                       rate=0.5,\n                                       reservoir_size=1,\n                                       service='app_a')\n        sampling_rule_b = SamplingRule(name='rule_b',\n                                       priority=2,\n                                       rate=0.5,\n                                       reservoir_size=1,\n                                       service='app_b')\n        rule_cache.load_rules([sampling_rule_a, sampling_rule_b])\n        now = int(time.time())\n        if sampling_req and not sampling_req.get('service_type', None):\n            sampling_req['service_type'] = self._origin\n        elif sampling_req is None:\n            sampling_req = {'service_type': self._origin}\n        matched_rule = rule_cache.get_matched_rule(sampling_req, now)\n        if matched_rule:\n            return self._process_matched_rule(matched_rule, now)\n        else:\n            return self._local_sampler.should_trace(sampling_req)\n\n\ndef test_begin_segment_matches_sampling_rule_on_name():\n    xray_recorder.configure(sampling=True, sampler=CustomSampler())\n    segment = xray_recorder.begin_segment(\"app_b\")\n    assert segment.aws.get('xray').get('sampling_rule_name') == 'rule_b'\n"
  },
  {
    "path": "tests/test_sampling_rule_cache.py",
    "content": "import time\nimport pytest\n\nfrom aws_xray_sdk.core.sampling.sampling_rule import SamplingRule\nfrom aws_xray_sdk.core.sampling.rule_cache import RuleCache\nfrom aws_xray_sdk.core.sampling.reservoir import Reservoir\n\nrule_0 = SamplingRule(name='a', priority=1, rate=0.1,\n                      reservoir_size=1, host='*mydomain*',\n                      method='GET', path='myop', service='random',\n                      service_type='random')\nrule_1 = SamplingRule(name='aa', priority=2, rate=0.1,\n                      reservoir_size=1, host='*random*',\n                      method='POST', path='random', service='proxy',\n                      service_type='random')\nrule_2 = SamplingRule(name='b', priority=2, rate=0.1,\n                      reservoir_size=1, host='*', method='GET',\n                      path='ping', service='myapp',\n                      service_type='AWS::EC2::Instance')\nrule_default = SamplingRule(name='Default', priority=1000, rate=0.1,\n                            reservoir_size=1)\n\n\n@pytest.fixture(autouse=True)\ndef reset_rules():\n    \"\"\"\n    Clean up context storage before and after each test run.\n    \"\"\"\n    rules = [rule_default, rule_2, rule_0, rule_1]\n    for rule in rules:\n        rule.snapshot_statistics()\n        rule.reservoir = Reservoir()\n    yield\n\n\ndef test_rules_sorting():\n    cache = RuleCache()\n    rules = [rule_default, rule_2, rule_0, rule_1]\n    cache.load_rules(rules)\n    sorted_rules = cache.rules\n\n    assert sorted_rules[0] == rule_0\n    assert sorted_rules[1] == rule_1\n    assert sorted_rules[2] == rule_2\n    assert sorted_rules[3] == rule_default\n\n\ndef test_evict_deleted_rules():\n    cache = RuleCache()\n    cache.load_rules([rule_default, rule_1, rule_0])\n    cache.load_rules([rule_default, rule_2])\n\n    assert len(cache.rules) == 2\n    assert rule_1 not in cache.rules\n    assert rule_0 not in cache.rules\n\n\ndef test_rule_matching():\n    cache = 
RuleCache()\n    now = int(time.time())\n    cache.load_rules([rule_default, rule_1, rule_2, rule_0])\n    cache.last_updated = now\n\n    sampling_req = {'host': 'mydomain.com'}\n    rule = cache.get_matched_rule(sampling_req, now)\n    assert rule.name == 'a'\n\n    sampling_req = {'method': 'POST'}\n    rule = cache.get_matched_rule(sampling_req, now)\n    assert rule.name == 'aa'\n\n    sampling_req = {'path': 'ping'}\n    rule = cache.get_matched_rule(sampling_req, now)\n    assert rule.name == 'b'\n\n    sampling_req = {'service': 'proxy'}\n    rule = cache.get_matched_rule(sampling_req, now)\n    assert rule.name == 'aa'\n\n    sampling_req = {'service_type': 'AWS::EC2::Instance'}\n    rule = cache.get_matched_rule(sampling_req, now)\n    assert rule.name == 'b'\n\n    # Default should be always returned when there is no match\n    sampling_req = {'host': 'unknown', 'path': 'unknown'}\n    rule = cache.get_matched_rule(sampling_req, now)\n    assert rule.is_default()\n\n\ndef test_preserving_sampling_statistics():\n    cache = RuleCache()\n    cache.load_rules([rule_default, rule_0])\n    rule_0.increment_request_count()\n    rule_0.increment_sampled_count()\n    rule_0.reservoir.load_quota(quota=3, TTL=15, interval=None)\n\n    new_rule_0 = SamplingRule(name='a', priority=1,\n                              rate=0.1, reservoir_size=1)\n    cache.load_rules([rule_default, new_rule_0])\n\n    statistics = cache.rules[0].snapshot_statistics()\n    reservoir = cache.rules[0].reservoir\n\n    assert statistics['request_count'] == 1\n    assert statistics['sampled_count'] == 1\n    assert reservoir.quota == 3\n    assert reservoir.TTL == 15\n\n\ndef test_correct_target_mapping():\n    cache = RuleCache()\n    cache.load_rules([rule_default, rule_0])\n    targets = {\n        'a': {'quota': 3, 'TTL': None, 'interval': None, 'rate': 0.1},\n        'b': {'quota': 2, 'TTL': None, 'interval': None, 'rate': 0.1},\n        'Default': {'quota': 5, 'TTL': None, 'interval': 
None, 'rate': 0.1},\n    }\n    cache.load_targets(targets)\n\n    assert rule_0.reservoir.quota == 3\n    assert rule_default.reservoir.quota == 5\n\n\ndef test_expired_cache():\n    cache = RuleCache()\n    now = int(time.time())\n    cache.load_rules([rule_default, rule_1, rule_2, rule_0])\n    cache.last_updated = now - 60 * 60 * 24  # makes rule cache one day before\n\n    sampling_req = {'host': 'myhost.com', 'method': 'GET',\n                    'path': 'operation', 'service': 'app'}\n\n    rule = cache.get_matched_rule(sampling_req, now)\n    assert rule is None\n\n    cache.last_updated = now\n    rule = cache.get_matched_rule(sampling_req, now)\n    assert rule.is_default()\n"
  },
  {
    "path": "tests/test_sdk_config.py",
    "content": "from aws_xray_sdk import global_sdk_config\nimport os\nimport pytest\n\n\nXRAY_ENABLED_KEY = \"AWS_XRAY_SDK_ENABLED\"\n\n\n@pytest.fixture(autouse=True)\ndef cleanup():\n    \"\"\"\n    Clean up Environmental Variable for enable before and after tests\n    \"\"\"\n    if XRAY_ENABLED_KEY in os.environ:\n        del os.environ[XRAY_ENABLED_KEY]\n    yield\n    if XRAY_ENABLED_KEY in os.environ:\n        del os.environ[XRAY_ENABLED_KEY]\n    global_sdk_config.set_sdk_enabled(True)\n\n\ndef test_enable_key():\n    assert global_sdk_config.XRAY_ENABLED_KEY == XRAY_ENABLED_KEY\n\n\ndef test_default_enabled():\n    assert global_sdk_config.sdk_enabled() is True\n\n\ndef test_env_var_precedence():\n    os.environ[XRAY_ENABLED_KEY] = \"true\"\n    # Env Variable takes precedence. This is called to activate the internal check\n    global_sdk_config.set_sdk_enabled(False)\n    assert global_sdk_config.sdk_enabled() is True\n    os.environ[XRAY_ENABLED_KEY] = \"false\"\n    global_sdk_config.set_sdk_enabled(False)\n    assert global_sdk_config.sdk_enabled() is False\n    os.environ[XRAY_ENABLED_KEY] = \"false\"\n    global_sdk_config.set_sdk_enabled(True)\n    assert global_sdk_config.sdk_enabled() is False\n    os.environ[XRAY_ENABLED_KEY] = \"true\"\n    global_sdk_config.set_sdk_enabled(True)\n    assert global_sdk_config.sdk_enabled() is True\n    os.environ[XRAY_ENABLED_KEY] = \"true\"\n    global_sdk_config.set_sdk_enabled(None)\n    assert global_sdk_config.sdk_enabled() is True\n\n\ndef test_env_enable_case():\n    os.environ[XRAY_ENABLED_KEY] = \"TrUE\"\n    # Env Variable takes precedence. 
This is called to activate the internal check\n    global_sdk_config.set_sdk_enabled(True)\n    assert global_sdk_config.sdk_enabled() is True\n\n    os.environ[XRAY_ENABLED_KEY] = \"true\"\n    global_sdk_config.set_sdk_enabled(True)\n    assert global_sdk_config.sdk_enabled() is True\n\n    os.environ[XRAY_ENABLED_KEY] = \"1\"\n    global_sdk_config.set_sdk_enabled(True)\n    assert global_sdk_config.sdk_enabled() is True\n\n    os.environ[XRAY_ENABLED_KEY] = \"y\"\n    global_sdk_config.set_sdk_enabled(True)\n    assert global_sdk_config.sdk_enabled() is True\n\n    os.environ[XRAY_ENABLED_KEY] = \"t\"\n    global_sdk_config.set_sdk_enabled(True)\n    assert global_sdk_config.sdk_enabled() is True\n\n    os.environ[XRAY_ENABLED_KEY] = \"False\"\n    global_sdk_config.set_sdk_enabled(True)\n    assert global_sdk_config.sdk_enabled() is False\n\n    os.environ[XRAY_ENABLED_KEY] = \"falSE\"\n    global_sdk_config.set_sdk_enabled(True)\n    assert global_sdk_config.sdk_enabled() is False\n\n    os.environ[XRAY_ENABLED_KEY] = \"0\"\n    global_sdk_config.set_sdk_enabled(True)\n    assert global_sdk_config.sdk_enabled() is False\n\n\ndef test_invalid_env_string():\n    os.environ[XRAY_ENABLED_KEY] = \"INVALID\"\n    # Env Variable takes precedence. This is called to activate the internal check\n    global_sdk_config.set_sdk_enabled(True)\n    assert global_sdk_config.sdk_enabled() is True\n\n    os.environ[XRAY_ENABLED_KEY] = \"1.0\"\n    global_sdk_config.set_sdk_enabled(True)\n    assert global_sdk_config.sdk_enabled() is True\n\n    os.environ[XRAY_ENABLED_KEY] = \"1-.0\"\n    global_sdk_config.set_sdk_enabled(False)\n    assert global_sdk_config.sdk_enabled() is True\n\n    os.environ[XRAY_ENABLED_KEY] = \"T RUE\"\n    global_sdk_config.set_sdk_enabled(True)\n    assert global_sdk_config.sdk_enabled() is True\n"
  },
  {
    "path": "tests/test_serialize_entities.py",
    "content": "import ast\nimport datetime\nimport json\nimport platform\nimport pytest\n\nfrom aws_xray_sdk.version import VERSION\nfrom aws_xray_sdk.core.models import http\nfrom aws_xray_sdk.core.models.segment import Segment\nfrom aws_xray_sdk.core.models.subsegment import Subsegment\n    \nfrom .util import entity_to_dict\n\t\ndef test_serialize_segment():\n\n    segment = Segment('test')\n    segment.close()\n        \n    expected_segment_dict = {\n    \"name\": \"test\",\n    \"start_time\": segment.start_time,\n    \"trace_id\": segment.trace_id,\n    \"end_time\": segment.end_time,\n    \"in_progress\": False,\n    \"id\": segment.id\n    }\n    \n    actual_segment_dict = entity_to_dict(segment)\n    \n    assert expected_segment_dict == actual_segment_dict\n    \ndef test_serialize_segment_with_aws():\n\n    segment = Segment('test')\n    \n    XRAY_META = {\n        'xray': {\n            'sdk': 'X-Ray for Python',\n            'sdk_version': VERSION\n        }\n    }\n    \n    segment.set_aws(XRAY_META)\n    \n    segment.close()\n    \n    expected_segment_dict = {\n    \"name\": \"test\",\n    \"start_time\": segment.start_time,\n    \"trace_id\": segment.trace_id,\n    \"end_time\": segment.end_time,\n    \"in_progress\": False,\n    \"aws\": {\n        \"xray\": {\n            \"sdk\": \"X-Ray for Python\",\n            \"sdk_version\": VERSION\n        }\n    },\n    \"id\": segment.id\n    }\n    \n    actual_segment_dict = entity_to_dict(segment)\n    \n    assert  expected_segment_dict == actual_segment_dict\n    \ndef test_serialize_segment_with_services():\n\n    segment = Segment('test')\n    \n    SERVICE_INFO = {\n        'runtime': platform.python_implementation(),\n        'runtime_version': platform.python_version()\n    }\n    \n    segment.set_service(SERVICE_INFO)\n    \n    segment.close()\n    \n    expected_segment_dict = {\n    \"name\": \"test\",\n    \"start_time\": segment.start_time,\n    \"trace_id\": segment.trace_id,\n  
  \"end_time\": segment.end_time,\n    \"in_progress\": False,\n    \"service\": {\n        \"runtime\": segment.service['runtime'],\n        \"runtime_version\": segment.service['runtime_version']\n    },\n    \"id\": segment.id\n    }\n    \n    actual_segment_dict = entity_to_dict(segment)\n    \n    assert  expected_segment_dict == actual_segment_dict \n    \ndef test_serialize_segment_with_annotation():\n\n    segment = Segment('test')\n    \n    segment.put_annotation('key', 'value')\n    \n    segment.close()\n    \n    expected_segment_dict = {\n    \"id\": segment.id,\n    \"name\": \"test\",\n    \"start_time\": segment.start_time,\n    \"in_progress\": False,\n    \"annotations\": {\n        \"key\": \"value\"\n    },\n    \"trace_id\": segment.trace_id,\n    \"end_time\": segment.end_time\n    }\n \n    actual_segment_dict = entity_to_dict(segment)\n      \n    assert  expected_segment_dict == actual_segment_dict\n    \ndef test_serialize_segment_with_metadata():\n\n    class TestMetadata():\n        def __init__(self, parameter_one, parameter_two):\n            self.parameter_one = parameter_one\n            self.parameter_two = parameter_two\n        \n            self.parameter_three = {'test'} #set\n            self.parameter_four = {'a': [1, 2, 3], 'b': True, 'c': (1.1, 2.2), 'd': list} #dict\n            self.parameter_five = [TestSubMetadata(datetime.time(9, 25, 31)), TestSubMetadata(datetime.time(23, 14, 6))] #list\n        \n    class TestSubMetadata():\n        def __init__(self, time):\n            self.time = time\n\n    segment = Segment('test')\n    \n    segment.put_metadata('key_one', TestMetadata(1,2), 'namespace_one')\n    segment.put_metadata('key_two', TestMetadata(3,4), 'namespace_two')\n    \n    segment.close()\n    \n    expected_segment_dict = {\n    \"id\": segment.id,\n    \"name\": \"test\",\n    \"start_time\": segment.start_time,\n    \"in_progress\": False,\n    \"metadata\": {\n        \"namespace_one\": {\n            
\"key_one\": {\n                \"parameter_one\": 1,\n                \"parameter_two\": 2,\n                \"parameter_three\": [\n                    \"test\"\n                ],\n                \"parameter_four\": {\n                    \"a\": [\n                        1,\n                        2,\n                        3\n                    ],\n                    \"b\": True,\n                    \"c\": [\n                        1.1,\n                        2.2\n                    ],\n                    \"d\": str(list)\n                },\n                \"parameter_five\": [\n                    {\n                        \"time\": \"09:25:31\"\n                    },\n                    {\n                        \"time\": \"23:14:06\"\n                    }\n                ]\n            }\n        },\n        \"namespace_two\": {\n            \"key_two\": {\n                \"parameter_one\": 3,\n                \"parameter_two\": 4,\n                \"parameter_three\": [\n                    \"test\"\n                ],\n                \"parameter_four\": {\n                    \"a\": [\n                        1,\n                        2,\n                        3\n                    ],\n                    \"b\": True,\n                    \"c\": [\n                        1.1,\n                        2.2\n                    ],\n                    \"d\": str(list)\n                },\n                \"parameter_five\": [\n                    {\n                        \"time\": \"09:25:31\"\n                    },\n                    {\n                        \"time\": \"23:14:06\"\n                    }\n                ]\n            }\n        }\n    },\n    \"trace_id\": segment.trace_id,\n    \"end_time\": segment.end_time\n    }\n\n    actual_segment_dict = entity_to_dict(segment) \n    \n    assert  expected_segment_dict == actual_segment_dict\n    \ndef test_serialize_segment_with_http():\n\n    segment = 
Segment('test')\n    \n    segment.put_http_meta(http.URL, 'https://aws.amazon.com')\n    segment.put_http_meta(http.METHOD, 'get')\n    segment.put_http_meta(http.USER_AGENT, 'test')\n    segment.put_http_meta(http.CLIENT_IP, '127.0.0.1')\n    segment.put_http_meta(http.X_FORWARDED_FOR, True)\n    segment.put_http_meta(http.STATUS, 200)\n    segment.put_http_meta(http.CONTENT_LENGTH, 0)\n    \n    segment.close()\n    \n    expected_segment_dict = {\n    \"id\": segment.id,\n    \"name\": \"test\",\n    \"start_time\": segment.start_time,\n    \"in_progress\": False,\n    \"http\": {\n        \"request\": {\n            \"url\": \"https://aws.amazon.com\",\n            \"method\": \"get\",\n            \"user_agent\": \"test\",\n            \"client_ip\": \"127.0.0.1\",\n            \"x_forwarded_for\": True\n        },\n        \"response\": {\n            \"status\": 200,\n            \"content_length\": 0\n        }\n    },\n    \"trace_id\": segment.trace_id,\n    \"end_time\": segment.end_time\n    }\n\n    actual_segment_dict = entity_to_dict(segment)\n    \n    assert expected_segment_dict == actual_segment_dict\n    \ndef test_serialize_segment_with_exception():\n\n    class TestException(Exception):\n        def __init__(self, message):\n            super().__init__(message)\n\n    segment_one = Segment('test')\n    \n    stack_one = [\n        ('/path/to/test.py', 10, 'module', 'another_function()'),\n        ('/path/to/test.py', 3, 'another_function', 'wrong syntax')\n    ]\n    \n    stack_two = [\n        ('/path/to/test.py', 11, 'module', 'another_function()'),\n        ('/path/to/test.py', 4, 'another_function', 'wrong syntax')\n    ]\n\n    exception_one = TestException('test message one')\n    exception_two = TestException('test message two')\n\n    segment_one.add_exception(exception_one, stack_one, True)\n    segment_one.add_exception(exception_two, stack_two, False)\n    \n    segment_one.close()\n    \n    expected_segment_one_dict = {\n    
\"id\": segment_one.id,\n    \"name\": \"test\",\n    \"start_time\": segment_one.start_time,\n    \"in_progress\": False,\n    \"cause\": {\n        \"working_directory\": segment_one.cause['working_directory'],\n        \"exceptions\": [\n            {\n                \"id\": exception_one._cause_id,\n                \"message\": \"test message one\",\n                \"type\": \"TestException\",\n                \"remote\": True,\n                \"stack\": [\n                    {\n                        \"path\": \"test.py\",\n                        \"line\": 10,\n                        \"label\": \"module\"\n                    },\n                    {\n                        \"path\": \"test.py\",\n                        \"line\": 3,\n                        \"label\": \"another_function\"\n                    }\n                ]\n            },\n            {\n                \"id\": exception_two._cause_id,\n                \"message\": \"test message two\",\n                \"type\": \"TestException\",\n                \"remote\": False,\n                \"stack\": [\n                    {\n                        \"path\": \"test.py\",\n                        \"line\": 11,\n                        \"label\": \"module\"\n                    },\n                    {\n                        \"path\": \"test.py\",\n                        \"line\": 4,\n                        \"label\": \"another_function\"\n                    }\n                ]\n            }\n        ]\n    },\n    \"trace_id\": segment_one.trace_id,\n    \"fault\": True,\n    \"end_time\": segment_one.end_time\n    }\n\n    segment_two = Segment('test')\n    subsegment = Subsegment('test', 'local', segment_two)\n\n    subsegment.add_exception(exception_one, stack_one, True)\n    subsegment.add_exception(exception_two, stack_two, False)\n    subsegment.close()\n    \n    # will record cause id instead as same exception already recorded in its subsegment\n    
segment_two.add_exception(exception_one, stack_one, True)\n    \n    segment_two.close()\n    \n    expected_segment_two_dict = {\n    \"id\": segment_two.id,\n    \"name\": \"test\",\n    \"start_time\": segment_two.start_time,\n    \"in_progress\": False,\n    \"cause\": exception_one._cause_id,\n    \"trace_id\": segment_two.trace_id,\n    \"fault\": True,\n    \"end_time\": segment_two.end_time\n    }\n\n    actual_segment_one_dict = entity_to_dict(segment_one)\n    actual_segment_two_dict = entity_to_dict(segment_two)\n    \n    assert expected_segment_one_dict == actual_segment_one_dict\n    assert expected_segment_two_dict == actual_segment_two_dict\n    \ndef test_serialize_subsegment():\n\n    segment = Segment('test')\n    subsegment = Subsegment('test', 'local', segment)\n    \n    subsegment.close()  \n    segment.close()\n\n    expected_subsegment_dict = {\n    \"id\": subsegment.id,\n    \"name\": \"test\",\n    \"start_time\": subsegment.start_time,\n    \"in_progress\": False,\n    \"trace_id\": subsegment.trace_id,\n    \"type\": \"subsegment\",\n    \"namespace\": \"local\",\n    \"end_time\": subsegment.end_time\n    }\n\n    actual_subsegment_dict = entity_to_dict(subsegment)\n    \n    assert expected_subsegment_dict == actual_subsegment_dict\n    \ndef test_serialize_subsegment_with_http():\n\n    segment = Segment('test')\n    subsegment = Subsegment('test', 'remote', segment)\n    \n    subsegment.put_http_meta(http.URL, 'https://aws.amazon.com')\n    subsegment.put_http_meta(http.METHOD, 'get')\n\n    subsegment.put_http_meta(http.STATUS, 200)\n    subsegment.put_http_meta(http.CONTENT_LENGTH, 0)\n    \n    subsegment.close()  \n    segment.close()\n\n    expected_subsegment_dict = {\n    \"id\": subsegment.id,\n    \"name\": \"test\",\n    \"start_time\": subsegment.start_time,\n    \"in_progress\": False,\n    \"http\": {\n        \"request\": {\n            \"url\": \"https://aws.amazon.com\",\n            \"method\": \"get\"\n        
},\n        \"response\": {\n            \"status\": 200,\n            \"content_length\": 0\n        }\n    },\n    \"trace_id\": subsegment.trace_id,\n    \"type\": \"subsegment\",\n    \"namespace\": \"remote\",\n    \"end_time\": subsegment.end_time\n    }\n\n    actual_subsegment_dict = entity_to_dict(subsegment)\n    \n    assert expected_subsegment_dict == actual_subsegment_dict\n     \ndef test_serialize_subsegment_with_sql():\n\n    segment = Segment('test')\n    subsegment = Subsegment('test', 'remote', segment)\n    \n    sql = {\n        \"url\": \"jdbc:postgresql://aawijb5u25wdoy.cpamxznpdoq8.us-west-2.rds.amazonaws.com:5432/ebdb\",\n        \"preparation\": \"statement\",\n        \"database_type\": \"PostgreSQL\",\n        \"database_version\": \"9.5.4\",\n        \"driver_version\": \"PostgreSQL 9.4.1211.jre7\",\n        \"user\" : \"dbuser\",\n        \"sanitized_query\" : \"SELECT  *  FROM  customers  WHERE  customer_id=?;\"\n    }\n\n    subsegment.set_sql(sql)\n    \n    subsegment.close()  \n    segment.close()\n\n    expected_subsegment_dict = {\n    \"id\": subsegment.id,\n    \"name\": \"test\",\n    \"start_time\": subsegment.start_time,\n    \"in_progress\": False,\n    \"trace_id\": subsegment.trace_id,\n    \"type\": \"subsegment\",\n    \"namespace\": \"remote\",\n    \"sql\": {\n        \"url\": \"jdbc:postgresql://aawijb5u25wdoy.cpamxznpdoq8.us-west-2.rds.amazonaws.com:5432/ebdb\",\n        \"preparation\": \"statement\",\n        \"database_type\": \"PostgreSQL\",\n        \"database_version\": \"9.5.4\",\n        \"driver_version\": \"PostgreSQL 9.4.1211.jre7\",\n        \"user\": \"dbuser\",\n        \"sanitized_query\": \"SELECT  *  FROM  customers  WHERE  customer_id=?;\"\n    },\n    \"end_time\": subsegment.end_time\n    }\n\n    actual_subsegment_dict = entity_to_dict(subsegment)\n    \n    assert expected_subsegment_dict == actual_subsegment_dict\n    \ndef test_serialize_subsegment_with_aws():\n\n    segment = 
Segment('test')\n    subsegment = Subsegment('test', 'aws', segment)\n    \n    aws = {\n        \"bucket_name\": \"testbucket\",\n        \"region\": \"us-east-1\",\n        \"operation\": \"GetObject\",\n        \"request_id\": \"0000000000000000\",\n        \"key\": \"123\",\n        \"resource_names\": [\n            \"testbucket\"\n        ]\n    }\n    \n    subsegment.set_aws(aws)\n    \n    subsegment.close()\n    segment.close()\n    \n    expected_subsegment_dict = {\n    \"id\": subsegment.id,\n    \"name\": \"test\",\n    \"start_time\": subsegment.start_time,\n    \"in_progress\": False,\n    \"aws\": {\n        \"bucket_name\": \"testbucket\",\n        \"region\": \"us-east-1\",\n        \"operation\": \"GetObject\",\n        \"request_id\": \"0000000000000000\",\n        \"key\": \"123\",\n        \"resource_names\": [\n            \"testbucket\"\n        ]\n    },\n    \"trace_id\": subsegment.trace_id,\n    \"type\": \"subsegment\",\n    \"namespace\": \"aws\",\n    \"end_time\": subsegment.end_time\n    }\n\n    actual_subsegment_dict = entity_to_dict(subsegment)\n    \n    assert expected_subsegment_dict == actual_subsegment_dict\n    \ndef test_serialize_with_ast_metadata():\n\n    class_string = \"\"\"\\\nclass A:\n    def __init__(self, a):\n        self.a = a\n\"\"\"\n    \n    ast_obj = ast.parse(class_string)\n    \n    segment = Segment('test')\n    \n    segment.put_metadata('ast', ast_obj)\n    \n    segment.close()\n\n    actual_segment_dict = entity_to_dict(segment)\n        \n    assert  'ast' in actual_segment_dict['metadata']['default']\n"
  },
  {
    "path": "tests/test_sqs_message_helper.py",
    "content": "from aws_xray_sdk.core.utils.sqs_message_helper import SqsMessageHelper\n\nimport pytest\n\nsampleSqsMessageEvent = {\n        \"Records\": [\n            {\n                \"messageId\": \"059f36b4-87a3-44ab-83d2-661975830a7d\",\n                \"receiptHandle\": \"AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a...\",\n                \"body\": \"Test message.\",\n                \"attributes\": {\n                    \"ApproximateReceiveCount\": \"1\",\n                    \"SentTimestamp\": \"1545082649183\",\n                    \"SenderId\": \"AIDAIENQZJOLO23YVJ4VO\",\n                    \"ApproximateFirstReceiveTimestamp\": \"1545082649185\",\n                    \"AWSTraceHeader\":\"Root=1-632BB806-bd862e3fe1be46a994272793;Sampled=1\"\n                },\n                \"messageAttributes\": {},\n                \"md5OfBody\": \"e4e68fb7bd0e697a0ae8f1bb342846b3\",\n                \"eventSource\": \"aws:sqs\",\n                \"eventSourceARN\": \"arn:aws:sqs:us-east-2:123456789012:my-queue\",\n                \"awsRegion\": \"us-east-2\"\n            },\n            {\n                \"messageId\": \"2e1424d4-f796-459a-8184-9c92662be6da\",\n                \"receiptHandle\": \"AQEBzWwaftRI0KuVm4tP+/7q1rGgNqicHq...\",\n                \"body\": \"Test message.\",\n                \"attributes\": {\n                    \"ApproximateReceiveCount\": \"1\",\n                    \"SentTimestamp\": \"1545082650636\",\n                    \"SenderId\": \"AIDAIENQZJOLO23YVJ4VO\",\n                    \"ApproximateFirstReceiveTimestamp\": \"1545082650649\",\n                    \"AWSTraceHeader\":\"Root=1-5759e988-bd862e3fe1be46a994272793;Parent=53995c3f42cd8ad8;Sampled=0\"\n                },\n                \"messageAttributes\": {},\n                \"md5OfBody\": \"e4e68fb7bd0e697a0ae8f1bb342846b3\",\n                \"eventSource\": \"aws:sqs\",\n                \"eventSourceARN\": \"arn:aws:sqs:us-east-2:123456789012:my-queue\",\n                
\"awsRegion\": \"us-east-2\"\n            },\n            {\n                \"messageId\": \"2e1424d4-f796-459a-8184-9c92662be6da\",\n                \"receiptHandle\": \"AQEBzWwaftRI0KuVm4tP+/7q1rGgNqicHq...\",\n                \"body\": \"Test message.\",\n                \"attributes\": {\n                    \"ApproximateReceiveCount\": \"1\",\n                    \"SentTimestamp\": \"1545082650636\",\n                    \"SenderId\": \"AIDAIENQZJOLO23YVJ4VO\",\n                    \"ApproximateFirstReceiveTimestamp\": \"1545082650649\",\n                    \"AWSTraceHeader\":\"Root=1-5759e988-bd862e3fe1be46a994272793;Parent=53995c3f42cd8ad8\"\n                },\n                \"messageAttributes\": {},\n                \"md5OfBody\": \"e4e68fb7bd0e697a0ae8f1bb342846b3\",\n                \"eventSource\": \"aws:sqs\",\n                \"eventSourceARN\": \"arn:aws:sqs:us-east-2:123456789012:my-queue\",\n                \"awsRegion\": \"us-east-2\"\n            }\n        ]\n    }\n\ndef test_return_true_when_sampling_1():\n    assert SqsMessageHelper.isSampled(sampleSqsMessageEvent['Records'][0]) == True\n\ndef test_return_false_when_sampling_0():\n    assert SqsMessageHelper.isSampled(sampleSqsMessageEvent['Records'][1]) == False\n\ndef test_return_false_with_no_sampling_flag():\n    assert SqsMessageHelper.isSampled(sampleSqsMessageEvent['Records'][2]) == False"
  },
  {
    "path": "tests/test_throwable.py",
    "content": "from aws_xray_sdk.core.models.throwable import Throwable\n\n\ndef test_message_and_type():\n\n    e = TypeError('msg')\n    throwable = Throwable(e, None, True)\n    assert throwable.message == 'msg'\n    assert throwable.type == type(e).__name__\n    assert throwable.remote\n\n\ndef test_stack_trace_parsing():\n    # sample output using `traceback.extract_stack()`\n    stack = [\n        ('/path/to/test.py', 10, 'module', 'another_function()'),\n        ('/path/to/test.py', 3, 'another_function', 'wrong syntax'),\n    ]\n\n    throwable = Throwable(TypeError(), stack)\n\n    entry1 = throwable.stack[0]\n    assert entry1['path'] == 'test.py'\n    assert entry1['line'] == 10\n    assert entry1['label'] == 'module'\n\n    entry2 = throwable.stack[1]\n    assert entry2['path'] == 'test.py'\n    assert entry2['line'] == 3\n    assert entry2['label'] == 'another_function'\n"
  },
  {
    "path": "tests/test_trace_entities.py",
    "content": "# -*- coding: iso-8859-15 -*-\n\nimport pytest\n\nfrom aws_xray_sdk.core.models.segment import Segment\nfrom aws_xray_sdk.core.models.subsegment import Subsegment\nfrom aws_xray_sdk.core.models import http\nfrom aws_xray_sdk.core.exceptions.exceptions import SegmentNameMissingException\nfrom aws_xray_sdk.core.exceptions.exceptions import SegmentNotFoundException\nfrom aws_xray_sdk.core.exceptions.exceptions import AlreadyEndedException\n\nfrom .util import entity_to_dict\nfrom .util import get_new_stubbed_recorder\n\nxray_recorder = get_new_stubbed_recorder()\n\n\ndef test_unicode_entity_name():\n\n    name1 = u'福'\n    name2 = u'セツナ'\n    segment = Segment(name1)\n    subsegment = Subsegment(name2, 'local', segment)\n\n    assert segment.name == name1\n    assert subsegment.name == name2\n\n\ndef test_segment_user():\n    segment = Segment('seg')\n    segment.set_user('whoami')\n    doc = entity_to_dict(segment)\n\n    assert doc['user'] == 'whoami'\n\n\ndef test_put_http_meta():\n\n    segment = Segment('seg')\n    segment.put_http_meta(http.URL, 'my url')\n    segment.put_http_meta(http.STATUS, 200)\n    # unsupported key should be dropped\n    segment.put_http_meta('somekey', 'somevalue')\n\n    doc = entity_to_dict(segment)\n    assert doc['http']['request'][http.URL] == 'my url'\n    assert doc['http']['response'][http.STATUS] == 200\n    assert 'somekey' not in doc\n\n\ndef test_put_metadata():\n\n    segment = Segment('seg')\n    meta = {\n        'key1': 'value1',\n        'key2': 'value2',\n    }\n    segment.put_metadata('key', meta)\n\n    subsegment = Subsegment('sub', 'local', segment)\n    segment.add_subsegment(subsegment)\n    subsegment.put_metadata('key', meta, 'my namespace')\n\n    doc = entity_to_dict(segment)\n    assert doc['metadata']['default']['key'] == meta\n\n    sub_doc = doc['subsegments'][0]\n    assert sub_doc['metadata']['my namespace']['key'] == meta\n\n\ndef test_put_annotation():\n\n    segment = Segment('seg')\n 
   invalid = {\n        'key1': 'value1',\n        'key2': 'value2',\n    }\n    # invalid annotation key-value pair should be dropped\n    segment.put_annotation('valid_key', invalid)\n    segment.put_annotation('invalid-key', 'validvalue')\n    segment.put_annotation('number', 1)\n\n    subsegment = Subsegment('sub', 'local', segment)\n    segment.add_subsegment(subsegment)\n    subsegment.put_annotation('bool', False)\n\n    doc = entity_to_dict(segment)\n    assert doc['annotations']['number'] == 1\n    assert 'invalid-value' not in doc['annotations']\n    assert 'invalid-key' not in doc['annotations']\n\n    sub_doc = doc['subsegments'][0]\n    assert not sub_doc['annotations']['bool']\n\n\ndef test_reference_counting():\n\n    segment = Segment('seg')\n    subsegment = Subsegment('sub', 'local', segment)\n    segment.add_subsegment(subsegment)\n    subsegment = Subsegment('sub', 'local', segment)\n    subsubsegment = Subsegment('subsub', 'local', segment)\n    subsegment.add_subsegment(subsubsegment)\n\n    assert not segment.ready_to_send()\n    assert segment.ref_counter.get_current() == 2\n\n    subsubsegment.close()\n    assert not segment.ready_to_send()\n    assert segment.ref_counter.get_current() == 1\n\n    subsegment.close()\n    assert not segment.ready_to_send()\n    assert segment.ref_counter.get_current() == 0\n\n    segment.close()\n    assert segment.ready_to_send()\n    assert segment.get_total_subsegments_size() == 2\n\n\ndef test_flags_on_status_code():\n\n    segment1 = Segment('seg')\n    segment1.apply_status_code(429)\n    assert segment1.throttle\n    assert segment1.error\n\n    segment2 = Segment('seg')\n    segment2.apply_status_code(503)\n    assert segment2.fault\n\n    segment3 = Segment('seg')\n    segment3.apply_status_code(403)\n    assert segment3.error\n\n\ndef test_mutate_closed_entity():\n\n    segment = Segment('seg')\n    segment.close()\n\n    with pytest.raises(AlreadyEndedException):\n        
segment.put_annotation('key', 'value')\n\n    with pytest.raises(AlreadyEndedException):\n        segment.put_metadata('key', 'value')\n\n    with pytest.raises(AlreadyEndedException):\n        segment.put_http_meta('url', 'my url')\n\n    with pytest.raises(AlreadyEndedException):\n        segment.close()\n\n\ndef test_no_rule_name_pollution():\n    segment1 = Segment('seg1')\n    segment2 = Segment('seg2')\n    segment1.set_rule_name('rule1')\n    segment2.set_rule_name('rule2')\n\n    assert segment1.aws['xray']['sampling_rule_name'] == 'rule1'\n    assert segment2.aws['xray']['sampling_rule_name'] == 'rule2'\n\n\ndef test_no_empty_properties():\n\n    segment = Segment('seg')\n    segment.close()\n    doc = entity_to_dict(segment)\n\n    assert 'http' not in doc\n    assert 'aws' not in doc\n    assert 'metadata' not in doc\n    assert 'annotations' not in doc\n    assert 'subsegments' not in doc\n    assert 'cause' not in doc\n\n\ndef test_required_properties():\n\n    segment = Segment('seg')\n    segment.close()\n    doc = entity_to_dict(segment)\n\n    assert 'trace_id' in doc\n    assert 'id' in doc\n    assert 'start_time' in doc\n    assert 'end_time' in doc\n\n\ndef test_missing_segment_name():\n\n    with pytest.raises(SegmentNameMissingException):\n        Segment(None)\n\n\ndef test_missing_parent_segment():\n\n    with pytest.raises(SegmentNotFoundException):\n        Subsegment('name', 'local', None)\n\n\ndef test_add_exception():\n    segment = Segment('seg')\n    exception = Exception(\"testException\")\n    stack = [['path', 'line', 'label']]\n    segment.add_exception(exception=exception, stack=stack)\n    segment.close()\n\n    cause = segment.cause\n    assert 'exceptions' in cause\n    exceptions = cause['exceptions']\n    assert len(exceptions) == 1\n    assert 'working_directory' in cause\n    exception = exceptions[0]\n    assert 'testException' == exception.message\n    expected_stack = [{'path': 'path', 'line': 'line', 'label': 
'label'}]\n    assert expected_stack == exception.stack\n\n\ndef test_add_exception_referencing():\n    segment = Segment('seg')\n    subseg = Subsegment('subseg', 'remote', segment)\n    exception = Exception(\"testException\")\n    stack = [['path', 'line', 'label']]\n    subseg.add_exception(exception=exception, stack=stack)\n    segment.add_exception(exception=exception, stack=stack)\n    subseg.close()\n    segment.close()\n\n    seg_cause = segment.cause\n    subseg_cause = subseg.cause\n\n    assert isinstance(subseg_cause, dict)\n    assert isinstance(seg_cause, str)\n    assert seg_cause == subseg_cause['exceptions'][0].id\n\n\ndef test_add_exception_cause_resetting():\n    segment = Segment('seg')\n    subseg = Subsegment('subseg', 'remote', segment)\n    exception = Exception(\"testException\")\n    stack = [['path', 'line', 'label']]\n    subseg.add_exception(exception=exception, stack=stack)\n    segment.add_exception(exception=exception, stack=stack)\n\n    segment.add_exception(exception=Exception(\"newException\"), stack=stack)\n    subseg.close()\n    segment.close()\n\n    seg_cause = segment.cause\n    assert isinstance(seg_cause, dict)\n    assert 'newException' == seg_cause['exceptions'][0].message\n\n\ndef test_add_exception_appending_exceptions():\n    segment = Segment('seg')\n    stack = [['path', 'line', 'label']]\n    segment.add_exception(exception=Exception(\"testException\"), stack=stack)\n    segment.add_exception(exception=Exception(\"newException\"), stack=stack)\n    segment.close()\n\n    assert isinstance(segment.cause, dict)\n    assert len(segment.cause['exceptions']) == 2\n\ndef test_adding_subsegments_with_recorder():\n    xray_recorder.configure(sampling=False)\n    xray_recorder.clear_trace_entities()\n\n    segment = xray_recorder.begin_segment('parent');\n    subsegment = xray_recorder.begin_subsegment('sampled-child')\n    unsampled_subsegment = xray_recorder.begin_subsegment_without_sampling('unsampled-child1')\n    
unsampled_child_subsegment = xray_recorder.begin_subsegment('unsampled-child2')\n\n    assert segment.sampled == True\n    assert subsegment.sampled == True\n    assert unsampled_subsegment.sampled == False\n    assert unsampled_child_subsegment.sampled == False\n\n    xray_recorder.clear_trace_entities()\n"
  },
  {
    "path": "tests/test_trace_header.py",
    "content": "from aws_xray_sdk.core.models.trace_header import TraceHeader\n\n\nTRACE_ID = '1-5759e988-bd862e3fe1be46a994272793'\nPARENT_ID = '53995c3f42cd8ad8'\n\n\ndef test_no_sample():\n    header = TraceHeader(root=TRACE_ID, parent=PARENT_ID)\n    assert header.sampled is None\n    assert header.root == TRACE_ID\n    assert header.parent == PARENT_ID\n    assert header.to_header_str() == 'Root=%s;Parent=%s' % (TRACE_ID, PARENT_ID)\n\n\ndef test_no_parent():\n    header = TraceHeader(root=TRACE_ID, sampled=1)\n    assert header.parent is None\n    assert header.to_header_str() == 'Root=%s;Sampled=1' % TRACE_ID\n\n\ndef test_from_str():\n    # a full header string that has all fields present\n    header_str1 = 'Root=%s;Parent=%s;Sampled=1' % (TRACE_ID, PARENT_ID)\n    header1 = TraceHeader.from_header_str(header_str1)\n    assert header1.root == TRACE_ID\n    assert header1.parent == PARENT_ID\n    assert header1.sampled == 1\n\n    # missing parent id\n    header_str2 = 'Root=%s;Sampled=?' % TRACE_ID\n    header2 = TraceHeader.from_header_str(header_str2)\n    assert header2.root == TRACE_ID\n    assert header2.parent is None\n    assert header2.sampled == '?'\n\n    # missing sampled\n    header_str3 = 'Root=%s;Parent=%s' % (TRACE_ID, PARENT_ID)\n    header3 = TraceHeader.from_header_str(header_str3)\n    assert header3.root == TRACE_ID\n    assert header3.parent == PARENT_ID\n    assert header3.sampled is None\n\n\ndef test_arbitrary_fields():\n    origin_header_str = 'Root=%s;k1=v1;k2=v2' % TRACE_ID\n    header = TraceHeader.from_header_str(origin_header_str)\n    header_str = header.to_header_str()\n\n    assert 'k1=v1' in header_str\n    assert 'k2=v2' in header_str\n\n\ndef test_invalid_str():\n    header = TraceHeader.from_header_str('some invalid string')\n    assert header.root is None\n    assert header.parent is None\n    assert header.sampled is None\n"
  },
  {
    "path": "tests/test_traceid.py",
    "content": "import os\nimport pytest\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.models.traceid import TraceId\n\n\n@pytest.fixture(autouse=True)\ndef cleanup():\n    \"\"\"\n    Clean up Environmental Variable for enable before and after tests\n    \"\"\"\n    if 'AWS_XRAY_NOOP_ID' in os.environ:\n        del os.environ['AWS_XRAY_NOOP_ID']\n    yield\n    if 'AWS_XRAY_NOOP_ID' in os.environ:\n        del os.environ['AWS_XRAY_NOOP_ID']\n\n\ndef test_id_format():\n    trace_id = TraceId().to_id()\n    assert len(trace_id) == 35\n\n    parts = trace_id.split(TraceId.DELIMITER)\n    assert parts[0] == '1'\n    int(parts[1], 16)\n    int(parts[2], 16)\n\n\ndef test_id_generation_default_sampling_false():\n    segment = xray_recorder.begin_segment('segment_name', sampling=False)\n\n    # Start and end a subsegment\n    subsegment = xray_recorder.begin_subsegment('subsegment_name')\n    xray_recorder.end_subsegment()\n\n    # Close the segment\n    xray_recorder.end_segment()\n\n    assert segment.id == '0000000000000000'\n    assert segment.trace_id == '1-00000000-000000000000000000000000'\n    assert subsegment.id == '0000000000000000'\n    assert subsegment.trace_id == '1-00000000-000000000000000000000000'\n    assert subsegment.parent_id == '0000000000000000'\n\n\ndef test_id_generation_default_sampling_true():\n    segment = xray_recorder.begin_segment('segment_name', sampling=True)\n\n    # Start and end a subsegment\n    subsegment = xray_recorder.begin_subsegment('subsegment_name')\n    xray_recorder.end_subsegment()\n\n    # Close the segment\n    xray_recorder.end_segment()\n\n    assert segment.id != '0000000000000000'\n    assert segment.trace_id != '1-00000000-000000000000000000000000'\n    assert subsegment.id != '0000000000000000'\n    assert subsegment.trace_id != '1-00000000-000000000000000000000000'\n    assert subsegment.parent_id != '0000000000000000'\n\n\ndef test_id_generation_noop_true():\n    
os.environ['AWS_XRAY_NOOP_ID'] = 'True'\n    segment = xray_recorder.begin_segment('segment_name', sampling=False)\n\n    # Start and end a subsegment\n    subsegment = xray_recorder.begin_subsegment('subsegment_name')\n    xray_recorder.end_subsegment()\n\n    # Close the segment\n    xray_recorder.end_segment()\n\n    assert segment.id == '0000000000000000'\n    assert segment.trace_id == '1-00000000-000000000000000000000000'\n    assert subsegment.id == '0000000000000000'\n    assert subsegment.trace_id == '1-00000000-000000000000000000000000'\n    assert subsegment.parent_id == '0000000000000000'\n\n\ndef test_id_generation_noop_false():\n    os.environ['AWS_XRAY_NOOP_ID'] = 'FALSE'\n    segment = xray_recorder.begin_segment('segment_name', sampling=False)\n\n    # Start and end a subsegment\n    subsegment = xray_recorder.begin_subsegment('subsegment_name')\n    xray_recorder.end_subsegment()\n\n    # Close the segment\n    xray_recorder.end_segment()\n\n    assert segment.id != '0000000000000000'\n    assert segment.trace_id != '1-00000000-000000000000000000000000'\n    assert subsegment.id != '0000000000000000'\n    assert subsegment.trace_id != '1-00000000-000000000000000000000000'\n    assert subsegment.parent_id != '0000000000000000'\n"
  },
  {
    "path": "tests/test_utils.py",
    "content": "from aws_xray_sdk.ext.util import to_snake_case, get_hostname, strip_url, inject_trace_header\nfrom aws_xray_sdk.core.models.segment import Segment\nfrom aws_xray_sdk.core.models.subsegment import Subsegment\nfrom aws_xray_sdk.core.models.dummy_entities import DummySegment, DummySubsegment\nfrom .util import get_new_stubbed_recorder\n\nxray_recorder = get_new_stubbed_recorder()\n\nUNKNOWN_HOST = \"UNKNOWN HOST\"\n\n\ndef test_to_snake_case():\n    s1 = to_snake_case('Bucket')\n    assert s1 == 'bucket'\n\n    s2 = to_snake_case('TableName')\n    assert s2 == 'table_name'\n\n    s3 = to_snake_case('ACLName')\n    assert s3 == 'acl_name'\n\n    s4 = to_snake_case('getHTTPResponse')\n    assert s4 == 'get_http_response'\n\n\ndef test_get_hostname():\n    s1 = get_hostname(\"https://amazon.com/\")\n    assert s1 == \"amazon.com\"\n\n    s2 = get_hostname(\"https://amazon.com/avery_long/path/and/stuff\")\n    assert s2 == \"amazon.com\"\n\n    s3 = get_hostname(\"http://aws.amazon.com/should_get/sub/domains\")\n    assert s3 == \"aws.amazon.com\"\n\n    s4 = get_hostname(\"https://amazon.com/somestuff?get=request&data=chiem\")\n    assert s4 == \"amazon.com\"\n\n    s5 = get_hostname(\"INVALID_URL\")\n    assert s5 == UNKNOWN_HOST\n\n    s6 = get_hostname(\"\")\n    assert s6 == UNKNOWN_HOST\n\n    s7 = get_hostname(None)\n    assert s7 == UNKNOWN_HOST\n\n\ndef test_strip_url():\n    s1 = strip_url(\"https://amazon.com/page?getdata=response&stuff=morestuff\")\n    assert s1 == \"https://amazon.com/page\"\n\n    s2 = strip_url(\"aws.google.com/index.html?field=data&suchcool=data\")\n    assert s2 == \"aws.google.com/index.html\"\n\n    s3 = strip_url(\"INVALID_URL\")\n    assert s3 == \"INVALID_URL\"\n\n    assert strip_url(\"\") == \"\"\n    assert not strip_url(None)\n\n\ndef test_inject_trace_header_unsampled():\n    headers = {'host': 'test', 'accept': '*/*', 'connection': 'keep-alive', 'X-Amzn-Trace-Id': 
'Root=1-6369739a-7d8bb07e519b795eb24d382d;Parent=089e3de743fb9e79;Sampled=1'}\n    xray_recorder = get_new_stubbed_recorder()\n    xray_recorder.configure(sampling=True)\n    segment = xray_recorder.begin_segment('name', sampling=True)\n    subsegment = xray_recorder.begin_subsegment_without_sampling('unsampled')\n\n    inject_trace_header(headers, subsegment)\n\n    assert 'Sampled=0' in headers['X-Amzn-Trace-Id']\n\ndef test_inject_trace_header_respects_parent_subsegment():\n    headers = {'host': 'test', 'accept': '*/*', 'connection': 'keep-alive', 'X-Amzn-Trace-Id': 'Root=1-6369739a-7d8bb07e519b795eb24d382d;Parent=089e3de743fb9e79;Sampled=1'}\n\n    xray_recorder = get_new_stubbed_recorder()\n    xray_recorder.configure(sampling=True)\n    segment = xray_recorder.begin_segment('name', sampling=True)\n    subsegment = xray_recorder.begin_subsegment_without_sampling('unsampled')\n    subsegment2 = xray_recorder.begin_subsegment('unsampled2')\n    inject_trace_header(headers, subsegment2)\n\n    assert 'Sampled=0' in headers['X-Amzn-Trace-Id']\n\ndef test_inject_trace_header_sampled():\n    headers = {'host': 'test', 'accept': '*/*', 'connection': 'keep-alive', 'X-Amzn-Trace-Id': 'Root=1-6369739a-7d8bb07e519b795eb24d382d;Parent=089e3de743fb9e79;Sampled=1'}\n    xray_recorder = get_new_stubbed_recorder()\n    xray_recorder.configure(sampling=True)\n    segment = xray_recorder.begin_segment('name')\n    subsegment = xray_recorder.begin_subsegment('unsampled')\n\n    inject_trace_header(headers, subsegment)\n\n    assert 'Sampled=1' in headers['X-Amzn-Trace-Id']"
  },
  {
    "path": "tests/test_wildcard_match.py",
    "content": "from aws_xray_sdk.core.utils.search_pattern import wildcard_match\n\n\ndef test_match_exact_positive():\n    pat = 'foo'\n    bar = 'foo'\n    assert wildcard_match(pat, bar)\n\n\ndef test_match_exact_negative():\n    pat = 'foo'\n    bar = 'cat'\n    assert not wildcard_match(pat, bar)\n\n\ndef test_single_wildcard_positive():\n    pat = 'fo?'\n    bar = 'foo'\n    assert wildcard_match(pat, bar)\n\n\ndef test_single_wildcard_negative():\n    pat = 'f?o'\n    bar = 'boo'\n    assert not wildcard_match(pat, bar)\n\n\ndef test_multiple_wildcard_positive():\n    pat = '?o?'\n    bar = 'foo'\n    assert wildcard_match(pat, bar)\n\n\ndef test_multiple_wildcard_negative():\n    pat = 'f??'\n    bar = 'boo'\n    assert not wildcard_match(pat, bar)\n\n\ndef test_glob_positive_zero_or_more():\n    pat = 'foo*'\n    bar = 'foo'\n    assert wildcard_match(pat, bar)\n\n\ndef test_glob_negative_zero_or_more():\n    pat = 'foo*'\n    bar = 'fo0'\n    assert not wildcard_match(pat, bar)\n\n\ndef test_glob_negative():\n    pat = 'fo*'\n    bar = 'boo'\n    assert not wildcard_match(pat, bar)\n\n\ndef test_glob_and_single_positive():\n    pat = '*o?'\n    bar = 'foo'\n    assert wildcard_match(pat, bar)\n\n\ndef test_glob_and_single_negative():\n    pat = 'f?*'\n    bar = 'boo'\n    assert not wildcard_match(pat, bar)\n\n\ndef test_pure_wildcard():\n    pat = '*'\n    bar = 'foo'\n    assert wildcard_match(pat, bar)\n\n\ndef test_exact_match():\n    pat = '6573459'\n    bar = '6573459'\n    assert wildcard_match(pat, bar)\n\n\ndef test_misc():\n    animal1 = '?at'\n    animal2 = '?o?se'\n    animal3 = '*s'\n\n    vehicle1 = 'J*'\n    vehicle2 = '????'\n\n    assert wildcard_match(animal1, 'bat')\n    assert wildcard_match(animal1, 'cat')\n    assert wildcard_match(animal2, 'horse')\n    assert wildcard_match(animal2, 'mouse')\n    assert wildcard_match(animal3, 'dogs')\n    assert wildcard_match(animal3, 'horses')\n\n    assert wildcard_match(vehicle1, 'Jeep')\n    
assert wildcard_match(vehicle2, 'ford')\n    assert not wildcard_match(vehicle2, 'chevy')\n    assert wildcard_match('*', 'cAr')\n\n    assert wildcard_match('*/foo', '/bar/foo')\n\n\ndef test_case_insensitivity():\n    assert wildcard_match('Foo', 'Foo', False)\n    assert wildcard_match('Foo', 'Foo', True)\n\n    assert not wildcard_match('Foo', 'FOO', False)\n    assert wildcard_match('Foo', 'FOO', True)\n\n    assert wildcard_match('Fo*', 'Foo0', False)\n    assert wildcard_match('Fo*', 'Foo0', True)\n\n    assert not wildcard_match('Fo*', 'FOo0', False)\n    assert wildcard_match('Fo*', 'FOo0', True)\n\n    assert wildcard_match('Fo?', 'Foo', False)\n    assert wildcard_match('Fo?', 'Foo', True)\n\n    assert not wildcard_match('Fo?', 'FOo', False)\n    assert wildcard_match('Fo?', 'FoO', False)\n    assert wildcard_match('Fo?', 'FOO', True)\n\n\ndef test_no_globs():\n    assert not wildcard_match('abcd', 'abc')\n\n\ndef test_edge_case_globs():\n    assert wildcard_match('', '')\n    assert wildcard_match('a', 'a')\n    assert wildcard_match('*a', 'a')\n    assert wildcard_match('*a', 'ba')\n    assert wildcard_match('a*', 'a')\n    assert wildcard_match('a*', 'ab')\n    assert wildcard_match('a*a', 'aa')\n    assert wildcard_match('a*a', 'aba')\n    assert wildcard_match('a*a', 'aaa')\n    assert wildcard_match('a*a*', 'aa')\n    assert wildcard_match('a*a*', 'aba')\n    assert wildcard_match('a*a*', 'aaa')\n    assert wildcard_match('a*a*', 'aaaaaaaaaaaaaaaaaaaaaaaaaa')\n    assert wildcard_match('a*b*a*b*a*b*a*b*a*',\n                          'akljd9gsdfbkjhaabajkhbbyiaahkjbjhbuykjakjhabkjhbabjhkaabbabbaaakljdfsjklababkjbsdabab')\n    assert not wildcard_match('a*na*ha', 'anananahahanahana')\n\n\ndef test_multi_globs():\n    assert wildcard_match('*a', 'a')\n    assert wildcard_match('**a', 'a')\n    assert wildcard_match('***a', 'a')\n    assert wildcard_match('**a*', 'a')\n    assert wildcard_match('**a**', 'a')\n\n    assert wildcard_match('a**b', 
'ab')\n    assert wildcard_match('a**b', 'abb')\n\n    assert wildcard_match('*?', 'a')\n    assert wildcard_match('*?', 'aa')\n    assert wildcard_match('*??', 'aa')\n    assert not wildcard_match('*???', 'aa')\n    assert wildcard_match('*?', 'aaa')\n\n    assert wildcard_match('?', 'a')\n    assert not wildcard_match('??', 'a')\n\n    assert wildcard_match('?*', 'a')\n    assert wildcard_match('*?', 'a')\n    assert not wildcard_match('?*?', 'a')\n    assert wildcard_match('?*?', 'aa')\n    assert wildcard_match('*?*', 'a')\n\n    assert not wildcard_match('*?*a', 'a')\n    assert wildcard_match('*?*a*', 'ba')\n"
  },
  {
    "path": "tests/util.py",
    "content": "import json\nimport threading\n\nfrom aws_xray_sdk.core.recorder import AWSXRayRecorder\nfrom aws_xray_sdk.core.emitters.udp_emitter import UDPEmitter\nfrom aws_xray_sdk.core.sampling.sampler import DefaultSampler\nfrom aws_xray_sdk.core.utils.conversion import metadata_to_dict\n\n\nclass CircularReferenceClass:\n    \"\"\"Test class that can create circular references\"\"\"\n    def __init__(self, name):\n        self.name = name\n        self.ref = None\n\n\nclass StubbedEmitter(UDPEmitter):\n\n    def __init__(self, daemon_address='127.0.0.1:2000'):\n        super().__init__(daemon_address)\n        self._local = threading.local()\n\n    def send_entity(self, entity):\n        setattr(self._local, 'cache', entity)\n\n    def pop(self):\n        if hasattr(self._local, 'cache'):\n            entity = self._local.cache\n        else:\n            entity = None\n\n        self._local.__dict__.clear()\n        return entity\n\n\nclass StubbedSampler(DefaultSampler):\n\n    def start(self):\n        pass\n\n\ndef get_new_stubbed_recorder():\n    \"\"\"\n    Returns a new AWSXRayRecorder object with emitter stubbed\n    \"\"\"\n    from aws_xray_sdk.core.async_recorder import AsyncAWSXRayRecorder\n\n    recorder = AsyncAWSXRayRecorder()\n    recorder.configure(emitter=StubbedEmitter(), sampler=StubbedSampler())\n    return recorder\n\n\ndef entity_to_dict(trace_entity):\n\n    raw = json.loads(trace_entity.serialize())\n    return raw\n\n\ndef _search_entity(entity, name):\n    \"\"\"Helper function to that recursivly looks at subentities\n    Returns a serialized entity that matches the name given or None\"\"\"\n    if 'name' in entity:\n        my_name = entity['name']\n        if my_name == name:\n            return entity\n        else:\n            if \"subsegments\" in entity:\n                for s in entity['subsegments']:\n                    result = _search_entity(s, name)\n                    if result is not None:\n                        
return result\n    return None\n\n\ndef find_subsegment(segment, name):\n    \"\"\"Helper function to find a subsegment by name in the entity tree\"\"\"\n    segment = entity_to_dict(segment)\n    for entity in segment['subsegments']:\n        result = _search_entity(entity, name)\n        if result is not None:\n            return result\n    return None\n\n\ndef find_subsegment_by_annotation(segment, key, value):\n    \"\"\"Helper function to find a subsegment by annoation key & value in the entity tree\"\"\"\n    segment = entity_to_dict(segment)\n    for entity in segment['subsegments']:\n        result = _search_entity_by_annotation(entity, key, value)\n        if result is not None:\n            return result\n    return None\n\n\ndef _search_entity_by_annotation(entity, key, value):\n    \"\"\"Helper function to that recursivly looks at subentities\n    Returns a serialized entity that matches the annoation key & value given or None\"\"\"\n    if 'annotations' in entity:\n        if key in entity['annotations']:\n            my_value = entity['annotations'][key]\n            if my_value == value:\n                return entity\n        else:\n            if \"subsegments\" in entity:\n                for s in entity['subsegments']:\n                    result = _search_entity_by_annotation(s, key, value)\n                    if result is not None:\n                        return result\n    return None\n\n\ndef test_metadata_to_dict_self_reference():\n    \"\"\"Test that self-referencing objects don't cause stack overflow\"\"\"\n    obj = CircularReferenceClass(\"self_ref\")\n    obj.ref = obj  # Self reference\n    \n    # This should not cause stack overflow\n    result = metadata_to_dict(obj)\n    \n    # The function should handle the self reference gracefully\n    assert isinstance(result, dict)\n"
  },
  {
    "path": "tox-distributioncheck.ini",
    "content": "[tox]\nskipsdist = true\n\n[testenv:distribution-check]\ndeps =\n    pytest > 5.2.0\n    aws-xray-sdk\ncommands =\n    pytest tests/distributioncheck\n"
  },
  {
    "path": "tox.ini",
    "content": "[tox]\nskip_missing_interpreters = True\nenvlist =\n    py{37,38,39,310,311,312}-core\n\n    py{37,38,39,310,311,312}-ext-aiobotocore\n\n    py{37,38,39,310,311,312}-ext-aiohttp\n\n    py{37,38,39,310,311,312}-ext-botocore\n\n    py{37,38,39,310,311,312}-ext-bottle\n\n    py{37,38,39}-ext-django-2\n\n    py{37,38,39,310}-ext-django-3\n\n    ; Django4 is only for python 3.8+\n    py{38,39,310,311,312}-ext-django-4\n\n    py{37,38,39,310,311,312}-ext-flask\n\n    py{37,38,39,310,311,312}-ext-flask_sqlalchemy\n\n    py{37,38,39,310,311,312}-ext-httplib\n\n    py{37,38,39,310,311,312}-ext-httpx\n\n    py{37,38,39,310,311,312}-ext-pg8000\n\n    py{37,38,39,310,311,312}-ext-psycopg2\n\n    py{37,38,39,310,311}-ext-psycopg\n    \n    py{37,38,39,310,311,312}-ext-pymysql\n\n    py{37,38,39,310,311,312}-ext-pynamodb\n\n    py{37,38,39,310,311,312}-ext-requests\n\n    py{37,38,39,310,311,312}-ext-sqlalchemy\n\n    py{37,38,39,310,311,312}-ext-sqlalchemy_core\n\n    py{37,38,39,310,311,312}-ext-sqlite3\n\n[testenv]\npassenv = TOXENV,CI,CODECOV_*\n\ndeps =\n    ; Testing packages\n    pytest > 3.0.0, < 8.0.0\n    pytest-benchmark\n    coverage == 7.2.7\n    codecov\n\n    ; Packages common to all test environments\n    wrapt\n\n    ; Python 3.5+ only deps\n    py{37,38,39,310,311,312}: pytest-asyncio == 0.21.2\n\n    ; For pkg_resources\n    py{37,38,39,310,311,312}: setuptools\n\n    ext-aiobotocore: aiobotocore >= 0.10.0\n    ext-aiobotocore: pytest-asyncio\n\n    ext-aiohttp: aiohttp >= 3.3.0\n    ext-aiohttp: pytest-aiohttp < 1.1.0\n\n    ext-httpx: httpx >= 0.20\n    ext-httpx: pytest-asyncio >= 0.19\n\n    ext-requests: requests\n\n    ext-bottle: bottle >= 0.10\n    ext-bottle: webtest\n\n    ext-flask: flask >= 0.10\n\n    ext-flask_sqlalchemy: flask >= 0.10,<3.0.0\n    ext-flask_sqlalchemy: Flask-SQLAlchemy <= 2.5.1\n    ext-flask_sqlalchemy: sqlalchemy >=1.0.0,<2.0.0\n\n    ext-sqlalchemy: sqlalchemy >=1.0.0,<2.0.0\n\n    ext-sqlalchemy_core: 
sqlalchemy >=1.0.0,<2.0.0\n    ext-sqlalchemy_core: testing.postgresql\n    ext-sqlalchemy_core: psycopg2\n    ext-sqlalchemy_core: pymysql >= 1.0.0\n    ext-sqlalchemy_core: cryptography\n\n    ext-django-2: Django >=2.0,<3.0\n    ext-django-3: Django >=3.0,<4.0\n    ext-django-4: Django >=4.0,<5.0\n    ext-django: django-fake-model\n\n    py{37,38,39,310,311,312}-ext-pynamodb: pynamodb >=3.3.1,<6.0.0\n\n    ext-psycopg2: psycopg2\n    ext-psycopg2: testing.postgresql\n\n    ext-psycopg: psycopg\n    ext-psycopg: psycopg[pool]\n    ext-psycopg: testing.postgresql\n\n    ext-pg8000: pg8000 <= 1.20.0\n    ext-pg8000: testing.postgresql\n\n    py{37,38,39,310,311,312}-ext-pymysql: pymysql >= 1.0.0\n    py{37,38,39,310,311,312}-ext-pymysql: cryptography\n\nsetenv =\n    DJANGO_SETTINGS_MODULE = tests.ext.django.app.settings\n    AWS_SECRET_ACCESS_KEY = fake_key\n    AWS_ACCESS_KEY_ID=fake_id\n\ncommands =\n    coverage erase\n\n    py{37,38,39,310,311,312}-core: coverage run --append --source aws_xray_sdk -m pytest --ignore tests/ext {posargs}\n\n    ext-aiobotocore: coverage run --append --source aws_xray_sdk -m pytest tests/ext/aiobotocore {posargs}\n\n    ext-aiohttp: coverage run --append --source aws_xray_sdk -m pytest tests/ext/aiohttp {posargs}\n\n    ext-botocore: coverage run --append --source aws_xray_sdk -m pytest tests/ext/botocore {posargs}\n\n    ext-bottle: coverage run --append --source aws_xray_sdk -m pytest tests/ext/bottle {posargs}\n\n    ext-django: coverage run --append --source aws_xray_sdk -m pytest tests/ext/django {posargs}\n\n    ext-flask: coverage run --append --source aws_xray_sdk -m pytest tests/ext/flask {posargs}\n\n    ext-flask_sqlalchemy: coverage run --append --source aws_xray_sdk -m pytest tests/ext/flask_sqlalchemy {posargs}\n\n    ext-httplib: coverage run --append --source aws_xray_sdk -m pytest tests/ext/httplib {posargs}\n\n    ext-httpx: coverage run --append --source aws_xray_sdk -m pytest tests/ext/httpx {posargs}\n\n    
ext-pg8000: coverage run --append --source aws_xray_sdk -m pytest tests/ext/pg8000 {posargs}\n\n    ext-psycopg2: coverage run --append --source aws_xray_sdk -m pytest tests/ext/psycopg2 {posargs}\n    \n    ext-psycopg: coverage run --append --source aws_xray_sdk -m pytest tests/ext/psycopg {posargs}\n\n    ext-pymysql: coverage run --append --source aws_xray_sdk -m pytest tests/ext/pymysql {posargs}\n\n    ext-pynamodb: coverage run --append --source aws_xray_sdk -m pytest tests/ext/pynamodb {posargs}\n\n    ext-requests: coverage run --append --source aws_xray_sdk -m pytest tests/ext/requests {posargs}\n\n    ext-sqlalchemy: coverage run --append --source aws_xray_sdk -m pytest tests/ext/sqlalchemy {posargs}\n\n    py{37,38,39,310,311,312}-ext-sqlalchemy_core: coverage run --append --source aws_xray_sdk -m pytest tests/ext/sqlalchemy_core {posargs}\n\n    ext-sqlite3: coverage run --append --source aws_xray_sdk -m pytest tests/ext/sqlite3 {posargs}\n\n    ; TODO: add additional logic to combine coverage from \"core\" and \"ext\" test runs\n    ; codecov\n"
  }
]