Showing preview only (638K chars total). Download the full file or copy to clipboard to get everything.
Repository: aws/aws-xray-sdk-python
Branch: master
Commit: 48b6a8f2bb13
Files: 261
Total size: 574.9 KB
Directory structure:
gitextract_js1ahssn/
├── .github/
│ ├── CODEOWNERS
│ ├── PULL_REQUEST_TEMPLATE.md
│ ├── dependency-check-suppressions.xml
│ ├── stale.yml
│ ├── trivy/
│ │ └── daily-scan.trivyignore.yaml
│ └── workflows/
│ ├── IntegrationTesting.yaml
│ ├── Release.yaml
│ ├── UnitTesting.yaml
│ ├── continuous-monitoring.yml
│ └── daily-scan.yml
├── .gitignore
├── CHANGELOG.rst
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── MANIFEST.in
├── NOTICE
├── README.md
├── __init__.py
├── aws_xray_sdk/
│ ├── __init__.py
│ ├── core/
│ │ ├── __init__.py
│ │ ├── async_context.py
│ │ ├── async_recorder.py
│ │ ├── context.py
│ │ ├── daemon_config.py
│ │ ├── emitters/
│ │ │ ├── __init__.py
│ │ │ └── udp_emitter.py
│ │ ├── exceptions/
│ │ │ ├── __init__.py
│ │ │ └── exceptions.py
│ │ ├── lambda_launcher.py
│ │ ├── models/
│ │ │ ├── __init__.py
│ │ │ ├── default_dynamic_naming.py
│ │ │ ├── dummy_entities.py
│ │ │ ├── entity.py
│ │ │ ├── facade_segment.py
│ │ │ ├── http.py
│ │ │ ├── noop_traceid.py
│ │ │ ├── segment.py
│ │ │ ├── subsegment.py
│ │ │ ├── throwable.py
│ │ │ ├── trace_header.py
│ │ │ └── traceid.py
│ │ ├── patcher.py
│ │ ├── plugins/
│ │ │ ├── __init__.py
│ │ │ ├── ec2_plugin.py
│ │ │ ├── ecs_plugin.py
│ │ │ ├── elasticbeanstalk_plugin.py
│ │ │ └── utils.py
│ │ ├── recorder.py
│ │ ├── sampling/
│ │ │ ├── __init__.py
│ │ │ ├── connector.py
│ │ │ ├── local/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── reservoir.py
│ │ │ │ ├── sampler.py
│ │ │ │ ├── sampling_rule.json
│ │ │ │ └── sampling_rule.py
│ │ │ ├── reservoir.py
│ │ │ ├── rule_cache.py
│ │ │ ├── rule_poller.py
│ │ │ ├── sampler.py
│ │ │ ├── sampling_rule.py
│ │ │ └── target_poller.py
│ │ ├── streaming/
│ │ │ ├── __init__.py
│ │ │ └── default_streaming.py
│ │ └── utils/
│ │ ├── __init__.py
│ │ ├── atomic_counter.py
│ │ ├── compat.py
│ │ ├── conversion.py
│ │ ├── search_pattern.py
│ │ ├── sqs_message_helper.py
│ │ └── stacktrace.py
│ ├── ext/
│ │ ├── __init__.py
│ │ ├── aiobotocore/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── aiohttp/
│ │ │ ├── __init__.py
│ │ │ ├── client.py
│ │ │ └── middleware.py
│ │ ├── boto_utils.py
│ │ ├── botocore/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── bottle/
│ │ │ ├── __init__.py
│ │ │ └── middleware.py
│ │ ├── dbapi2.py
│ │ ├── django/
│ │ │ ├── __init__.py
│ │ │ ├── apps.py
│ │ │ ├── conf.py
│ │ │ ├── db.py
│ │ │ ├── middleware.py
│ │ │ └── templates.py
│ │ ├── flask/
│ │ │ ├── __init__.py
│ │ │ └── middleware.py
│ │ ├── flask_sqlalchemy/
│ │ │ ├── __init__.py
│ │ │ └── query.py
│ │ ├── httplib/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── httpx/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── mysql/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── pg8000/
│ │ │ ├── README.md
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── psycopg/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── psycopg2/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── pymongo/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── pymysql/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── pynamodb/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── requests/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── resources/
│ │ │ └── aws_para_whitelist.json
│ │ ├── sqlalchemy/
│ │ │ ├── __init__.py
│ │ │ ├── query.py
│ │ │ └── util/
│ │ │ ├── __init__.py
│ │ │ └── decorators.py
│ │ ├── sqlalchemy_core/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── sqlite3/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ └── util.py
│ ├── sdk_config.py
│ └── version.py
├── docs/
│ ├── .gitignore
│ ├── Makefile
│ ├── _templates/
│ │ └── layout.html
│ ├── aws_xray_sdk.core.emitters.rst
│ ├── aws_xray_sdk.core.exceptions.rst
│ ├── aws_xray_sdk.core.models.rst
│ ├── aws_xray_sdk.core.plugins.rst
│ ├── aws_xray_sdk.core.rst
│ ├── aws_xray_sdk.core.sampling.rst
│ ├── aws_xray_sdk.core.streaming.rst
│ ├── aws_xray_sdk.core.utils.rst
│ ├── aws_xray_sdk.ext.aiobotocore.rst
│ ├── aws_xray_sdk.ext.aiohttp.rst
│ ├── aws_xray_sdk.ext.botocore.rst
│ ├── aws_xray_sdk.ext.django.rst
│ ├── aws_xray_sdk.ext.flask.rst
│ ├── aws_xray_sdk.ext.flask_sqlalchemy.rst
│ ├── aws_xray_sdk.ext.httplib.rst
│ ├── aws_xray_sdk.ext.httpx.rst
│ ├── aws_xray_sdk.ext.mysql.rst
│ ├── aws_xray_sdk.ext.pg8000.rst
│ ├── aws_xray_sdk.ext.psycopg2.rst
│ ├── aws_xray_sdk.ext.pymongo.rst
│ ├── aws_xray_sdk.ext.pymysql.rst
│ ├── aws_xray_sdk.ext.pynamodb.rst
│ ├── aws_xray_sdk.ext.requests.rst
│ ├── aws_xray_sdk.ext.rst
│ ├── aws_xray_sdk.ext.sqlalchemy.rst
│ ├── aws_xray_sdk.ext.sqlalchemy.util.rst
│ ├── aws_xray_sdk.ext.sqlalchemy_core.rst
│ ├── aws_xray_sdk.ext.sqlite3.rst
│ ├── aws_xray_sdk.rst
│ ├── basic.rst
│ ├── changes.rst
│ ├── conf.py
│ ├── configurations.rst
│ ├── frameworks.rst
│ ├── index.rst
│ ├── license.rst
│ ├── make.bat
│ ├── modules.rst
│ └── thirdparty.rst
├── sample-apps/
│ ├── LICENSE
│ └── flask/
│ ├── Dockerfile
│ ├── application.py
│ └── requirements.txt
├── setup.cfg
├── setup.py
├── terraform/
│ ├── eb.tf
│ ├── fixtures.us-west-2.tfvars
│ └── variables.tf
├── tests/
│ ├── __init__.py
│ ├── distributioncheck/
│ │ ├── __init__.py
│ │ └── test_sanity.py
│ ├── ext/
│ │ ├── __init__.py
│ │ ├── aiobotocore/
│ │ │ ├── __init__.py
│ │ │ └── test_aiobotocore.py
│ │ ├── aiohttp/
│ │ │ ├── __init__.py
│ │ │ ├── test_client.py
│ │ │ └── test_middleware.py
│ │ ├── botocore/
│ │ │ ├── __init__.py
│ │ │ └── test_botocore.py
│ │ ├── bottle/
│ │ │ ├── __init__.py
│ │ │ ├── test_bottle.py
│ │ │ └── views/
│ │ │ └── index.tpl
│ │ ├── django/
│ │ │ ├── __init__.py
│ │ │ ├── app/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── settings.py
│ │ │ │ ├── templates/
│ │ │ │ │ ├── block.html
│ │ │ │ │ ├── block_user.html
│ │ │ │ │ └── index.html
│ │ │ │ └── views.py
│ │ │ ├── test_db.py
│ │ │ ├── test_middleware.py
│ │ │ └── test_settings.py
│ │ ├── flask/
│ │ │ ├── __init__.py
│ │ │ └── test_flask.py
│ │ ├── flask_sqlalchemy/
│ │ │ ├── __init__.py
│ │ │ └── test_query.py
│ │ ├── httplib/
│ │ │ ├── __init__.py
│ │ │ └── test_httplib.py
│ │ ├── httpx/
│ │ │ ├── __init__.py
│ │ │ ├── test_httpx.py
│ │ │ └── test_httpx_async.py
│ │ ├── pg8000/
│ │ │ ├── __init__.py
│ │ │ └── test_pg8000.py
│ │ ├── psycopg/
│ │ │ ├── __init__.py
│ │ │ └── test_psycopg.py
│ │ ├── psycopg2/
│ │ │ ├── __init__.py
│ │ │ └── test_psycopg2.py
│ │ ├── pymysql/
│ │ │ ├── __init__.py
│ │ │ └── test_pymysql.py
│ │ ├── pynamodb/
│ │ │ ├── __init__.py
│ │ │ └── test_pynamodb.py
│ │ ├── requests/
│ │ │ ├── __init__.py
│ │ │ └── test_requests.py
│ │ ├── sqlalchemy/
│ │ │ ├── __init__.py
│ │ │ └── test_query.py
│ │ ├── sqlalchemy_core/
│ │ │ ├── __init__.py
│ │ │ ├── test_base.py
│ │ │ ├── test_dburl.py
│ │ │ ├── test_postgres.py
│ │ │ ├── test_sqlalchemy_core.py
│ │ │ └── test_sqlalchemy_core_2.py
│ │ └── sqlite3/
│ │ ├── __init__.py
│ │ └── test_sqlite3.py
│ ├── mock_module/
│ │ ├── __init__.py
│ │ ├── mock_file.py
│ │ └── mock_submodule/
│ │ ├── __init__.py
│ │ └── mock_subfile.py
│ ├── mock_sampling_rule.json
│ ├── test_async_local_storage.py
│ ├── test_async_recorder.py
│ ├── test_daemon_config.py
│ ├── test_dummy_entites.py
│ ├── test_facade_segment.py
│ ├── test_lambda_context.py
│ ├── test_local_sampling.py
│ ├── test_local_sampling_benchmark.py
│ ├── test_patcher.py
│ ├── test_plugins.py
│ ├── test_recorder.py
│ ├── test_sampling_rule_cache.py
│ ├── test_sdk_config.py
│ ├── test_serialize_entities.py
│ ├── test_sqs_message_helper.py
│ ├── test_throwable.py
│ ├── test_trace_entities.py
│ ├── test_trace_header.py
│ ├── test_traceid.py
│ ├── test_utils.py
│ ├── test_wildcard_match.py
│ └── util.py
├── tox-distributioncheck.ini
└── tox.ini
================================================
FILE CONTENTS
================================================
================================================
FILE: .github/CODEOWNERS
================================================
#####################################################
#
# List of approvers for this repository
#
#####################################################
#
# Learn about CODEOWNERS file format:
# https://help.github.com/en/articles/about-code-owners
#
* @aws/aws-x-ray
================================================
FILE: .github/PULL_REQUEST_TEMPLATE.md
================================================
*Issue #, if available:*
*Description of changes:*
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
================================================
FILE: .github/dependency-check-suppressions.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<suppressions xmlns="https://jeremylong.github.io/DependencyCheck/dependency-suppression.1.3.xsd">
</suppressions>
================================================
FILE: .github/stale.yml
================================================
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 30
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Limit to only `issues` or `pulls`
only: issues
# Issues with these labels will never be considered stale
exemptLabels:
- pinned
- bug
- enhancement
- feature-request
- help wanted
- work-in-progress
- pending release
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs in the next 7 days. Thank you
for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false
================================================
FILE: .github/trivy/daily-scan.trivyignore.yaml
================================================
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
# Trivy ignore file for daily scans.
# This file is intentionally empty. Daily scans should flag all CVEs.
# See: https://aquasecurity.github.io/trivy/latest/docs/configuration/filtering/
# Format:
# - id: <CVE-###>
# statement: "<Why are we excluding?> <link to CVE where we can track status>"
# expired_at: <required - YYYY-MM-DD>
vulnerabilities: []
================================================
FILE: .github/workflows/IntegrationTesting.yaml
================================================
name: Integration Testing
on:
push:
branches:
- master
permissions:
id-token: write
contents: read
jobs:
build_SDK:
name: Build X-Ray Python SDK
runs-on: ubuntu-latest
steps:
- name: Pull in source code from aws-xray-sdk-python Github repository
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0
- name: Setup python
uses: actions/setup-python@7f4fc3e22c37d6ff65e88745f38bd3157c663f7c #v4.9.1
with:
python-version: '3.8'
- name: Build X-Ray Python SDK
run: python setup.py sdist
- name: Upload SDK build artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2
with:
name: sdk-build-artifact
path: .
build_WebApp:
name: Build Web Application
needs: build_SDK
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0
- name: Setup python
uses: actions/setup-python@7f4fc3e22c37d6ff65e88745f38bd3157c663f7c #v4.9.1
with:
python-version: '3.8'
- name: Download X-Ray SDK build artifact
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 #v4.3.0
with:
name: sdk-build-artifact
path: ./sample-apps/flask
- name: Build WebApp with X-Ray Python SDK
run: pip3 install . -t .
working-directory: ./sample-apps/flask
- name: Zip up the deployment package
run: zip -r deploy.zip . -x '*.git*'
working-directory: ./sample-apps/flask
- name: Upload WebApp with X-Ray SDK build artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2
with:
name: sdk-flask-build-artifact
path: ./sample-apps/flask/deploy.zip
deploy_WebApp:
name: Deploy Web Application
needs: build_WebApp
runs-on: ubuntu-latest
steps:
- name: Checkout X-Ray SDK to get terraform source
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0
- name: Download WebApp with X-Ray SDK build artifact
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 #v4.3.0
with:
name: sdk-flask-build-artifact
- name: Copy deployment package to terraform directory
run: cp deploy.zip ./terraform
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a #v4.3.1
with:
role-to-assume: ${{ secrets.AWS_INTEG_TEST_ROLE_ARN }}
aws-region: us-west-2
- name: Setup Terraform
uses: hashicorp/setup-terraform@633666f66e0061ca3b725c73b2ec20cd13a8fdd1 #v2.0.3
- name: Terraform Init
run: terraform init
working-directory: ./terraform
- name: Terraform Validate
run: terraform validate -no-color
working-directory: ./terraform
- name: Terraform Plan
run: terraform plan -var-file="fixtures.us-west-2.tfvars" -no-color
env:
TF_VAR_resource_prefix: '${{ github.run_id }}-${{ github.run_number }}'
continue-on-error: true
working-directory: ./terraform
- name: Terraform Apply
run: terraform apply -var-file="fixtures.us-west-2.tfvars" -auto-approve
env:
TF_VAR_resource_prefix: '${{ github.run_id }}-${{ github.run_number }}'
working-directory: ./terraform
- name: Upload terraform state files for destroying resources
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2
with:
name: terraform-state-artifact
path: ./terraform
test_WebApp:
name: Test WebApp
needs: deploy_WebApp
runs-on: ubuntu-latest
steps:
- uses: actions/setup-java@17f84c3641ba7b8f6deff6309fc4c864478f5d62 #v3.14.1
with:
distribution: 'zulu'
java-version: 14
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a #v4.3.1
with:
role-to-assume: ${{ secrets.AWS_INTEG_TEST_ROLE_ARN }}
aws-region: us-west-2
- name: Checkout test framework
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0
with:
repository: aws-observability/aws-otel-test-framework
ref: terraform
- name: Run testing suite
run: ./gradlew :validator:run --args='-c default-xray-trace-validation.yml --endpoint http://${{ github.run_id }}-${{ github.run_number }}-eb-app-env.us-west-2.elasticbeanstalk.com'
cleanup:
name: Resource tear down
needs: test_WebApp
if: always()
runs-on: ubuntu-latest
steps:
- name: Download terraform state artifact
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 #v4.3.0
with:
name: terraform-state-artifact
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a #v4.3.1
with:
role-to-assume: ${{ secrets.AWS_INTEG_TEST_ROLE_ARN }}
aws-region: us-west-2
- name: Setup Terraform
uses: hashicorp/setup-terraform@633666f66e0061ca3b725c73b2ec20cd13a8fdd1 #v2.0.3
- name: Terraform Init
run: terraform init
- name: set permissions to terraform plugins
run: chmod -R a+x .terraform/*
- name: Destroy resources
run: terraform destroy -state="terraform.tfstate" -var-file="fixtures.us-west-2.tfvars" -auto-approve
env:
TF_VAR_resource_prefix: '${{ github.run_id }}-${{ github.run_number }}'
================================================
FILE: .github/workflows/Release.yaml
================================================
name: Release X-Ray Python SDK
on:
workflow_dispatch:
inputs:
version:
description: The version to tag the release with, e.g., 1.2.0, 1.3.0
required: true
jobs:
release:
permissions:
contents: write
runs-on: ubuntu-latest
steps:
- name: Checkout master branch
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0
- name: Create Release
id: create_release
uses: actions/create-release@0cb9c9b65d5d1901c1f53e5e66eaf4afd303e70e #v1.1.4
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: '${{ github.event.inputs.version }}'
release_name: '${{ github.event.inputs.version }} Release'
body: 'See details in [CHANGELOG](https://github.com/aws/aws-xray-sdk-python/blob/master/CHANGELOG.rst)'
draft: true
prerelease: false
================================================
FILE: .github/workflows/UnitTesting.yaml
================================================
name: Unit Testing
permissions:
contents: read
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
test:
runs-on: ubuntu-22.04
env:
py37: 3.7
py38: 3.8
py39: 3.9
py310: '3.10'
py311: '3.11'
py312: '3.12'
DB_DATABASE: test_db
DB_USER: root
DB_PASSWORD: root
strategy:
fail-fast: false
matrix:
python-version: [py37, py38, py39, py310, py311, py312]
testenv: [core, ext]
steps:
- name: Checkout repo
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0
- name: Start MySQL
if: ${{ matrix.testenv == 'ext' }}
run: |
sudo /etc/init.d/mysql start
mysql -e 'CREATE DATABASE ${{ env.DB_DATABASE }};' -u${{ env.DB_USER }} -p${{ env.DB_PASSWORD }}
mysql -e 'CREATE DATABASE test_dburl;' -u${{ env.DB_USER }} -p${{ env.DB_PASSWORD }}
mysql -e "CREATE USER test_dburl_user@localhost IDENTIFIED BY 'test]password';" -u${{ env.DB_USER }} -p${{ env.DB_PASSWORD }}
mysql -e "GRANT ALL PRIVILEGES ON test_dburl.* TO test_dburl_user@localhost;" -u${{ env.DB_USER }} -p${{ env.DB_PASSWORD }}
mysql -e "FLUSH PRIVILEGES;" -u${{ env.DB_USER }} -p${{ env.DB_PASSWORD }}
- name: Setup Python
uses: actions/setup-python@7f4fc3e22c37d6ff65e88745f38bd3157c663f7c #v4.9.1
with:
python-version: ${{ env[matrix.python-version] }}
- name: Install tox
run: pip install "tox<=3.27.1" -U tox-factor setuptools
- name: Cache tox environment
# Preserves .tox directory between runs for faster installs
uses: actions/cache@6f8efc29b200d32929f49075959781ed54ec270c #v3.5.0
with:
path: |
.tox
~/.cache/pip
key: tox-cache-${{ matrix.python-version }}-${{ matrix.testenv }}-${{ hashFiles('tox.ini') }}
- name: Run tox
run: |
tox -f ${{ matrix.python-version }}-${{ matrix.testenv }}
static-code-checks:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0
with:
fetch-depth: 0
- name: Check for versioned GitHub actions
if: always()
run: |
# Get changed GitHub workflow/action files
CHANGED_FILES=$(git diff --name-only origin/${{ github.base_ref }}..HEAD | grep -E "^\.github/(workflows|actions)/.*\.ya?ml$" || true)
if [ -n "$CHANGED_FILES" ]; then
# Check for any versioned actions, excluding comments and this validation script
VIOLATIONS=$(grep -Hn "uses:.*@v" $CHANGED_FILES | grep -v "grep.*uses:.*@v" | grep -v "#.*@v" || true)
if [ -n "$VIOLATIONS" ]; then
echo "Found versioned GitHub actions. Use commit SHAs instead:"
echo "$VIOLATIONS"
exit 1
fi
fi
echo "No versioned actions found in changed files"
================================================
FILE: .github/workflows/continuous-monitoring.yml
================================================
name: Continuous monitoring of distribution channels
on:
workflow_dispatch:
schedule:
- cron: '*/10 * * * *'
permissions:
id-token: write
contents: read
jobs:
smoke-tests:
name: Run smoke tests
runs-on: ubuntu-latest
steps:
- name: Checkout Repository
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a #v4.3.1
with:
role-to-assume: ${{ secrets.AWS_INTEG_TEST_ROLE_ARN }}
aws-region: us-east-1
- uses: actions/setup-python@7f4fc3e22c37d6ff65e88745f38bd3157c663f7c #v4.9.1
with:
python-version: '3.x'
- run: pip install tox
- name: Run smoke tests
id: distribution-availability
run: tox -c tox-distributioncheck.ini
- name: Publish metric on X-Ray Python SDK distribution availability
if: ${{ always() }}
run: |
if [[ "${{ steps.distribution-availability.outcome }}" == "failure" ]]; then
aws cloudwatch put-metric-data --metric-name XRayPythonSDKDistributionUnavailability --dimensions failure=rate --namespace MonitorSDK --value 1 --timestamp $(date +%s)
else
aws cloudwatch put-metric-data --metric-name XRayPythonSDKDistributionUnavailability --dimensions failure=rate --namespace MonitorSDK --value 0 --timestamp $(date +%s)
fi
================================================
FILE: .github/workflows/daily-scan.yml
================================================
## Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
## SPDX-License-Identifier: Apache-2.0
# Performs a daily scan of:
# * The X-Ray Python SDK published artifact dependencies, using Trivy
# * Project dependencies, using DependencyCheck
#
# Publishes results to CloudWatch Metrics.
name: Daily scan
on:
schedule: # scheduled to run every 6 hours
- cron: '20 */6 * * *' # "At minute 20 past every 6th hour."
workflow_dispatch: # be able to run the workflow on demand
env:
AWS_DEFAULT_REGION: us-east-1
permissions:
id-token: write
contents: read
jobs:
scan_and_report:
runs-on: ubuntu-latest
steps:
- name: Checkout repo for dependency scan
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0
with:
fetch-depth: 0
- name: Setup Python for dependency scan
uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b #v5.3.0
with:
python-version: '3.x'
- name: Install published package for scanning
run: |
mkdir -p scan-target
python -m venv scan-venv
source scan-venv/bin/activate
pip install aws-xray-sdk
pip freeze > scan-target/requirements.txt
- name: Install Java for dependency scan
uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0
with:
java-version: 17
distribution: 'temurin'
- name: Configure AWS credentials for dependency scan
uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0
with:
role-to-assume: ${{ secrets.SECRET_MANAGER_ROLE_ARN }}
aws-region: ${{ env.AWS_DEFAULT_REGION }}
- name: Get secrets for dependency scan
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 #v2.0.10
id: nvd_api_key
with:
secret-ids: |
${{ secrets.NVD_API_KEY_SECRET_ARN }}
OSS_INDEX, ${{ secrets.OSS_INDEX_SECRET_ARN }}
parse-json-secrets: true
# See http://jeremylong.github.io/DependencyCheck/dependency-check-cli/ for installation explanation
- name: Install and run dependency scan
id: dep_scan
if: always()
run: |
gpg --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 259A55407DD6C00299E6607EFFDE55BE73A2D1ED
VERSION=$(curl -s https://jeremylong.github.io/DependencyCheck/current.txt | head -n1 | cut -d" " -f1)
curl -Ls "https://github.com/dependency-check/DependencyCheck/releases/download/v$VERSION/dependency-check-$VERSION-release.zip" --output dependency-check.zip
curl -Ls "https://github.com/dependency-check/DependencyCheck/releases/download/v$VERSION/dependency-check-$VERSION-release.zip.asc" --output dependency-check.zip.asc
gpg --verify dependency-check.zip.asc
unzip dependency-check.zip
./dependency-check/bin/dependency-check.sh --enableExperimental --failOnCVSS 0 --nvdApiKey ${{ env.NVD_API_KEY_NVD_API_KEY }} --ossIndexUsername ${{ env.OSS_INDEX_USERNAME }} --ossIndexPassword ${{ env.OSS_INDEX_PASSWORD }} --suppression .github/dependency-check-suppressions.xml -s "scan-target/"
- name: Print dependency scan results on failure
if: ${{ steps.dep_scan.outcome != 'success' }}
run: less dependency-check-report.html
- name: Perform high severity scan on published artifact dependencies
if: always()
id: high_scan_latest
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # v0.34.2
with:
scan-type: 'fs'
scan-ref: 'scan-target/'
severity: 'CRITICAL,HIGH'
exit-code: '1'
scanners: 'vuln'
env:
TRIVY_IGNOREFILE: .github/trivy/daily-scan.trivyignore.yaml
- name: Perform low severity scan on published artifact dependencies
if: always()
id: low_scan_latest
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # v0.34.2
with:
scan-type: 'fs'
scan-ref: 'scan-target/'
severity: 'MEDIUM,LOW,UNKNOWN'
exit-code: '1'
scanners: 'vuln'
env:
TRIVY_IGNOREFILE: .github/trivy/daily-scan.trivyignore.yaml
- name: Configure AWS Credentials for emitting metrics
if: always()
uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0
with:
role-to-assume: ${{ secrets.AWS_INTEG_TEST_ROLE_ARN }}
aws-region: ${{ env.AWS_DEFAULT_REGION }}
- name: Publish high scan status
if: always()
run: |
value="${{ steps.high_scan_latest.outcome == 'success' && '1.0' || '0.0' }}"
aws cloudwatch put-metric-data --namespace 'MonitorSDK' \
--metric-name Success \
--dimensions repository=${{ github.repository }},branch=${{ github.ref_name }},workflow=daily_scan_high \
--value $value
- name: Publish low scan status
if: always()
run: |
value="${{ steps.low_scan_latest.outcome == 'success' && steps.dep_scan.outcome == 'success' && '1.0' || '0.0' }}"
aws cloudwatch put-metric-data --namespace 'MonitorSDK' \
--metric-name Success \
--dimensions repository=${{ github.repository }},branch=${{ github.ref_name }},workflow=daily_scan_low \
--value $value
================================================
FILE: .gitignore
================================================
.DS_Store
*.pyc
.Python
.cache
.pytest_cache
man
build
bin
include
lib
dist
*.egg
*.egg-info
.tox
.python-version
.pytest_cache
pip-selfcheck.json
.coverage*
htmlcov
venv
.idea
================================================
FILE: CHANGELOG.rst
================================================
=========
CHANGELOG
=========
Unreleased
==========
2.15.0
==========
* bugfix: Fix log stack overflow if metadata contains circular reference `https://github.com/aws/aws-xray-sdk-python/pull/464`
2.14.0
==========
* bugfix: Fix warning message condition for subsegment ending `https://github.com/aws/aws-xray-sdk-python/pull/434`
2.13.1
==========
* improvement: Bump idna from 3.6 to 3.7 in /sample-apps/flask `https://github.com/aws/aws-xray-sdk-python/pull/425`
* bugfix: Fix end_time param type docstring from int to float `https://github.com/aws/aws-xray-sdk-python/pull/426`
* improvement: Bump werkzeug from 3.0.1 to 3.0.3 in /sample-apps/flask `https://github.com/aws/aws-xray-sdk-python/pull/428`
* improvement: [LambdaContext] Create dummy segment when trace header is incomplete `https://github.com/aws/aws-xray-sdk-python/pull/429`
* bugfix: [LambdaContext] Fix logging to only happen inside lambda function `https://github.com/aws/aws-xray-sdk-python/pull/431`
2.13.0
==========
* bugfix: Fix passing multiple values in testenv.passenv in tox.ini `https://github.com/aws/aws-xray-sdk-python/pull/399`
* improvement: Pin flask < 3.x for flask sqlalchemy tests `https://github.com/aws/aws-xray-sdk-python/pull/412`
* improvement: Bump werkzeug from 2.2.3 to 3.0.1 in /sample-apps/flask `https://github.com/aws/aws-xray-sdk-python/pull/413`
* improvement: Fix typo in docs `https://github.com/aws/aws-xray-sdk-python/pull/419`
* bugfix: Fix sqlalchemy_core patch errors for unencoded special characters in db url `https://github.com/aws/aws-xray-sdk-python/pull/418`
* bugfix: Fix EB platform version for integration test `https://github.com/aws/aws-xray-sdk-python/pull/420`
2.12.1
==========
* bugfix: set_trace_entity() in lambda adds segment to thread `PR409 https://github.com/aws/aws-xray-sdk-python/pull/409`
* bugfix: Cleanup after drop of support for Python `PR387 https://github.com/aws/aws-xray-sdk-python/pull/387`
2.12.0
==========
* improvement: Default Context Missing Strategy set to Log Error `PR372 https://github.com/aws/aws-xray-sdk-python/pull/372`
* bugfix: Pin tox version to <=3.27.1 to fix CI tests `PR374 https://github.com/aws/aws-xray-sdk-python/pull/374`
* improvement: Sample app dependency update `PR373 https://github.com/aws/aws-xray-sdk-python/pull/373`
* bugfix: Fix pynamodb tests for Python < 3.6 `PR375 https://github.com/aws/aws-xray-sdk-python/pull/375`
* improvement: Use latest GH Actions versions in CI tests `PR365 https://github.com/aws/aws-xray-sdk-python/pull/365`
* improvement: Simplify setup script `PR363 https://github.com/aws/aws-xray-sdk-python/pull/363`
* bugfix: Fix deprecation warnings related to asyncio `PR364 https://github.com/aws/aws-xray-sdk-python/pull/364`
* improvement: Run tests against Python 3.10 and 3.11 `PR376 https://github.com/aws/aws-xray-sdk-python/pull/376`
* improvement: Sample app dependency update `PR380 https://github.com/aws/aws-xray-sdk-python/pull/380`
* bugfix: Pin sqlalchemy version to 1.x to fix tests `PR381 https://github.com/aws/aws-xray-sdk-python/pull/381`
* bugfix: Fix sample app dependencies incompatibility with XRay SDK `PR382 https://github.com/aws/aws-xray-sdk-python/pull/382`
* bugfix: Start MySQL from GH Actions, upgrade Ubuntu, and remove Python versions for unit tests `PR384 https://github.com/aws/aws-xray-sdk-python/pull/384`
2.11.0
==========
* bugfix: Fix TypeError by patching register_default_jsonb from psycopg2 `PR350 https://github.com/aws/aws-xray-sdk-python/pull/350`
* improvement: Add annotations `PR348 https://github.com/aws/aws-xray-sdk-python/pull/348`
* bugfix: Use service parameter to match centralized sampling rules `PR 353 https://github.com/aws/aws-xray-sdk-python/pull/353`
* bugfix: Implement PEP3134 to discover underlying problems with python3 `PR355 https://github.com/aws/aws-xray-sdk-python/pull/355`
* improvement: Allow list TopicArn for SNS PublishBatch request `PR358 https://github.com/aws/aws-xray-sdk-python/pull/358`
* bugfix: Version pinning flask-sqlalchemy version to 2.5.1 or less `PR360 https://github.com/aws/aws-xray-sdk-python/pull/360`
* bugfix: Fix UnboundLocalError when aiohttp server raises a CancelledError `PR356 https://github.com/aws/aws-xray-sdk-python/pull/356`
* improvement: Instrument httpx >= 0.20 `PR357 https://github.com/aws/aws-xray-sdk-python/pull/357`
* improvement: [LambdaContext] persist original trace header `PR362 https://github.com/aws/aws-xray-sdk-python/pull/362`
* bugfix: Run tests against Django 4.x `PR361 https://github.com/aws/aws-xray-sdk-python/pull/361`
* improvement: Oversampling Mitigation `PR366 https://github.com/aws/aws-xray-sdk-python/pull/366`
2.10.0
==========
* bugfix: Only import future for py2. `PR343 <https://github.com/aws/aws-xray-sdk-python/pull/343>`_.
* bugfix: Defensively copy context entities to async thread. `PR340 <https://github.com/aws/aws-xray-sdk-python/pull/340>`_.
* improvement: Added support for IGNORE_ERROR option when context is missing. `PR338 <https://github.com/aws/aws-xray-sdk-python/pull/338>`_.
2.9.0
==========
* bugfix: Change logging behavior to avoid overflow. `PR302 <https://github.com/aws/aws-xray-sdk-python/pull/302>`_.
* improvement: Lazy load samplers to speed up cold start in lambda. `PR312 <https://github.com/aws/aws-xray-sdk-python/pull/312>`_.
* improvement: Replace slow json file name resolver. `PR 306 <https://github.com/aws/aws-xray-sdk-python/pull/306>`_.
2.8.0
==========
* improvement: feat(sqla-core): Add support for rendering Database Specific queries. `PR291 <https://github.com/aws/aws-xray-sdk-python/pull/291>`_.
* bugfix: Fixing broken instrumentation for sqlalchemy >= 1.4.0. `PR289 <https://github.com/aws/aws-xray-sdk-python/pull/289>`_.
* feature: no op trace id generation. `PR293 <https://github.com/aws/aws-xray-sdk-python/pull/293>`_.
* bugfix: Handle exception when sending entity to Daemon. `PR292 <https://github.com/aws/aws-xray-sdk-python/pull/292>`_.
* bugfix: Fixed serialization issue when cause is a string. `PR284 <https://github.com/aws/aws-xray-sdk-python/pull/284>`_.
* improvement: Publish metric on distribution availability. `PR279 <https://github.com/aws/aws-xray-sdk-python/pull/279>`_.
2.7.0
==========
* improvement: Only run integration tests on master. `PR277 <https://github.com/aws/aws-xray-sdk-python/pull/277>`_.
* improvement: Add distribution channel smoke test. `PR276 <https://github.com/aws/aws-xray-sdk-python/pull/276>`_.
* improvement: Replace jsonpickle with json to serialize entity. `PR275 <https://github.com/aws/aws-xray-sdk-python/pull/275>`_.
* bugfix: Always close segment in teardown_request handler. `PR272 <https://github.com/aws/aws-xray-sdk-python/pull/272>`_.
* improvement: Close segment in only _handle_exception in case of Internal Server Error. `PR271 <https://github.com/aws/aws-xray-sdk-python/pull/271>`_.
* bugfix: Handling condition where Entity.cause is not a dict. `PR267 <https://github.com/aws/aws-xray-sdk-python/pull/267>`_.
* improvement: Add ability to ignore some requests from httplib. `PR263 <https://github.com/aws/aws-xray-sdk-python/pull/263>`_.
* feature: Add support for SQLAlchemy Core. `PR264 <https://github.com/aws/aws-xray-sdk-python/pull/264>`_.
* improvement: Added always() to run clean up workflow. `PR259 <https://github.com/aws/aws-xray-sdk-python/pull/259>`_.
* improvement: Allow configuring different Sampler in Django App. `PR252 <https://github.com/aws/aws-xray-sdk-python/pull/252>`_.
* bugfix: Restore python2 compatibility of EC2 plugin. `PR249 <https://github.com/aws/aws-xray-sdk-python/pull/249>`_.
* bugfix: eb solution stack name. `PR251 <https://github.com/aws/aws-xray-sdk-python/pull/251>`_.
* improvement: Integration Test Workflow. `PR246 <https://github.com/aws/aws-xray-sdk-python/pull/246>`_.
* improvement: Include unicode type for annotation value. `PR235 <https://github.com/aws/aws-xray-sdk-python/pull/235>`_.
* improvement: Run tests against Django 3.1 instead of 1.11. `PR240 <https://github.com/aws/aws-xray-sdk-python/pull/240>`_.
* bugfix: Generalize error check for pymysql error type. `PR239 <https://github.com/aws/aws-xray-sdk-python/pull/239>`_.
* bugfix: SqlAlchemy: Close segment even if error was raised. `PR234 <https://github.com/aws/aws-xray-sdk-python/pull/234>`_.
2.6.0
==========
* bugfix: asyncio.Task.current_task PendingDeprecation fix. `PR217 <https://github.com/aws/aws-xray-sdk-python/pull/217>`_.
* bugfix: Added proper TraceID in dummy segments. `PR223 <https://github.com/aws/aws-xray-sdk-python/pull/223>`_.
* improvement: Add testing for current Django versions. `PR200 <https://github.com/aws/aws-xray-sdk-python/pull/200>`_.
* improvement: IMDSv2 support for EC2 plugin. `PR226 <https://github.com/aws/aws-xray-sdk-python/pull/226>`_.
* improvement: Using instance doc to fetch EC2 metadata. Added 2 additional fields. `PR227 <https://github.com/aws/aws-xray-sdk-python/pull/227>`_.
* improvement: Added StaleBot. `PR228 <https://github.com/aws/aws-xray-sdk-python/pull/228>`_.
2.5.0
==========
* bugfix: Downgrade Coverage to 4.5.4. `PR197 <https://github.com/aws/aws-xray-sdk-python/pull/197>`_.
* bugfix: Unwrap context provided to psycopg2.extensions.quote_ident. `PR198 <https://github.com/aws/aws-xray-sdk-python/pull/198>`_.
* feature: extension support as Bottle plugin. `PR204 <https://github.com/aws/aws-xray-sdk-python/pull/204>`_.
* bugfix: streaming_threshold not None check. `PR205 <https://github.com/aws/aws-xray-sdk-python/pull/205>`_.
* bugfix: Add support for Django 2.0 to 3.0. `PR206 <https://github.com/aws/aws-xray-sdk-python/pull/206>`_.
* bugfix: Add PutTraceSegments to the boto whitelist to avoid a catch-22. `PR210 <https://github.com/aws/aws-xray-sdk-python/pull/210>`_.
* feature: Add patch support for pymysql. `PR215 <https://github.com/aws/aws-xray-sdk-python/pull/215>`_.
2.4.3
==========
* bugfix: Downstream Http Calls should use hostname rather than full URL as subsegment name. `PR192 <https://github.com/aws/aws-xray-sdk-python/pull/192>`_.
* improvement: Whitelist SageMakerRuntime InvokeEndpoint operation. `PR183 <https://github.com/aws/aws-xray-sdk-python/pull/183>`_.
* bugfix: Fix patching for PynamoDB4 with botocore 1.13. `PR181 <https://github.com/aws/aws-xray-sdk-python/pull/181>`_.
* bugfix: Add X-Ray client with default empty credentials. `PR180 <https://github.com/aws/aws-xray-sdk-python/pull/180>`_.
* improvement: Faster implementation of Wildcard Matching. `PR178 <https://github.com/aws/aws-xray-sdk-python/pull/178>`_.
* bugfix: Make patch compatible with PynamoDB4. `PR177 <https://github.com/aws/aws-xray-sdk-python/pull/177>`_.
* bugfix: Fix unit tests for newer versions of psycopg2. `PR163 <https://github.com/aws/aws-xray-sdk-python/pull/163>`_.
* improvement: Enable tests with python 3.7. `PR157 <https://github.com/aws/aws-xray-sdk-python/pull/157>`_.
2.4.2
==========
* bugfix: Fix exception processing in Django running in Lambda. `PR145 <https://github.com/aws/aws-xray-sdk-python/pull/145>`_.
* bugfix: Poller threads block main thread from exiting bug. `PR144 <https://github.com/aws/aws-xray-sdk-python/pull/144>`_.
2.4.1
==========
* bugfix: Middlewares should create subsegments only when in the Lambda context running under a Lambda environment. `PR139 <https://github.com/aws/aws-xray-sdk-python/pull/139>`_.
2.4.0
==========
* feature: Add ability to enable/disable the SDK. `PR119 <https://github.com/aws/aws-xray-sdk-python/pull/119>`_.
* feature: Add Serverless Framework Support `PR127 <https://github.com/aws/aws-xray-sdk-python/pull/127>`_.
* feature: Bring aiobotocore support back. `PR125 <https://github.com/aws/aws-xray-sdk-python/pull/125>`_.
* bugfix: Fix httplib invalid scheme detection for HTTPS. `PR122 <https://github.com/aws/aws-xray-sdk-python/pull/122>`_.
* bugfix: Max_trace_back = 0 returns full exception stack trace bug fix. `PR123 <https://github.com/aws/aws-xray-sdk-python/pull/123>`_.
* bugfix: Rename incorrect config module name to the correct global name. `PR130 <https://github.com/aws/aws-xray-sdk-python/pull/130>`_.
* bugfix: Correctly remove password component from SQLAlchemy URLs, preventing... `PR132 <https://github.com/aws/aws-xray-sdk-python/pull/132>`_.
2.3.0
==========
* feature: Stream Django ORM SQL queries and add flag to toggle their streaming. `PR111 <https://github.com/aws/aws-xray-sdk-python/pull/111>`_.
* feature: Recursively patch any given module functions with capture. `PR113 <https://github.com/aws/aws-xray-sdk-python/pull/113>`_.
* feature: Add patch support for pg8000 (Pure Python Driver). `PR115 <https://github.com/aws/aws-xray-sdk-python/pull/115>`_.
* improvement: Remove the dependency on Requests. `PR112 <https://github.com/aws/aws-xray-sdk-python/pull/112>`_.
* bugfix: Fix psycop2 register type. `PR95 <https://github.com/aws/aws-xray-sdk-python/pull/95>`_.
2.2.0
=====
* feature: Added context managers on segment/subsegment capture. `PR97 <https://github.com/aws/aws-xray-sdk-python/pull/97>`_.
* feature: Added AWS SNS topic ARN to the default whitelist file. `PR93 <https://github.com/aws/aws-xray-sdk-python/pull/93>`_.
* bugfix: Fixed an issue on `psycopg2` to support all keywords. `PR91 <https://github.com/aws/aws-xray-sdk-python/pull/91>`_.
* bugfix: Fixed an issue on `endSegment` when there is context missing. `ISSUE98 <https://github.com/aws/aws-xray-sdk-python/issues/98>`_.
* bugfix: Fixed the package description rendered on PyPI. `PR101 <https://github.com/aws/aws-xray-sdk-python/pull/101>`_.
* bugfix: Fixed an issue where `patch_all` could patch the same module multiple times. `ISSUE99 <https://github.com/aws/aws-xray-sdk-python/issues/99>`_.
* bugfix: Fixed the `datetime` to `epoch` conversion on Windows OS. `ISSUE103 <https://github.com/aws/aws-xray-sdk-python/issues/103>`_.
* bugfix: Fixed a wrong segment json key where it should be `sampling_rule_name` rather than `rule_name`.
2.1.0
=====
* feature: Added support for `psycopg2`. `PR83 <https://github.com/aws/aws-xray-sdk-python/pull/83>`_.
* feature: Added support for `pynamodb` >= 3.3.1. `PR88 <https://github.com/aws/aws-xray-sdk-python/pull/88>`_.
* improvement: Improved stack trace recording when exception is thrown in decorators. `PR70 <https://github.com/aws/aws-xray-sdk-python/pull/70>`_.
* bugfix: Argument `sampling_req` in LocalSampler `should_trace` method now becomes optional. `PR89 <https://github.com/aws/aws-xray-sdk-python/pull/89>`_.
* bugfix: Fixed a wrong test setup and leftover poller threads in recorder unit test.
2.0.1
=====
* bugfix: Fixed an issue where manually calling `begin_segment` might break when making sampling decisions. `PR82 <https://github.com/aws/aws-xray-sdk-python/pull/82>`_.
2.0.0
=====
* **Breaking**: The default sampler now launches background tasks to poll sampling rules from X-Ray backend. See the new default sampling strategy in more details here: https://docs.aws.amazon.com/xray/latest/devguide/xray-sdk-python-configuration.html#xray-sdk-python-configuration-sampling.
* **Breaking**: The `should_trace` function in the sampler now takes a dictionary for sampling rule matching.
* **Breaking**: The original sampling modules for local defined rules are moved from `models.sampling` to `models.sampling.local`.
* **Breaking**: The default behavior of `patch_all` changed to selectively patches libraries to avoid double patching. You can use `patch_all(double_patch=True)` to force it to patch ALL supported libraries. See more details on `ISSUE63 <https://github.com/aws/aws-xray-sdk-python/issues/63>`_
* **Breaking**: The latest `botocore` that has new X-Ray service API `GetSamplingRules` and `GetSamplingTargets` are required.
* **Breaking**: Version 2.x doesn't support pynamodb and aiobotocore as it requires botocore >= 1.11.3, which isn't currently supported by the pynamodb and aiobotocore libraries. Please continue to use version 1.x if you're using pynamodb or aiobotocore until those have been updated to use botocore >= 1.11.3.
* feature: Environment variable `AWS_XRAY_DAEMON_ADDRESS` now takes an additional notation in `tcp:127.0.0.1:2000 udp:127.0.0.2:2001` to set TCP and UDP destination separately. By default it assumes a X-Ray daemon listening to both UDP and TCP traffic on `127.0.0.1:2000`.
* feature: Added MongoDB python client support. `PR65 <https://github.com/aws/aws-xray-sdk-python/pull/65>`_.
* bugfix: Support binding connection in sqlalchemy as well as engine. `PR78 <https://github.com/aws/aws-xray-sdk-python/pull/78>`_.
* bugfix: Flask middleware safe request teardown. `ISSUE75 <https://github.com/aws/aws-xray-sdk-python/issues/75>`_.
1.1.2
=====
* bugfix: Fixed an issue on PynamoDB patcher where the capture didn't handle client timeout.
1.1.1
=====
* bugfix: Handle Aiohttp Exceptions as valid responses `PR59 <https://github.com/aws/aws-xray-sdk-python/pull/59>`_.
1.1
===
* feature: Added Sqlalchemy parameterized query capture. `PR34 <https://github.com/aws/aws-xray-sdk-python/pull/34>`_
* bugfix: Allow standalone sqlalchemy integrations without flask_sqlalchemy. `PR53 <https://github.com/aws/aws-xray-sdk-python/pull/53>`_
* bugfix: Give up aiohttp client tracing when there is no open segment and LOG_ERROR is configured. `PR58 <https://github.com/aws/aws-xray-sdk-python/pull/58>`_
* bugfix: Handle missing subsegment when rendering a Django template. `PR54 <https://github.com/aws/aws-xray-sdk-python/pull/54>`_
* Typo fixes on comments and docs.
1.0
===
* Changed development status to `5 - Production/Stable` and removed beta tag.
* feature: Added S3 API parameters to the default whitelist.
* feature: Added new recorder APIs to add annotations/metadata.
* feature: The recorder now adds more runtime and version information to sampled segments.
* feature: Django, Flask and Aiohttp middleware now inject trace header to response headers.
* feature: Added a new API to configure maximum captured stack trace.
* feature: Modularized subsegments streaming logic and now it can be overridden with a custom implementation.
* bugfix(**Breaking**): Subsegment `set_user` API is removed since this attribute is not supported by X-Ray back-end.
* bugfix: Fixed an issue where arbitrary fields in trace header being dropped when calling downstream.
* bugfix: Fixed a compatibility issue between botocore and httplib patcher. `ISSUE48 <https://github.com/aws/aws-xray-sdk-python/issues/48>`_.
* bugfix: Fixed a typo in sqlalchemy decorators. `PR50 <https://github.com/aws/aws-xray-sdk-python/pull/50>`_.
* Updated `README` with more usage examples.
0.97
====
* feature: Support aiohttp client tracing for aiohttp 3.x. `PR42 <https://github.com/aws/aws-xray-sdk-python/pull/42>`_.
* feature: Use the official middleware pattern for Aiohttp ext. `PR29 <https://github.com/aws/aws-xray-sdk-python/pull/29>`_.
* bugfix: Aiohttp middleware serialized URL values incorrectly. `PR37 <https://github.com/aws/aws-xray-sdk-python/pull/37>`_
* bugfix: Don't overwrite plugins list on each `.configure` call. `PR38 <https://github.com/aws/aws-xray-sdk-python/pull/38>`_
* bugfix: Do not swallow `return_value` when context is missing and `LOG_ERROR` is set. `PR44 <https://github.com/aws/aws-xray-sdk-python/pull/44>`_
* bugfix: Loose entity name validation. `ISSUE36 <https://github.com/aws/aws-xray-sdk-python/issues/36>`_
* bugfix: Fix PyPI project page being rendered incorrectly. `ISSUE30 <https://github.com/aws/aws-xray-sdk-python/issues/30>`_
0.96
====
* feature: Add support for SQLAlchemy and Flask-SQLAlchemy. `PR14 <https://github.com/aws/aws-xray-sdk-python/pull/14>`_.
* feature: Add support for PynamoDB calls to DynamoDB. `PR13 <https://github.com/aws/aws-xray-sdk-python/pull/13>`_.
* feature: Add support for httplib calls. `PR19 <https://github.com/aws/aws-xray-sdk-python/pull/19>`_.
* feature: Make streaming threshold configurable through public interface. `ISSUE21 <https://github.com/aws/aws-xray-sdk-python/issues/21>`_.
* bugfix: Drop invalid annotation keys and log a warning. `PR22 <https://github.com/aws/aws-xray-sdk-python/pull/22>`_.
* bugfix: Respect `with` statement on cursor objects in dbapi2 patcher. `PR17 <https://github.com/aws/aws-xray-sdk-python/pull/17>`_.
* bugfix: Don't throw error from built in subsegment capture when `LOG_ERROR` is set. `ISSUE4 <https://github.com/aws/aws-xray-sdk-python/issues/4>`_.
0.95
====
* **Breaking**: AWS API parameter whitelist json file is moved to path `aws_xray_sdk/ext/resources/aws_para_whitelist.json` in `PR6 <https://github.com/aws/aws-xray-sdk-python/pull/6>`_.
* Added aiobotocore/aioboto3 support and async function capture. `PR6 <https://github.com/aws/aws-xray-sdk-python/pull/6>`_
* Added logic to removing segment/subsegment name invalid characters. `PR9 <https://github.com/aws/aws-xray-sdk-python/pull/9>`_
* Temporarily disabled tests run on Django2.0. `PR10 <https://github.com/aws/aws-xray-sdk-python/pull/10>`_
* Code cleanup. `PR11 <https://github.com/aws/aws-xray-sdk-python/pull/11>`_
0.94
====
* Added aiohttp support. `PR3 <https://github.com/aws/aws-xray-sdk-python/pull/3>`_
0.93
====
* The X-Ray SDK for Python is now an open source project. You can follow the project and submit issues and pull requests on GitHub: https://github.com/aws/aws-xray-sdk-python
0.92.2
======
* bugfix: Fixed an issue that caused the X-Ray recorder to omit the origin when recording segments with a service plugin. This caused the service's type to not appear on the service map in the X-Ray console.
0.92.1
======
* bugfix: Fixed an issue that caused all calls to Amazon DynamoDB tables to be grouped under a single node in the service map. With this update, each table gets a separate node.
0.92
====
* feature: Add Flask support
* feature: Add dynamic naming on segment name
0.91.1
======
* bugfix: The SDK has been released as a universal wheel
================================================
FILE: CODE_OF_CONDUCT.md
================================================
## Code of Conduct
This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
opensource-codeofconduct@amazon.com with any additional questions or comments.
================================================
FILE: CONTRIBUTING.md
================================================
# Contributing Guidelines
Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
documentation, we greatly value feedback and contributions from our community.
Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
information to effectively respond to your bug report or contribution.
## Reporting Bugs/Feature Requests
We welcome you to use the GitHub issue tracker to report bugs or suggest features.
When filing an issue, please check [existing open](https://github.com/aws/aws-xray-sdk-python/issues), or [recently closed](https://github.com/aws/aws-xray-sdk-python/issues?utf8=%E2%9C%93&q=is%3Aissue%20is%3Aclosed%20), issues to make sure somebody else hasn't already
reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:
* A reproducible test case or series of steps
* The version of our code being used
* Any modifications you've made relevant to the bug
* Anything unusual about your environment or deployment
## Contributing via Pull Requests
Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
1. You are working against the latest source on the *master* branch.
2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
To send us a pull request, please:
1. Fork the repository.
2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.
3. Ensure local tests pass.
4. Commit to your fork using clear commit messages.
5. Send us a pull request, answering any default questions in the pull request interface.
6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.
GitHub provides additional document on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
[creating a pull request](https://help.github.com/articles/creating-a-pull-request/).
## Finding contributions to work on
Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](https://github.com/aws/aws-xray-sdk-python/labels/help%20wanted) issues is a great place to start.
## Code of Conduct
This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
opensource-codeofconduct@amazon.com with any additional questions or comments.
## Security issue notifications
If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue.
## Licensing
See the [LICENSE](https://github.com/aws/aws-xray-sdk-python/blob/master/LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
We may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes.
================================================
FILE: LICENSE
================================================
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
================================================
FILE: MANIFEST.in
================================================
include aws_xray_sdk/ext/resources/*.json
include aws_xray_sdk/core/sampling/local/*.json
include README.md
include LICENSE
include NOTICE
================================================
FILE: NOTICE
================================================
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
================================================
FILE: README.md
================================================

[](https://codecov.io/gh/aws/aws-xray-sdk-python)
# AWS X-Ray SDK for Python
## :mega: Upcoming Maintenance Mode on February 25, 2026
[The AWS X-Ray SDKs will enter maintenance mode on **`February 25, 2026`**][xray-sdk-daemon-timeline]. During maintenance mode, the X-Ray SDKs and Daemon will only receive critical bug fixes and security updates, and will not be updated to support new features.
We recommend that you migrate to [AWS Distro for OpenTelemetry (ADOT) or OpenTelemetry Instrumentation][xray-otel-migration-docs] to generate traces (through manual or zero-code instrumentation) from your application and send them to AWS X-Ray. OpenTelemetry is the industry-wide standard for tracing instrumentation and observability. It has a large open-source community for support and provides more instrumentations and updates. By adopting an OpenTelemetry solution, developers can leverage the latest services and innovations from AWS CloudWatch.
[xray-otel-migration-docs]: https://docs.aws.amazon.com/xray/latest/devguide/xray-sdk-migration.html
[xray-sdk-daemon-timeline]: https://docs.aws.amazon.com/xray/latest/devguide/xray-daemon-eos.html
-------------------------------------
### OpenTelemetry Python with AWS X-Ray
AWS X-Ray supports using OpenTelemetry Python and the AWS Distro for OpenTelemetry (ADOT) Collector to instrument your application and send trace data to X-Ray. The OpenTelemetry SDKs are an industry-wide standard for tracing instrumentation. They provide more instrumentations and have a larger community for support, but may not have complete feature parity with the X-Ray SDKs. See [choosing between the ADOT and X-Ray SDKs](https://docs.aws.amazon.com/xray/latest/devguide/xray-instrumenting-your-app.html#xray-instrumenting-choosing) for more help with choosing between the two.
If you want additional features when tracing your Python applications, please [open an issue on the OpenTelemetry Python Instrumentation repository](https://github.com/open-telemetry/opentelemetry-python-contrib/issues/new?labels=feature-request&template=feature_request.md&title=X-Ray%20Compatible%20Feature%20Request).
### Python Versions End-of-Support Notice
AWS X-Ray SDK for Python versions `>2.11.0` has dropped support for Python 2.7, 3.4, 3.5, and 3.6.
-------------------------------------

## Installing
The AWS X-Ray SDK for Python is compatible with Python 3.7, 3.8, 3.9, 3.10, and 3.11.
Install the SDK using the following command (the SDK's non-testing dependencies will be installed).
```
pip install aws-xray-sdk
```
To install the SDK's testing dependencies, use the following command.
```
pip install tox
```
## Getting Help
Use the following community resources for getting help with the SDK. We use the GitHub
issues for tracking bugs and feature requests.
* Ask a question in the [AWS X-Ray Forum](https://forums.aws.amazon.com/forum.jspa?forumID=241&start=0).
* Open a support ticket with [AWS Support](http://docs.aws.amazon.com/awssupport/latest/user/getting-started.html).
* If you think you may have found a bug, open an [issue](https://github.com/aws/aws-xray-sdk-python/issues/new).
## Opening Issues
If you encounter a bug with the AWS X-Ray SDK for Python, we want to hear about
it. Before opening a new issue, search the [existing issues](https://github.com/aws/aws-xray-sdk-python/issues)
to see if others are also experiencing the issue. Include the version of the AWS X-Ray
SDK for Python, Python language, and botocore/boto3 if applicable. In addition,
include the repro case when appropriate.
The GitHub issues are intended for bug reports and feature requests. For help and
questions about using the AWS SDK for Python, use the resources listed
in the [Getting Help](https://github.com/aws/aws-xray-sdk-python#getting-help) section. Keeping the list of open issues lean helps us respond in a timely manner.
## Documentation
The [developer guide](https://docs.aws.amazon.com/xray/latest/devguide) provides in-depth
guidance about using the AWS X-Ray service.
The [API Reference](http://docs.aws.amazon.com/xray-sdk-for-python/latest/reference/)
provides guidance for using the SDK and module-level documentation.
## Quick Start
### Configuration
```python
from aws_xray_sdk.core import xray_recorder
xray_recorder.configure(
sampling=False,
context_missing='LOG_ERROR',
plugins=('EC2Plugin', 'ECSPlugin', 'ElasticBeanstalkPlugin'),
daemon_address='127.0.0.1:3000',
dynamic_naming='*mysite.com*'
)
```
### Start a custom segment/subsegment
Using context managers for implicit exceptions recording:
```python
from aws_xray_sdk.core import xray_recorder
with xray_recorder.in_segment('segment_name') as segment:
# Add metadata or annotation here if necessary
segment.put_metadata('key', dict, 'namespace')
with xray_recorder.in_subsegment('subsegment_name') as subsegment:
subsegment.put_annotation('key', 'value')
# Do something here
with xray_recorder.in_subsegment('subsegment2') as subsegment:
subsegment.put_annotation('key2', 'value2')
# Do something else
```
async versions of context managers:
```python
from aws_xray_sdk.core import xray_recorder
async with xray_recorder.in_segment_async('segment_name') as segment:
# Add metadata or annotation here if necessary
segment.put_metadata('key', dict, 'namespace')
async with xray_recorder.in_subsegment_async('subsegment_name') as subsegment:
subsegment.put_annotation('key', 'value')
# Do something here
async with xray_recorder.in_subsegment_async('subsegment2') as subsegment:
subsegment.put_annotation('key2', 'value2')
# Do something else
```
Default begin/end functions:
```python
from aws_xray_sdk.core import xray_recorder
# Start a segment
segment = xray_recorder.begin_segment('segment_name')
# Start a subsegment
subsegment = xray_recorder.begin_subsegment('subsegment_name')
# Add metadata or annotation here if necessary
segment.put_metadata('key', dict, 'namespace')
subsegment.put_annotation('key', 'value')
xray_recorder.end_subsegment()
# Close the segment
xray_recorder.end_segment()
```
### Oversampling Mitigation
To modify the sampling decision at the subsegment level, subsegments that inherit the decision of their direct parent (segment or subsegment) can be created using `xray_recorder.begin_subsegment()` and unsampled subsegments can be created using
`xray_recorder.begin_subsegment_without_sampling()`.
The code snippet below demonstrates creating a sampled or unsampled subsegment based on the sampling decision of each SQS message processed by Lambda.
```python
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.models.subsegment import Subsegment
from aws_xray_sdk.core.utils.sqs_message_helper import SqsMessageHelper
def lambda_handler(event, context):
for message in event['Records']:
if SqsMessageHelper.isSampled(message):
subsegment = xray_recorder.begin_subsegment('sampled_subsegment')
print('sampled - processing SQS message')
else:
subsegment = xray_recorder.begin_subsegment_without_sampling('unsampled_subsegment')
print('unsampled - processing SQS message')
xray_recorder.end_subsegment()
```
The code snippet below demonstrates wrapping a downstream AWS SDK request with an unsampled subsegment.
```python
from aws_xray_sdk.core import xray_recorder, patch_all
import boto3
patch_all()
def lambda_handler(event, context):
subsegment = xray_recorder.begin_subsegment_without_sampling('unsampled_subsegment')
client = boto3.client('sqs')
print(client.list_queues())
xray_recorder.end_subsegment()
```
### Capture
As a decorator:
```python
from aws_xray_sdk.core import xray_recorder
@xray_recorder.capture('subsegment_name')
def myfunc():
# Do something here
myfunc()
```
or as a context manager:
```python
from aws_xray_sdk.core import xray_recorder
with xray_recorder.capture('subsegment_name') as subsegment:
# Do something here
subsegment.put_annotation('mykey', val)
# Do something more
```
Async capture as decorator:
```python
from aws_xray_sdk.core import xray_recorder
@xray_recorder.capture_async('subsegment_name')
async def myfunc():
# Do something here
async def main():
await myfunc()
```
or as context manager:
```python
from aws_xray_sdk.core import xray_recorder
async with xray_recorder.capture_async('subsegment_name') as subsegment:
# Do something here
subsegment.put_annotation('mykey', val)
# Do something more
```
### Adding annotations/metadata using recorder
```python
from aws_xray_sdk.core import xray_recorder
# Start a segment if no segment exist
segment1 = xray_recorder.begin_segment('segment_name')
# This will add the key value pair to segment1 as it is active
xray_recorder.put_annotation('key', 'value')
# Start a subsegment so it becomes the active trace entity
subsegment1 = xray_recorder.begin_subsegment('subsegment_name')
# This will add the key value pair to subsegment1 as it is active
xray_recorder.put_metadata('key', 'value')
if xray_recorder.is_sampled():
    # some expensive annotations/metadata generation code here
val = compute_annotation_val()
metadata = compute_metadata_body()
xray_recorder.put_annotation('mykey', val)
xray_recorder.put_metadata('mykey', metadata)
```
### Generate NoOp Trace and Entity Id
X-Ray Python SDK will by default generate no-op trace and entity id for unsampled requests and secure random trace and entity id for sampled requests. If customer wants to enable generating secure random trace and entity id for all the (sampled/unsampled) requests (this is applicable for trace id injection into logs use case) then they should set the `AWS_XRAY_NOOP_ID` environment variable as False.
### Disabling X-Ray
Often times, it may be useful to be able to disable X-Ray for specific use cases, whether to stop X-Ray from sending traces at any moment, or to test code functionality that originally depended on X-Ray instrumented packages to begin segments prior to the code call. For example, if your application relied on an XRayMiddleware to instrument incoming web requests, and you have a method which begins subsegments based on the segment generated by that middleware, it would be useful to be able to disable X-Ray for your unit tests so that `SegmentNotFound` exceptions are not thrown when you need to test your method.
There are two ways to disable X-Ray, one is through environment variables, and the other is through the SDKConfig module.
**Disabling through the environment variable:**
Prior to running your application, make sure to have the environment variable `AWS_XRAY_SDK_ENABLED` set to `false`.
**Disabling through the SDKConfig module:**
```
from aws_xray_sdk import global_sdk_config
global_sdk_config.set_sdk_enabled(False)
```
**Important Notes:**
* Environment Variables always take precedence over the SDKConfig module when disabling/enabling. If your environment variable is set to `false` while your code calls `global_sdk_config.set_sdk_enabled(True)`, X-Ray will still be disabled.
* If you need to re-enable X-Ray again during runtime and acknowledge disabling/enabling through the SDKConfig module, you may run the following in your application:
```
import os
from aws_xray_sdk import global_sdk_config
del os.environ['AWS_XRAY_SDK_ENABLED']
global_sdk_config.set_sdk_enabled(True)
```
### Trace AWS Lambda functions
```python
from aws_xray_sdk.core import xray_recorder
def lambda_handler(event, context):
# ... some code
subsegment = xray_recorder.begin_subsegment('subsegment_name')
# Code to record
# Add metadata or annotation here, if necessary
subsegment.put_metadata('key', dict, 'namespace')
subsegment.put_annotation('key', 'value')
xray_recorder.end_subsegment()
# ... some other code
```
### Trace ThreadPoolExecutor
```python
import concurrent.futures
import requests
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core import patch
patch(('requests',))
URLS = ['http://www.amazon.com/',
'http://aws.amazon.com/',
'http://example.com/',
'http://www.bilibili.com/',
'http://invalid-domain.com/']
def load_url(url, trace_entity):
# Set the parent X-Ray entity for the worker thread.
xray_recorder.set_trace_entity(trace_entity)
# Subsegment captured from the following HTTP GET will be
# a child of parent entity passed from the main thread.
resp = requests.get(url)
# prevent thread pollution
xray_recorder.clear_trace_entities()
return resp
# Get the current active segment or subsegment from the main thread.
current_entity = xray_recorder.get_trace_entity()
with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
# Pass the active entity from main thread to worker threads.
future_to_url = {executor.submit(load_url, url, current_entity): url for url in URLS}
for future in concurrent.futures.as_completed(future_to_url):
url = future_to_url[future]
try:
data = future.result()
except Exception:
pass
```
### Trace SQL queries
By default, if no other value is provided to `.configure()`, SQL trace streaming is enabled
for all the supported DB engines. Those currently are:
- Any engine attached to the Django ORM.
- Any engine attached to SQLAlchemy.
The behaviour can be toggled by sending the appropriate `stream_sql` value, for example:
```python
from aws_xray_sdk.core import xray_recorder
xray_recorder.configure(service='fallback_name', stream_sql=True)
```
### Patch third-party libraries
```python
from aws_xray_sdk.core import patch
libs_to_patch = ('boto3', 'mysql', 'requests')
patch(libs_to_patch)
```
#### Automatic module patching
Full modules in the local codebase can be recursively patched by providing the module references
to the patch function.
```python
from aws_xray_sdk.core import patch
libs_to_patch = ('boto3', 'requests', 'local.module.ref', 'other_module')
patch(libs_to_patch)
```
An `xray_recorder.capture()` decorator will be applied to all functions and class methods in the
given module and all the modules inside them recursively. Some files/modules can be excluded by
providing to the `patch` function a regex that matches them.
```python
from aws_xray_sdk.core import patch
libs_to_patch = ('boto3', 'requests', 'local.module.ref', 'other_module')
ignore = ('local.module.ref.some_file', 'other_module.some_module\.*')
patch(libs_to_patch, ignore_module_patterns=ignore)
```
### Django
#### Add Django middleware
In django settings.py, use the following.
```python
INSTALLED_APPS = [
# ... other apps
'aws_xray_sdk.ext.django',
]
MIDDLEWARE = [
'aws_xray_sdk.ext.django.middleware.XRayMiddleware',
# ... other middlewares
]
```
You can configure the X-Ray recorder in a Django app under the ‘XRAY_RECORDER’ namespace. For a minimal configuration, the 'AWS_XRAY_TRACING_NAME' is required unless it is specified in an environment variable.
```
XRAY_RECORDER = {
'AWS_XRAY_TRACING_NAME': 'My application', # Required - the segment name for segments generated from incoming requests
}
```
For more information about configuring Django with X-Ray read more about it in the [API reference](https://docs.aws.amazon.com/xray-sdk-for-python/latest/reference/frameworks.html)
#### SQL tracing
If Django's ORM is patched - either using the `AUTO_INSTRUMENT = True` in your settings file
or explicitly calling `patch_db()` - the SQL query trace streaming can then be enabled or
disabled updating the `STREAM_SQL` variable in your settings file. It is enabled by default.
#### Automatic patching
The automatic module patching can also be configured through Django settings.
```python
XRAY_RECORDER = {
'PATCH_MODULES': [
'boto3',
'requests',
'local.module.ref',
'other_module',
],
'IGNORE_MODULE_PATTERNS': [
'local.module.ref.some_file',
'other_module.some_module\.*',
],
...
}
```
If `AUTO_PATCH_PARENT_SEGMENT_NAME` is also specified, then a segment parent will be created
with the supplied name, wrapping the automatic patching so that it captures any dangling
subsegments created on the import patching.
### Django in Lambda
X-Ray can't search on HTTP annotations in subsegments. To enable searching, the middleware adds the HTTP values as annotations.
This allows searching in the X-Ray console like so:
This is configurable in settings with `URLS_AS_ANNOTATION` that has 3 valid values
`LAMBDA` - the default, which uses URLs as annotations by default if running in a lambda context
`ALL` - do this for every request (useful if running in a mixed lambda/other deployment)
`NONE` - don't do this for any (avoiding hitting the 50 annotation limit)
```
annotation.url BEGINSWITH "https://your.url.com/here"
```
### Add Flask middleware
```python
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.ext.flask.middleware import XRayMiddleware
app = Flask(__name__)
xray_recorder.configure(service='fallback_name', dynamic_naming='*mysite.com*')
XRayMiddleware(app, xray_recorder)
```
### Add Bottle middleware(plugin)
```python
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.ext.bottle.middleware import XRayMiddleware
app = Bottle()
xray_recorder.configure(service='fallback_name', dynamic_naming='*mysite.com*')
app.install(XRayMiddleware(xray_recorder))
```
### Serverless Support for Flask & Django & Bottle Using X-Ray
Serverless is an application model that enables you to shift more of your operational responsibilities to AWS. As a result, you can focus only on your applications and services, instead of the infrastructure management tasks such as server provisioning, patching, operating system maintenance, and capacity provisioning. With serverless, you can deploy your web application to [AWS Lambda](https://aws.amazon.com/lambda/) and have customers interact with it through a Lambda-invoking endpoint, such as [Amazon API Gateway](https://aws.amazon.com/api-gateway/).
X-Ray supports the Serverless model out of the box and requires no extra configuration. The middlewares in Lambda generate `Subsegments` instead of `Segments` when an endpoint is reached. This is because `Segments` cannot be generated inside the Lambda function, but it is generated automatically by the Lambda container. Therefore, when using the middlewares with this model, it is important to make sure that your methods only generate `Subsegments`.
The following guide shows an example of setting up a Serverless application that utilizes API Gateway and Lambda:
[Instrumenting Web Frameworks in a Serverless Environment](https://docs.aws.amazon.com/xray/latest/devguide/xray-sdk-python-serverless.html)
### Working with aiohttp
Adding aiohttp middleware. Support aiohttp >= 2.3.
```python
from aiohttp import web
from aws_xray_sdk.ext.aiohttp.middleware import middleware
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.async_context import AsyncContext
xray_recorder.configure(service='fallback_name', context=AsyncContext())
app = web.Application(middlewares=[middleware])
app.router.add_get("/", handler)
web.run_app(app)
```
Tracing aiohttp client. Support aiohttp >=3.
```python
from aws_xray_sdk.ext.aiohttp.client import aws_xray_trace_config
async def foo():
trace_config = aws_xray_trace_config()
async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:
        async with session.get(url) as resp:
await resp.read()
```
### Use SQLAlchemy ORM
The SQLAlchemy integration requires you to override the Session and Query Classes for SQL Alchemy
SQLAlchemy integration uses subsegments so you need to have a segment started before you make a query.
```python
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.ext.sqlalchemy.query import XRaySessionMaker
xray_recorder.begin_segment('SQLAlchemyTest')
Session = XRaySessionMaker(bind=engine)
session = Session()
xray_recorder.end_segment()
app = Flask(__name__)
xray_recorder.configure(service='fallback_name', dynamic_naming='*mysite.com*')
XRayMiddleware(app, xray_recorder)
```
### Add Flask-SQLAlchemy
```python
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.ext.flask.middleware import XRayMiddleware
from aws_xray_sdk.ext.flask_sqlalchemy.query import XRayFlaskSqlAlchemy
app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"
XRayMiddleware(app, xray_recorder)
db = XRayFlaskSqlAlchemy(app)
```
### Ignoring httplib requests
If you want to ignore certain httplib requests you can do so based on the hostname or URL that is being requested. The hostname is matched using the Python [fnmatch library](https://docs.python.org/3/library/fnmatch.html) which does Unix glob style matching.
```python
from aws_xray_sdk.ext.httplib import add_ignored as xray_add_ignored
# ignore requests to test.myapp.com
xray_add_ignored(hostname='test.myapp.com')
# ignore requests to a subdomain of myapp.com with a glob pattern
xray_add_ignored(hostname='*.myapp.com')
# ignore requests to /test-url and /other-test-url
xray_add_ignored(urls=['/test-path', '/other-test-path'])
# ignore requests to myapp.com for /test-url
xray_add_ignored(hostname='myapp.com', urls=['/test-url'])
```
If you use a subclass of httplib to make your requests, you can also filter on the class name that initiates the request. This must use the complete package name to do the match.
```python
from aws_xray_sdk.ext.httplib import add_ignored as xray_add_ignored
# ignore all requests made by botocore
xray_add_ignored(subclass='botocore.awsrequest.AWSHTTPConnection')
```
## License
The AWS X-Ray SDK for Python is licensed under the Apache 2.0 License. See LICENSE and NOTICE.txt for more information.
================================================
FILE: __init__.py
================================================
================================================
FILE: aws_xray_sdk/__init__.py
================================================
from .sdk_config import SDKConfig
global_sdk_config = SDKConfig()
================================================
FILE: aws_xray_sdk/core/__init__.py
================================================
from .async_recorder import AsyncAWSXRayRecorder
from .patcher import patch, patch_all
from .recorder import AWSXRayRecorder
xray_recorder = AsyncAWSXRayRecorder()
__all__ = [
'patch',
'patch_all',
'xray_recorder',
'AWSXRayRecorder',
]
================================================
FILE: aws_xray_sdk/core/async_context.py
================================================
import asyncio
import copy
from .context import Context as _Context
class AsyncContext(_Context):
    """
    Trace-entity storage for asyncio applications.

    Behaves like the synchronous ``Context`` but keeps segments and
    subsegments in task-local storage instead of ``threading.local``,
    and (optionally) installs a task factory so that child tasks inherit
    the parent task's trace context.
    """

    def __init__(self, *args, loop=None, use_task_factory=True, **kwargs):
        super().__init__(*args, **kwargs)
        # Fall back to the current event loop when none is supplied.
        self._loop = asyncio.get_event_loop() if loop is None else loop
        if use_task_factory:
            self._loop.set_task_factory(task_factory)
        # NOTE: the storage is given the *original* ``loop`` argument
        # (possibly None) and resolves its own default, preserving the
        # historical behavior.
        self._local = TaskLocalStorage(loop=loop)

    def clear_trace_entities(self):
        """Drop every trace entity stored for the current task."""
        if self._local is not None:
            self._local.clear()
class TaskLocalStorage:
    """
    Minimal task-local attribute store.

    Values are kept in a ``context`` dict attached to the currently
    running :class:`asyncio.Task`, so every task observes its own set of
    attributes. Outside of a task, reads and writes are silent no-ops
    that yield ``None``.
    """

    def __init__(self, loop=None):
        # Resolve the loop once; all task lookups are scoped to it.
        self._loop = loop if loop is not None else asyncio.get_event_loop()

    def __setattr__(self, name, value):
        if name == '_loop':
            # Real instance attribute, stored normally.
            object.__setattr__(self, name, value)
            return
        # Everything else lives on the current task's context dict.
        task = asyncio.current_task(loop=self._loop)
        if task is None:
            return None
        if not hasattr(task, 'context'):
            task.context = {}
        task.context[name] = value

    def __getattribute__(self, item):
        if item in ('_loop', 'clear'):
            # These resolve to the real object, not the task context.
            return object.__getattribute__(self, item)
        task = asyncio.current_task(loop=self._loop)
        if task is None:
            return None
        if hasattr(task, 'context') and item in task.context:
            return task.context[item]
        raise AttributeError('Task context does not have attribute {0}'.format(item))

    def clear(self):
        """Empty the current task's context dict, if it has one."""
        task = asyncio.current_task(loop=self._loop)
        if task is not None and hasattr(task, 'context'):
            task.context.clear()
def task_factory(loop, coro):
    """
    Custom task factory mirroring ``asyncio.BaseEventLoop.create_task``.

    After creating the task, it propagates the current task's ``context``
    (when one exists) to the new task. The ``entities`` list is copied so
    that concurrent subsegments do not push/pop on a shared list and
    mistakenly adopt sibling subsegments as parents.

    See more: https://github.com/aws/aws-xray-sdk-python/blob/0f13101e4dba7b5c735371cb922f727b1d9f46d8/aws_xray_sdk/core/context.py#L90-L101
    """
    task = asyncio.Task(coro, loop=loop)
    if task._source_traceback:  # flake8: noqa
        # Drop the frame that points at this factory, matching what
        # BaseEventLoop.create_task does internally.
        del task._source_traceback[-1]  # flake8: noqa

    parent = asyncio.current_task(loop=loop)
    if parent is None or not hasattr(parent, 'context'):
        return task

    if parent.context.get('entities'):
        # Shallow-copy the context but give the child its own entities
        # list, so sibling tasks each see the original segment as parent.
        child_context = copy.copy(parent.context)
        child_context['entities'] = list(parent.context['entities'])
    else:
        child_context = parent.context
    task.context = child_context
    return task
================================================
FILE: aws_xray_sdk/core/async_recorder.py
================================================
import time
from aws_xray_sdk.core.recorder import AWSXRayRecorder
from aws_xray_sdk.core.utils import stacktrace
from aws_xray_sdk.core.models.subsegment import SubsegmentContextManager, is_already_recording, subsegment_decorator
from aws_xray_sdk.core.models.segment import SegmentContextManager
class AsyncSegmentContextManager(SegmentContextManager):
    """Async flavor of ``SegmentContextManager``.

    Delegates to the synchronous enter/exit logic so ``async with``
    behaves exactly like ``with``.
    """

    async def __aenter__(self):
        return self.__enter__()

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        return self.__exit__(exc_type, exc_val, exc_tb)
class AsyncSubsegmentContextManager(SubsegmentContextManager):
    """Async context manager/decorator that records a subsegment.

    Used by ``capture_async``/``in_subsegment_async``: as a decorator it
    wraps a coroutine function so every call is recorded in its own
    subsegment; as a context manager it delegates to the synchronous
    enter/exit logic.
    """

    @subsegment_decorator
    async def __call__(self, wrapped, instance, args, kwargs):
        if is_already_recording(wrapped):
            # The wrapped function is already decorated, the subsegment will be created later,
            # just return the result
            return await wrapped(*args, **kwargs)

        # Fall back to the coroutine's own name when no explicit name
        # was provided to the decorator.
        func_name = self.name
        if not func_name:
            func_name = wrapped.__name__

        return await self.recorder.record_subsegment_async(
            wrapped, instance, args, kwargs,
            name=func_name,
            namespace='local',
            meta_processor=None,
        )

    async def __aenter__(self):
        return self.__enter__()

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        return self.__exit__(exc_type, exc_val, exc_tb)
class AsyncAWSXRayRecorder(AWSXRayRecorder):
    """Recorder whose capture helpers additionally support coroutines."""

    def capture_async(self, name=None):
        """
        A decorator that records enclosed function in a subsegment.
        It only works with asynchronous functions.

        params str name: The name of the subsegment. If not specified
        the function name will be used.
        """
        return self.in_subsegment_async(name=name)

    def in_segment_async(self, name=None, **segment_kwargs):
        """
        Return a segment async context manager.

        :param str name: the name of the segment
        :param dict segment_kwargs: remaining arguments passed directly to `begin_segment`
        """
        return AsyncSegmentContextManager(self, name=name, **segment_kwargs)

    def in_subsegment_async(self, name=None, **subsegment_kwargs):
        """
        Return a subsegment async context manager.

        :param str name: the name of the subsegment
        :param dict subsegment_kwargs: remaining arguments passed directly to `begin_subsegment`
        """
        return AsyncSubsegmentContextManager(self, name=name, **subsegment_kwargs)

    async def record_subsegment_async(self, wrapped, instance, args, kwargs, name,
                                      namespace, meta_processor):
        """Await ``wrapped`` inside a subsegment.

        Mirrors the synchronous ``record_subsegment``: exceptions are
        recorded on the subsegment (or handed to ``meta_processor`` when
        one is supplied) and the subsegment is always closed.
        """
        subsegment = self.begin_subsegment(name, namespace)

        exception = None
        stack = None
        return_value = None

        try:
            return_value = await wrapped(*args, **kwargs)
            return return_value
        except Exception as e:
            exception = e
            stack = stacktrace.get_stacktrace(limit=self._max_trace_back)
            raise
        finally:
            # No-op if subsegment is `None` due to `LOG_ERROR`.
            if subsegment is not None:
                end_time = time.time()
                if callable(meta_processor):
                    meta_processor(
                        wrapped=wrapped,
                        instance=instance,
                        args=args,
                        kwargs=kwargs,
                        return_value=return_value,
                        exception=exception,
                        subsegment=subsegment,
                        stack=stack,
                    )
                elif exception:
                    if subsegment:
                        subsegment.add_exception(exception, stack)
                self.end_subsegment(end_time)
================================================
FILE: aws_xray_sdk/core/context.py
================================================
import threading
import logging
import os
from .exceptions.exceptions import SegmentNotFoundException
from .models.dummy_entities import DummySegment
from aws_xray_sdk import global_sdk_config
log = logging.getLogger(__name__)
MISSING_SEGMENT_MSG = 'cannot find the current segment/subsegment, please make sure you have a segment open'
SUPPORTED_CONTEXT_MISSING = ('RUNTIME_ERROR', 'LOG_ERROR', 'IGNORE_ERROR')
CXT_MISSING_STRATEGY_KEY = 'AWS_XRAY_CONTEXT_MISSING'
class Context:
    """
    Thread-local storage for trace entities (segments/subsegments).

    Provides interfaces to inject trace entities (replacing whatever is
    currently stored) and to clean up the storage. When data is accessed
    or mutated while no segment is active, the configured
    ``context_missing`` strategy decides what happens (log an error by
    default, or raise a runtime error).

    This data structure is thread-safe.
    """

    def __init__(self, context_missing='LOG_ERROR'):
        self._local = threading.local()
        # The environment variable, when present, wins over the argument.
        self._context_missing = os.getenv(CXT_MISSING_STRATEGY_KEY, context_missing)

    def put_segment(self, segment):
        """
        Store the segment created by ``xray_recorder`` to the context.
        Any previously stored entities are replaced.
        """
        self._local.entities = [segment]

    def end_segment(self, end_time=None):
        """
        End the current active segment.

        :param float end_time: epoch in seconds. If not specified the current
            system time will be used.
        """
        entity = self.get_trace_entity()
        if not entity:
            log.warning("No segment to end")
            return
        # When the active entity is a subsegment, ending the *segment*
        # means closing that subsegment's parent segment.
        if self._is_subsegment(entity):
            entity.parent_segment.close(end_time)
        else:
            entity.close(end_time)

    def put_subsegment(self, subsegment):
        """
        Store the subsegment created by ``xray_recorder`` to the context.
        A subsegment opened while another is already open becomes the
        child of the existing one.
        """
        parent = self.get_trace_entity()
        if not parent:
            log.warning("Active segment or subsegment not found. Discarded %s." % subsegment.name)
            return
        parent.add_subsegment(subsegment)
        self._local.entities.append(subsegment)

    def end_subsegment(self, end_time=None):
        """
        End the current active subsegment. Return False if there is no
        subsegment to end.

        :param float end_time: epoch in seconds. If not specified the current
            system time will be used.
        """
        entity = self.get_trace_entity()
        if self._is_subsegment(entity):
            entity.close(end_time)
            self._local.entities.pop()
            return True
        if isinstance(entity, DummySegment):
            # SDK disabled: nothing real to close.
            return False
        log.warning("No subsegment to end.")
        return False

    def get_trace_entity(self):
        """
        Return the current trace entity (segment/subsegment). When none
        exists, behavior follows the ``context_missing`` strategy; when
        the SDK is disabled, a DummySegment is returned instead.
        """
        stack = getattr(self._local, 'entities', None)
        if stack:
            return stack[-1]
        if not global_sdk_config.sdk_enabled():
            return DummySegment()
        return self.handle_context_missing()

    def set_trace_entity(self, trace_entity):
        """
        Store the input trace_entity to local context, overwriting any
        existing entities.
        """
        self._local.entities = [trace_entity]

    def clear_trace_entities(self):
        """
        Clear all trace entities stored in the local context. With
        thread-local storage this removes every entity created by the
        current thread.
        """
        self._local.__dict__.clear()

    def handle_context_missing(self):
        """
        Called whenever there is no trace entity to access or mutate.
        """
        strategy = self.context_missing
        if strategy == 'RUNTIME_ERROR':
            raise SegmentNotFoundException(MISSING_SEGMENT_MSG)
        if strategy == 'LOG_ERROR':
            log.error(MISSING_SEGMENT_MSG)

    def _is_subsegment(self, entity):
        # An entity without a 'type' attribute (e.g. a segment) is not a
        # subsegment.
        return getattr(entity, 'type', None) == 'subsegment'

    @property
    def context_missing(self):
        return self._context_missing

    @context_missing.setter
    def context_missing(self, value):
        # Reject unknown strategies and keep the current one.
        if value not in SUPPORTED_CONTEXT_MISSING:
            log.warning('specified context_missing not supported, using default.')
            return
        self._context_missing = value
================================================
FILE: aws_xray_sdk/core/daemon_config.py
================================================
import os
from .exceptions.exceptions import InvalidDaemonAddressException
DAEMON_ADDRESS_KEY = "AWS_XRAY_DAEMON_ADDRESS"
DEFAULT_ADDRESS = '127.0.0.1:2000'


class DaemonConfig:
    """Stores the X-Ray daemon's IP address and ports for UDP and TCP.

    The address string is taken from the ``AWS_XRAY_DAEMON_ADDRESS``
    environment variable first, and otherwise from the recorder's
    ``daemon_address`` configuration.

    Both ``127.0.0.1:2000`` and ``tcp:127.0.0.1:2000 udp:127.0.0.2:2001``
    notations are acceptable; the former means UDP and TCP share the same
    endpoint.

    By default it assumes an X-Ray daemon running at 127.0.0.1:2000
    listening to both UDP and TCP traffic.
    """
    def __init__(self, daemon_address=DEFAULT_ADDRESS):
        """
        :param str daemon_address: raw address string; ``None`` falls back
            to ``DEFAULT_ADDRESS``. The environment variable, when set,
            overrides this argument.
        :raises InvalidDaemonAddressException: if the address is malformed.
        """
        if daemon_address is None:
            daemon_address = DEFAULT_ADDRESS
        val = os.getenv(DAEMON_ADDRESS_KEY, daemon_address)
        configs = val.split(' ')
        if len(configs) == 1:
            self._parse_single_form(configs[0])
        elif len(configs) == 2:
            self._parse_double_form(configs[0], configs[1], val)
        else:
            raise InvalidDaemonAddressException('Invalid daemon address %s specified.' % val)

    def _parse_single_form(self, val):
        # "ip:port" — the same endpoint serves both UDP and TCP.
        try:
            configs = val.split(':')
            self._udp_ip = configs[0]
            self._udp_port = int(configs[1])
            self._tcp_ip = configs[0]
            self._tcp_port = int(configs[1])
        except Exception:
            raise InvalidDaemonAddressException('Invalid daemon address %s specified.' % val)

    def _parse_double_form(self, val1, val2, origin):
        # "tcp:ip:port udp:ip:port" in either order; the protocol prefix
        # decides which endpoint each part configures.
        try:
            configs1 = val1.split(':')
            configs2 = val2.split(':')
            mapping = {
                configs1[0]: configs1,
                configs2[0]: configs2,
            }
            tcp_info = mapping.get('tcp')
            udp_info = mapping.get('udp')
            self._tcp_ip = tcp_info[1]
            self._tcp_port = int(tcp_info[2])
            self._udp_ip = udp_info[1]
            self._udp_port = int(udp_info[2])
        except Exception:
            raise InvalidDaemonAddressException('Invalid daemon address %s specified.' % origin)

    @property
    def udp_ip(self):
        return self._udp_ip

    @property
    def udp_port(self):
        return self._udp_port

    @property
    def tcp_ip(self):
        return self._tcp_ip

    @property
    def tcp_port(self):
        return self._tcp_port
================================================
FILE: aws_xray_sdk/core/emitters/__init__.py
================================================
================================================
FILE: aws_xray_sdk/core/emitters/udp_emitter.py
================================================
import logging
import socket
from aws_xray_sdk.core.daemon_config import DaemonConfig
from ..exceptions.exceptions import InvalidDaemonAddressException
log = logging.getLogger(__name__)
PROTOCOL_HEADER = "{\"format\":\"json\",\"version\":1}"
PROTOCOL_DELIMITER = '\n'
DEFAULT_DAEMON_ADDRESS = '127.0.0.1:2000'


class UDPEmitter:
    """
    The default emitter. Serializes segments/subsegments and ships them
    to the X-Ray daemon over a non-blocking UDP socket. Any error on the
    actual transfer is logged and swallowed so tracing never breaks the
    application.
    """
    def __init__(self, daemon_address=DEFAULT_DAEMON_ADDRESS):
        self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # Non-blocking: never stall the caller on a full send buffer.
        self._socket.setblocking(False)
        self.set_daemon_address(daemon_address)

    def send_entity(self, entity):
        """
        Serialize a segment/subsegment and send it to the X-Ray daemon
        over UDP. By default it doesn't retry on failures.

        :param entity: a trace entity to send to the X-Ray daemon
        """
        try:
            message = PROTOCOL_HEADER + PROTOCOL_DELIMITER + entity.serialize()
            log.debug("sending: %s to %s:%s." % (message, self._ip, self._port))
            self._send_data(message)
        except Exception:
            log.exception("Failed to send entity to Daemon.")

    def set_daemon_address(self, address):
        """
        Resolve the UDP ip and port from a raw daemon address string
        using the ``DaemonConfig`` class utilities.
        """
        if not address:
            return
        daemon_config = DaemonConfig(address)
        self._ip, self._port = daemon_config.udp_ip, daemon_config.udp_port

    @property
    def ip(self):
        return self._ip

    @property
    def port(self):
        return self._port

    def _send_data(self, data):
        self._socket.sendto(data.encode('utf-8'), (self._ip, self._port))

    def _parse_address(self, daemon_address):
        # Split an "ip:port" string into its (str, int) parts.
        try:
            parts = daemon_address.split(':')
            return parts[0], int(parts[1])
        except Exception:
            raise InvalidDaemonAddressException('Invalid daemon address %s specified.' % daemon_address)
================================================
FILE: aws_xray_sdk/core/exceptions/__init__.py
================================================
================================================
FILE: aws_xray_sdk/core/exceptions/exceptions.py
================================================
class InvalidSamplingManifestError(Exception):
    """Raised when a local sampling rule manifest is malformed or invalid."""
    pass
class SegmentNotFoundException(Exception):
    """Raised when an operation requires a segment but none is present."""
    pass
class InvalidDaemonAddressException(Exception):
    """Raised when a daemon address string cannot be parsed."""
    pass
class SegmentNameMissingException(Exception):
    """Raised when a segment is created without a name."""
    pass
class SubsegmentNameMissingException(Exception):
    """Raised when a subsegment is created without a name."""
    pass
class FacadeSegmentMutationException(Exception):
    """Raised on any attempt to mutate an immutable Lambda facade segment."""
    pass
class MissingPluginNames(Exception):
    """Raised when plugin names are required but not provided."""
    pass
class AlreadyEndedException(Exception):
    """Raised when modifying a segment/subsegment that has already ended."""
    pass
================================================
FILE: aws_xray_sdk/core/lambda_launcher.py
================================================
import os
import logging
import threading
from aws_xray_sdk import global_sdk_config
from .models.dummy_entities import DummySegment
from .models.facade_segment import FacadeSegment
from .models.trace_header import TraceHeader
from .context import Context
log = logging.getLogger(__name__)
LAMBDA_TRACE_HEADER_KEY = '_X_AMZN_TRACE_ID'
LAMBDA_TASK_ROOT_KEY = 'LAMBDA_TASK_ROOT'
TOUCH_FILE_DIR = '/tmp/.aws-xray/'
TOUCH_FILE_PATH = '/tmp/.aws-xray/initialized'
def check_in_lambda():
    """
    Return None if SDK is not loaded in AWS Lambda worker.
    Otherwise drop a touch file and return a lambda context.
    """
    if not os.getenv(LAMBDA_TASK_ROOT_KEY):
        return None

    try:
        os.mkdir(TOUCH_FILE_DIR)
    except OSError:
        log.debug('directory %s already exists', TOUCH_FILE_DIR)

    try:
        # Context manager guarantees the handle is closed even if utime fails.
        with open(TOUCH_FILE_PATH, 'w+'):
            pass
        os.utime(TOUCH_FILE_PATH, None)
    except (IOError, OSError):
        # Best effort: failing to drop the touch file must not break startup.
        log.warning("Unable to write to %s. Failed to signal SDK initialization.", TOUCH_FILE_PATH)

    return LambdaContext()
class LambdaContext(Context):
    """
    Lambda service will generate a segment for each function invocation which
    cannot be mutated. The context doesn't keep any manually created segment
    but instead every time ``get_trace_entity()`` gets called it refresh the
    segment based on environment variables set by Lambda worker.
    """
    def __init__(self):
        # Thread-local storage for the facade segment and its open subsegments.
        self._local = threading.local()

    def put_segment(self, segment):
        """
        No-op.
        """
        log.warning('Cannot create segments inside Lambda function. Discarded.')

    def end_segment(self, end_time=None):
        """
        No-op.
        """
        log.warning('Cannot end segment inside Lambda function. Ignored.')

    def put_subsegment(self, subsegment):
        """
        Refresh the segment every time this function is invoked to prevent
        a new subsegment from being attached to a leaked segment/subsegment.
        """
        current_entity = self.get_trace_entity()
        # Discard the subsegment if the parent is a facade segment created
        # while the worker was still initializing, or a dummy segment.
        # (``_is_subsegment`` is provided by the base ``Context`` class.)
        if not self._is_subsegment(current_entity) and (getattr(current_entity, 'initializing', None) or isinstance(current_entity, DummySegment)):
            if global_sdk_config.sdk_enabled() and not os.getenv(LAMBDA_TRACE_HEADER_KEY):
                log.warning("Subsegment %s discarded due to Lambda worker still initializing" % subsegment.name)
            return
        current_entity.add_subsegment(subsegment)
        self._local.entities.append(subsegment)

    def set_trace_entity(self, trace_entity):
        """
        For Lambda context, we additionally store the segment in the thread local.
        """
        if self._is_subsegment(trace_entity):
            segment = trace_entity.parent_segment
        else:
            segment = trace_entity
        setattr(self._local, 'segment', segment)
        setattr(self._local, 'entities', [trace_entity])

    def get_trace_entity(self):
        # Refresh first so a stale segment from a previous invocation is
        # never returned; then return the innermost open entity.
        self._refresh_context()
        if getattr(self._local, 'entities', None):
            return self._local.entities[-1]
        else:
            return self._local.segment

    def _refresh_context(self):
        """
        Get current segment. To prevent resource leaking in Lambda worker,
        every time there is segment present, we compare its trace id to current
        environment variables. If it is different we create a new segment
        and clean up subsegments stored.
        """
        header_str = os.getenv(LAMBDA_TRACE_HEADER_KEY)
        trace_header = TraceHeader.from_header_str(header_str)
        if not global_sdk_config.sdk_enabled():
            # Force the sampling decision off when the whole SDK is disabled.
            trace_header._sampled = False
        segment = getattr(self._local, 'segment', None)
        if segment:
            # Ensure customers don't have leaked subsegments across invocations
            if not trace_header.root or trace_header.root == segment.trace_id:
                return
            else:
                self._initialize_context(trace_header)
        else:
            self._initialize_context(trace_header)

    @property
    def context_missing(self):
        # Context can never be "missing" in Lambda; the segment is always
        # re-derived from the environment.
        return None

    @context_missing.setter
    def context_missing(self, value):
        pass

    def handle_context_missing(self):
        """
        No-op.
        """
        pass

    def _initialize_context(self, trace_header):
        """
        Create a segment based on environment variables set by
        AWS Lambda and initialize storage for subsegments.
        """
        sampled = None
        if not global_sdk_config.sdk_enabled():
            # Force subsequent subsegments to be disabled and turned into DummySegments.
            sampled = False
        elif trace_header.sampled == 0:
            sampled = False
        elif trace_header.sampled == 1:
            sampled = True
        segment = None
        # Incomplete header (worker still initializing) -> dummy parent;
        # complete header -> immutable facade mirroring Lambda's own segment.
        if not trace_header.root or not trace_header.parent or trace_header.sampled is None:
            segment = DummySegment()
            log.debug("Creating NoOp/Dummy parent segment")
        else:
            segment = FacadeSegment(
                name='facade',
                traceid=trace_header.root,
                entityid=trace_header.parent,
                sampled=sampled,
            )
        segment.save_origin_trace_header(trace_header)
        setattr(self._local, 'segment', segment)
        setattr(self._local, 'entities', [])
================================================
FILE: aws_xray_sdk/core/models/__init__.py
================================================
================================================
FILE: aws_xray_sdk/core/models/default_dynamic_naming.py
================================================
from ..utils.search_pattern import wildcard_match
class DefaultDynamicNaming:
    """
    Picks the name for a segment generated from an incoming request.

    The candidate host name (usually taken from the request headers) is
    matched against a pre-defined wildcard pattern; on a match the host
    name itself is used, otherwise the configured fallback name is used.
    """

    def __init__(self, pattern, fallback):
        """
        :param str pattern: the regex-like pattern to be compared against.
            Only ``?`` and ``*`` are supported: ``*`` matches any run of
            characters and ``?`` matches exactly one character.
        :param str fallback: name returned when the candidate host name
            does not match ``pattern``.
        """
        self._pattern = pattern
        self._fallback = fallback

    def get_name(self, host_name):
        """
        Return ``host_name`` if it matches the pattern, else the fallback.
        """
        return host_name if wildcard_match(self._pattern, host_name) else self._fallback
================================================
FILE: aws_xray_sdk/core/models/dummy_entities.py
================================================
import os
from .noop_traceid import NoOpTraceId
from .traceid import TraceId
from .segment import Segment
from .subsegment import Subsegment
class DummySegment(Segment):
    """
    Segment substitute used when ``xray_recorder`` decides not to sample
    a request. All data-recording methods are no-ops (to keep the SDK's
    memory footprint small), subsegments excepted, and the entity is never
    sent to the X-Ray daemon. Creating dummy segments manually is not
    recommended.
    """

    def __init__(self, name='dummy'):
        # AWS_XRAY_NOOP_ID=false opts out of the fixed all-zero ids and
        # falls back to a freshly generated trace id.
        noop_setting = os.getenv('AWS_XRAY_NOOP_ID')
        if noop_setting and noop_setting.lower() == 'false':
            super().__init__(name=name, traceid=TraceId().to_id())
        else:
            super().__init__(name=name, traceid=NoOpTraceId().to_id(), entityid='0000000000000000')
        self.sampled = False

    def set_aws(self, aws_meta):
        """No-op."""

    def put_http_meta(self, key, value):
        """No-op."""

    def put_annotation(self, key, value):
        """No-op."""

    def put_metadata(self, key, value, namespace='default'):
        """No-op."""

    def set_user(self, user):
        """No-op."""

    def set_service(self, service_info):
        """No-op."""

    def apply_status_code(self, status_code):
        """No-op."""

    def add_exception(self, exception, stack, remote=False):
        """No-op."""

    def serialize(self):
        """No-op."""
class DummySubsegment(Subsegment):
    """
    Subsegment substitute created when ``xray_recorder`` opens a subsegment
    under an unsampled segment. All data-recording methods are no-ops and
    the entity is never sent to the X-Ray daemon.
    """

    def __init__(self, segment, name='dummy'):
        super().__init__(name, 'dummy', segment)
        # AWS_XRAY_NOOP_ID=false keeps a real (random) entity id by
        # re-running Entity.__init__; otherwise the fixed all-zero id is used.
        noop_setting = os.getenv('AWS_XRAY_NOOP_ID')
        if noop_setting and noop_setting.lower() == 'false':
            super(Subsegment, self).__init__(name)
        else:
            super(Subsegment, self).__init__(name, entity_id='0000000000000000')
        self.sampled = False

    def set_aws(self, aws_meta):
        """No-op."""

    def put_http_meta(self, key, value):
        """No-op."""

    def put_annotation(self, key, value):
        """No-op."""

    def put_metadata(self, key, value, namespace='default'):
        """No-op."""

    def set_sql(self, sql):
        """No-op."""

    def apply_status_code(self, status_code):
        """No-op."""

    def add_exception(self, exception, stack, remote=False):
        """No-op."""

    def serialize(self):
        """No-op."""
================================================
FILE: aws_xray_sdk/core/models/entity.py
================================================
import logging
import os
import binascii
import time
import string
import json
from ..utils.compat import annotation_value_types
from ..utils.conversion import metadata_to_dict
from .throwable import Throwable
from . import http
from ..exceptions.exceptions import AlreadyEndedException
log = logging.getLogger(__name__)
# Valid characters can be found at http://docs.aws.amazon.com/xray/latest/devguide/xray-api-segmentdocuments.html
_common_invalid_name_characters = '?;*()!$~^<>'
_valid_annotation_key_characters = string.ascii_letters + string.digits + '_'
ORIGIN_TRACE_HEADER_ATTR_KEY = '_origin_trace_header'
class Entity:
    """
    The parent class for segment/subsegment. It holds common properties
    and methods on segment and subsegment.
    """

    def __init__(self, name, entity_id=None):
        """
        :param str name: entity name. Characters that are invalid in X-Ray
            segment documents are stripped with a warning.
        :param str entity_id: optional 16-hexdigit id. A random id is
            generated when not provided.
        """
        if not entity_id:
            self.id = self._generate_random_id()
        else:
            self.id = entity_id

        # required attributes
        self.name = ''.join([c for c in name if c not in _common_invalid_name_characters])
        self.start_time = time.time()
        self.parent_id = None

        if self.name != name:
            # Lazy %-args: message is rendered only if the record is emitted.
            log.warning("Removing Segment/Subsegment Name invalid characters from %s.", name)

        # sampling
        self.sampled = True
        # state
        self.in_progress = True
        # meta fields
        self.http = {}
        self.annotations = {}
        self.metadata = {}
        self.aws = {}
        self.cause = {}
        # child subsegments
        # list is thread-safe
        self.subsegments = []

    def close(self, end_time=None):
        """
        Close the trace entity by setting `end_time`
        and flip the in progress flag to False.

        :param float end_time: Epoch in seconds. If not specified
            current time will be used.
        """
        self._check_ended()
        if end_time:
            self.end_time = end_time
        else:
            self.end_time = time.time()
        self.in_progress = False

    def add_subsegment(self, subsegment):
        """
        Add input subsegment as a child subsegment.
        """
        self._check_ended()
        subsegment.parent_id = self.id

        if not self.sampled and subsegment.sampled:
            log.warning("This sampled subsegment is being added to an unsampled parent segment/subsegment and will be orphaned.")

        self.subsegments.append(subsegment)

    def remove_subsegment(self, subsegment):
        """
        Remove input subsegment from child subsegments.
        """
        self.subsegments.remove(subsegment)

    def put_http_meta(self, key, value):
        """
        Add http related metadata.

        :param str key: Currently supported keys are:
            * url
            * method
            * user_agent
            * client_ip
            * status
            * content_length
        :param value: status and content_length are int and for other
            supported keys string should be used.
        """
        self._check_ended()

        if value is None:
            return

        if key == http.STATUS:
            # The status also drives the fault/error/throttle flags.
            if isinstance(value, str):
                value = int(value)
            self.apply_status_code(value)

        if key in http.request_keys:
            if 'request' not in self.http:
                self.http['request'] = {}
            self.http['request'][key] = value
        elif key in http.response_keys:
            if 'response' not in self.http:
                self.http['response'] = {}
            self.http['response'][key] = value
        else:
            log.warning("ignoring unsupported key %s in http meta.", key)

    def put_annotation(self, key, value):
        """
        Annotate segment or subsegment with a key-value pair.
        Annotations will be indexed for later search query.

        :param str key: annotation key
        :param object value: annotation value. Any type other than
            string/number/bool will be dropped
        """
        self._check_ended()
        if not isinstance(key, str):
            log.warning("ignoring non string type annotation key with type %s.", type(key))
            return

        if not isinstance(value, annotation_value_types):
            log.warning("ignoring unsupported annotation value type %s.", type(value))
            return

        if any(character not in _valid_annotation_key_characters for character in key):
            log.warning("ignoring annotation with unsupported characters in key: '%s'.", key)
            return

        self.annotations[key] = value

    def put_metadata(self, key, value, namespace='default'):
        """
        Add metadata to segment or subsegment. Metadata is not indexed
        but can be later retrieved by BatchGetTraces API.

        :param str namespace: optional. Default namespace is `default`.
            It must be a string and prefix `AWS.` is reserved.
        :param str key: metadata key under specified namespace
        :param object value: any object that can be serialized into JSON string
        """
        self._check_ended()
        if not isinstance(namespace, str):
            log.warning("ignoring non string type metadata namespace")
            return

        if namespace.startswith('AWS.'):
            log.warning("Prefix 'AWS.' is reserved, drop metadata with namespace %s", namespace)
            return

        if self.metadata.get(namespace, None):
            self.metadata[namespace][key] = value
        else:
            self.metadata[namespace] = {key: value}

    def set_aws(self, aws_meta):
        """
        set aws section of the entity.
        This method is called by global recorder and botocore patcher
        to provide additional information about AWS runtime.
        It is not recommended to manually set aws section.
        """
        self._check_ended()
        self.aws = aws_meta

    def add_throttle_flag(self):
        """Mark the entity as throttled (HTTP 429)."""
        self.throttle = True

    def add_fault_flag(self):
        """Mark the entity as faulted (server error / exception)."""
        self.fault = True

    def add_error_flag(self):
        """Mark the entity as errored (client error)."""
        self.error = True

    def apply_status_code(self, status_code):
        """
        When a trace entity is generated under the http context,
        the status code will affect this entity's fault/error/throttle flags.
        Flip these flags based on status code.
        """
        self._check_ended()
        if not status_code:
            return

        if status_code >= 500:
            self.add_fault_flag()
        elif status_code == 429:
            # Throttled requests count as both throttle and error.
            self.add_throttle_flag()
            self.add_error_flag()
        elif status_code >= 400:
            self.add_error_flag()

    def add_exception(self, exception, stack, remote=False):
        """
        Add an exception to trace entities.

        :param Exception exception: the caught exception.
        :param list stack: the output from python built-in
            `traceback.extract_stack()`.
        :param bool remote: If False it means it's a client error
            instead of a downstream service.
        """
        self._check_ended()
        self.add_fault_flag()

        # An exception already recorded on another entity is stored by
        # reference (its cause id) instead of duplicating the details.
        if hasattr(exception, '_recorded'):
            setattr(self, 'cause', getattr(exception, '_cause_id'))
            return

        if not isinstance(self.cause, dict):
            log.warning("The current cause object is not a dict but an id: %s. Resetting the cause and recording the "
                        "current exception", self.cause)
            self.cause = {}

        if 'exceptions' in self.cause:
            exceptions = self.cause['exceptions']
        else:
            exceptions = []

        exceptions.append(Throwable(exception, stack, remote))

        self.cause['exceptions'] = exceptions
        self.cause['working_directory'] = os.getcwd()

    def save_origin_trace_header(self, trace_header):
        """
        Temporarily store additional data fields in trace header
        to the entity for later propagation. The data will be
        cleaned up upon serialization.
        """
        setattr(self, ORIGIN_TRACE_HEADER_ATTR_KEY, trace_header)

    def get_origin_trace_header(self):
        """
        Retrieve saved trace header data.
        """
        return getattr(self, ORIGIN_TRACE_HEADER_ATTR_KEY, None)

    def serialize(self):
        """
        Serialize to JSON document that can be accepted by the
        X-Ray backend service. It uses json to perform serialization.
        """
        return json.dumps(self.to_dict(), default=str)

    def to_dict(self):
        """
        Convert Entity(Segment/Subsegment) object to dict
        with required properties that have non-empty values.
        """
        entity_dict = {}

        for key, value in vars(self).items():
            # Keep booleans (flags) even when False; drop other empty values.
            if isinstance(value, bool) or value:
                if key == 'subsegments':
                    # child subsegments are stored as List
                    subsegments = []
                    for subsegment in value:
                        subsegments.append(subsegment.to_dict())
                    entity_dict[key] = subsegments
                elif key == 'cause':
                    if isinstance(self.cause, dict):
                        entity_dict[key] = {}
                        entity_dict[key]['working_directory'] = self.cause['working_directory']
                        # exceptions are stored as List
                        throwables = []
                        for throwable in value['exceptions']:
                            throwables.append(throwable.to_dict())
                        entity_dict[key]['exceptions'] = throwables
                    else:
                        # cause may be a bare cause id (see add_exception).
                        entity_dict[key] = self.cause
                elif key == 'metadata':
                    entity_dict[key] = metadata_to_dict(value)
                elif key != 'sampled' and key != ORIGIN_TRACE_HEADER_ATTR_KEY:
                    entity_dict[key] = value

        return entity_dict

    def _check_ended(self):
        # Guard shared by every mutator: closed entities are immutable.
        if not self.in_progress:
            raise AlreadyEndedException("Already ended segment and subsegment cannot be modified.")

    def _generate_random_id(self):
        """
        Generate a random 16-digit hex str.
        This is used for generating segment/subsegment id.
        """
        return binascii.b2a_hex(os.urandom(8)).decode('utf-8')
================================================
FILE: aws_xray_sdk/core/models/facade_segment.py
================================================
from .segment import Segment
from ..exceptions.exceptions import FacadeSegmentMutationException
MUTATION_UNSUPPORTED_MESSAGE = 'FacadeSegments cannot be mutated.'
class FacadeSegment(Segment):
    """
    This type of segment should only be used in an AWS Lambda environment.
    It holds the same id, traceid and sampling decision as
    the segment generated by Lambda service but its properties cannot
    be mutated except for its subsegments. If this segment is created
    before Lambda worker finishes initialization, all the child
    subsegments will be discarded.
    """
    def __init__(self, name, entityid, traceid, sampled):
        # Record whether any of id/trace id/sampling decision was missing
        # at creation time, i.e. the Lambda worker was still initializing.
        self.initializing = self._is_initializing(
            entityid=entityid,
            traceid=traceid,
            sampled=sampled,
        )
        super().__init__(
            name=name,
            entityid=entityid,
            traceid=traceid,
            sampled=sampled,
        )

    def close(self, end_time=None):
        """
        Unsupported operation. Will raise an exception.
        """
        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)

    def put_http_meta(self, key, value):
        """
        Unsupported operation. Will raise an exception.
        """
        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)

    def put_annotation(self, key, value):
        """
        Unsupported operation. Will raise an exception.
        """
        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)

    def put_metadata(self, key, value, namespace='default'):
        """
        Unsupported operation. Will raise an exception.
        """
        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)

    def set_aws(self, aws_meta):
        """
        Unsupported operation. Will raise an exception.
        """
        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)

    def set_user(self, user):
        """
        Unsupported operation. Will raise an exception.
        """
        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)

    def add_throttle_flag(self):
        """
        Unsupported operation. Will raise an exception.
        """
        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)

    def add_fault_flag(self):
        """
        Unsupported operation. Will raise an exception.
        """
        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)

    def add_error_flag(self):
        """
        Unsupported operation. Will raise an exception.
        """
        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)

    def add_exception(self, exception, stack, remote=False):
        """
        Unsupported operation. Will raise an exception.
        """
        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)

    def apply_status_code(self, status_code):
        """
        Unsupported operation. Will raise an exception.
        """
        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)

    def serialize(self):
        """
        Unsupported operation. Will raise an exception.
        """
        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)

    def ready_to_send(self):
        """
        Facade segment should never be sent out. This always
        return False.
        """
        return False

    def increment(self):
        """
        Increment total subsegments counter by 1.
        """
        # Unlike Segment.increment, the open-subsegment ref counter is not
        # touched: the facade itself is never closed or sent.
        self._subsegments_counter.increment()

    def decrement_ref_counter(self):
        """
        No-op
        """
        pass

    def _is_initializing(self, entityid, traceid, sampled):
        # Any missing field means the Lambda worker has not finished
        # populating the trace environment yet.
        return not entityid or not traceid or sampled is None
================================================
FILE: aws_xray_sdk/core/models/http.py
================================================
# Keys used in the ``http`` section of segment/subsegment documents.
URL = "url"
METHOD = "method"
USER_AGENT = "user_agent"
CLIENT_IP = "client_ip"
X_FORWARDED_FOR = "x_forwarded_for"
STATUS = "status"
CONTENT_LENGTH = "content_length"

# X-Ray tracing header name as it appears on the wire.
XRAY_HEADER = "X-Amzn-Trace-Id"
# for proxy header re-write
ALT_XRAY_HEADER = "HTTP_X_AMZN_TRACE_ID"

# Keys stored under http['request'] and http['response'] respectively.
request_keys = (URL, METHOD, USER_AGENT, CLIENT_IP, X_FORWARDED_FOR)
response_keys = (STATUS, CONTENT_LENGTH)
================================================
FILE: aws_xray_sdk/core/models/noop_traceid.py
================================================
class NoOpTraceId:
    """
    A trace ID tracks the path of a request through your application.
    A trace collects all the segments generated by a single request.
    A trace ID is required for a segment.

    This variant always yields the fixed all-zero id used for
    no-op (unsampled) entities.
    """
    VERSION = '1'
    DELIMITER = '-'

    def __init__(self):
        """
        Generate a no-op trace id.
        """
        self.start_time = '00000000'
        self.__number = '000000000000000000000000'

    def to_id(self):
        """
        Convert TraceId object to a string of the form
        ``1-00000000-000000000000000000000000``.
        """
        parts = (NoOpTraceId.VERSION, self.start_time, self.__number)
        return NoOpTraceId.DELIMITER.join(parts)
================================================
FILE: aws_xray_sdk/core/models/segment.py
================================================
import copy
import traceback
from .entity import Entity
from .traceid import TraceId
from ..utils.atomic_counter import AtomicCounter
from ..exceptions.exceptions import SegmentNameMissingException
ORIGIN_TRACE_HEADER_ATTR_KEY = '_origin_trace_header'
class SegmentContextManager:
    """
    Context manager pairing a recorder with a segment: entering begins a
    segment via the recorder, exiting records any in-body exception on the
    segment and ends it. Exceptions are never suppressed.
    """

    def __init__(self, recorder, name=None, **segment_kwargs):
        self.recorder = recorder
        self.name = name
        self.segment_kwargs = segment_kwargs
        self.segment = None

    def __enter__(self):
        self.segment = self.recorder.begin_segment(name=self.name, **self.segment_kwargs)
        return self.segment

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.segment is None:
            return
        if exc_type is not None:
            # Attach the traceback (bounded by the recorder's limit) before closing.
            stack = traceback.extract_tb(exc_tb, limit=self.recorder.max_trace_back)
            self.segment.add_exception(exc_val, stack)
        self.recorder.end_segment()
class Segment(Entity):
    """
    The compute resources running your application logic send data
    about their work as segments. A segment provides the resource's name,
    details about the request, and details about the work done.
    """

    def __init__(self, name, entityid=None, traceid=None,
                 parent_id=None, sampled=True):
        """
        Create a segment object.

        :param str name: segment name. If not specified a
            SegmentNameMissingException will be thrown.
        :param str entityid: hexdigits segment id.
        :param str traceid: The trace id of the segment.
        :param str parent_id: The parent id of the segment. It comes
            from id of an upstream segment or subsegment.
        :param bool sampled: If False this segment will not be sent
            to the X-Ray daemon.
        """
        if not name:
            raise SegmentNameMissingException("Segment name is required.")

        super().__init__(name)

        if not traceid:
            traceid = TraceId().to_id()
        self.trace_id = traceid
        if entityid:
            self.id = entityid

        self.in_progress = True
        self.sampled = sampled
        self.user = None

        # ref_counter tracks currently-open subsegments; _subsegments_counter
        # tracks the total number of child subsegments currently held.
        self.ref_counter = AtomicCounter()
        self._subsegments_counter = AtomicCounter()

        if parent_id:
            self.parent_id = parent_id

    def add_subsegment(self, subsegment):
        """
        Add input subsegment as a child subsegment and increment
        reference counter and total subsegments counter.
        """
        super().add_subsegment(subsegment)
        self.increment()

    def increment(self):
        """
        Increment reference counter to track on open subsegments
        and total subsegments counter to track total size of subsegments
        it currently hold.
        """
        self.ref_counter.increment()
        self._subsegments_counter.increment()

    def decrement_ref_counter(self):
        """
        Decrement reference counter by 1 when a subsegment is closed.
        """
        self.ref_counter.decrement()

    def ready_to_send(self):
        """
        Return True if the segment doesn't have any open subsegments
        and itself is not in progress.
        """
        return self.ref_counter.get_current() <= 0 and not self.in_progress

    def get_total_subsegments_size(self):
        """
        Return the number of total subsegments regardless of open or closed.
        """
        return self._subsegments_counter.get_current()

    def decrement_subsegments_size(self):
        """
        Decrement total subsegments by 1. This usually happens when
        a subsegment is streamed out.
        """
        return self._subsegments_counter.decrement()

    def remove_subsegment(self, subsegment):
        """
        Remove the reference of input subsegment.
        """
        super().remove_subsegment(subsegment)
        self.decrement_subsegments_size()

    def set_user(self, user):
        """
        set user of a segment. One segment can only have one user.
        User is indexed and can be later queried.
        """
        super()._check_ended()
        self.user = user

    def set_service(self, service_info):
        """
        Add python runtime and version info.
        This method should be only used by the recorder.
        """
        self.service = service_info

    def set_rule_name(self, rule_name):
        """
        Add the matched centralized sampling rule name
        if a segment is sampled because of that rule.
        This method should be only used by the recorder.
        """
        if not self.aws.get('xray', None):
            self.aws['xray'] = {}
        self.aws['xray']['sampling_rule_name'] = rule_name

    def to_dict(self):
        """
        Convert Segment object to dict with required properties
        that have non-empty values.
        """
        segment_dict = super().to_dict()

        # Internal counters are not part of the X-Ray segment document.
        # pop() with a default is KeyError-proof in case Entity.to_dict
        # already excluded a falsy counter value.
        segment_dict.pop('ref_counter', None)
        segment_dict.pop('_subsegments_counter', None)

        return segment_dict
================================================
FILE: aws_xray_sdk/core/models/subsegment.py
================================================
import copy
import traceback
import wrapt
from .entity import Entity
from ..exceptions.exceptions import SegmentNotFoundException
# Attribute starts with _self_ to prevent wrapt proxying to underlying function
SUBSEGMENT_RECORDING_ATTRIBUTE = '_self___SUBSEGMENT_RECORDING_ATTRIBUTE__'
def set_as_recording(decorated_func, wrapped):
    """
    Flag ``decorated_func`` with the recording marker attribute so that
    stacked subsegment decorators can detect double-decoration.
    """
    # If the wrapped function has the attribute, then it has already been patched
    setattr(decorated_func, SUBSEGMENT_RECORDING_ATTRIBUTE, hasattr(wrapped, SUBSEGMENT_RECORDING_ATTRIBUTE))
def is_already_recording(func):
    """
    Return True if ``func`` is already wrapped by a subsegment-recording
    decorator (i.e. a subsegment will be created further down the stack).
    """
    # The function might have the attribute, but its value might still be false
    # as it might be the first decorator
    return getattr(func, SUBSEGMENT_RECORDING_ATTRIBUTE, False)
@wrapt.decorator
def subsegment_decorator(wrapped, instance, args, kwargs):
    # Apply ``wrapped`` (itself a wrapt decorator body) to the target
    # function, then mark the result so nested subsegment decorators do not
    # create duplicate subsegments.
    decorated_func = wrapt.decorator(wrapped)(*args, **kwargs)
    set_as_recording(decorated_func, wrapped)
    return decorated_func
class SubsegmentContextManager:
    """
    Wrapper for subsegment and recorder to provide a subsegment context
    manager. Also usable as a decorator (via ``__call__``) that records
    the wrapped function in a subsegment.
    """
    def __init__(self, recorder, name=None, **subsegment_kwargs):
        self.name = name
        self.subsegment_kwargs = subsegment_kwargs
        self.recorder = recorder
        self.subsegment = None

    @subsegment_decorator
    def __call__(self, wrapped, instance, args, kwargs):
        if is_already_recording(wrapped):
            # The wrapped function is already decorated, the subsegment will be created later,
            # just return the result
            return wrapped(*args, **kwargs)
        # Default the subsegment name to the wrapped function's name.
        func_name = self.name
        if not func_name:
            func_name = wrapped.__name__
        return self.recorder.record_subsegment(
            wrapped, instance, args, kwargs,
            name=func_name,
            namespace='local',
            meta_processor=None,
        )

    def __enter__(self):
        self.subsegment = self.recorder.begin_subsegment(
            name=self.name, **self.subsegment_kwargs)
        return self.subsegment

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Record any in-body exception before ending the subsegment;
        # exceptions are not suppressed.
        if self.subsegment is None:
            return
        if exc_type is not None:
            self.subsegment.add_exception(
                exc_val,
                traceback.extract_tb(
                    exc_tb,
                    limit=self.recorder.max_trace_back,
                )
            )
        self.recorder.end_subsegment()
class Subsegment(Entity):
    """
    The work done in a single segment can be broken down into subsegments.
    Subsegments provide more granular timing information and details about
    downstream calls that your application made to fulfill the original request.
    A subsegment can contain additional details about a call to an AWS service,
    an external HTTP API, or an SQL database.
    """
    def __init__(self, name, namespace, segment):
        """
        Create a new subsegment.

        :param str name: Subsegment name is required.
        :param str namespace: The namespace of the subsegment. Currently
            support `aws`, `remote` and `local`.
        :param Segment segment: The parent segment
        """
        super().__init__(name)

        if not segment:
            raise SegmentNotFoundException("A parent segment is required for creating subsegments.")

        self.parent_segment = segment
        # Subsegments share the trace id of their parent segment.
        self.trace_id = segment.trace_id

        self.type = 'subsegment'
        self.namespace = namespace

        self.sql = {}

    def add_subsegment(self, subsegment):
        """
        Add input subsegment as a child subsegment and increment
        reference counter and total subsegments counter of the
        parent segment.
        """
        super().add_subsegment(subsegment)
        self.parent_segment.increment()

    def remove_subsegment(self, subsegment):
        """
        Remove input subsegment from child subsegments and
        decrement parent segment total subsegments count.

        :param Subsegment subsegment: subsegment to remove.
        """
        super().remove_subsegment(subsegment)
        self.parent_segment.decrement_subsegments_size()

    def close(self, end_time=None):
        """
        Close the trace entity by setting `end_time`
        and flip the in progress flag to False. Also decrement
        parent segment's ref counter by 1.

        :param float end_time: Epoch in seconds. If not specified
            current time will be used.
        """
        super().close(end_time)
        self.parent_segment.decrement_ref_counter()

    def set_sql(self, sql):
        """
        Set sql related metadata. This function is used by patchers
        for database connectors and is not recommended to
        invoke manually.

        :param dict sql: sql related metadata
        """
        self.sql = sql

    def to_dict(self):
        """
        Convert Subsegment object to dict with required properties
        that have non-empty values.
        """
        subsegment_dict = super().to_dict()

        # The parent reference is an in-memory link, not part of the document.
        del subsegment_dict['parent_segment']

        return subsegment_dict
================================================
FILE: aws_xray_sdk/core/models/throwable.py
================================================
import copy
import os
import binascii
import logging
log = logging.getLogger(__name__)
class Throwable:
    """
    Snapshot of an exception recorded under a trace entity's ``cause``
    section: message, exception type name, normalized stack trace and
    whether the error came from a remote (downstream) service.
    """

    def __init__(self, exception, stack, remote=False):
        """
        :param Exception exception: the caught exception.
        :param list stack: the formatted stack trace gathered
            through the `traceback` module.
        :param bool remote: If False it means it's a client error
            instead of a downstream service.
        """
        self.id = binascii.b2a_hex(os.urandom(8)).decode('utf-8')

        try:
            message = str(exception)
        except Exception:
            # Some exceptions cannot be converted to a string at all.
            message = None

        # do not record non-string exception message
        if isinstance(message, str):
            self.message = message

        self.type = type(exception).__name__
        self.remote = remote

        try:
            self._normalize_stack_trace(stack)
        except Exception:
            self.stack = None
            log.warning("can not parse stack trace string, ignore stack field.")

        if exception:
            # Mark the exception so other entities store a reference (the
            # cause id) instead of duplicating these details.
            setattr(exception, '_recorded', True)
            setattr(exception, '_cause_id', self.id)

    def to_dict(self):
        """
        Convert Throwable object to dict with required properties that
        have non-empty values.
        """
        return {
            key: value
            for key, value in vars(self).items()
            if isinstance(value, bool) or value
        }

    def _normalize_stack_trace(self, stack):
        # Keep only frames outside the SDK itself, reduced to
        # basename / line / stripped label.
        if stack is None:
            return None

        self.stack = []
        for frame in stack:
            path, line, label = frame[0], frame[1], frame[2]
            if 'aws_xray_sdk/' in path:
                continue
            self.stack.append({
                'path': os.path.basename(path).replace('\"', ' ').strip(),
                'line': line,
                'label': label.strip(),
            })
================================================
FILE: aws_xray_sdk/core/models/trace_header.py
================================================
import logging

log = logging.getLogger(__name__)

ROOT = 'Root'
PARENT = 'Parent'
SAMPLE = 'Sampled'
SELF = 'Self'

HEADER_DELIMITER = ";"


class TraceHeader:
    """
    Models the ``X-Amzn-Trace-Id`` tracing header. The first
    X-Ray-integrated service a request hits adds this header; the SDK
    reads it on the way in and echoes it on the way out. See
    `Tracing Header <http://docs.aws.amazon.com/xray/latest/devguide/xray-concepts.html#xray-concepts-tracingheader>`_.
    """

    def __init__(self, root=None, parent=None, sampled=None, data=None):
        """
        :param str root: trace id
        :param str parent: parent id
        :param int sampled: 0 = not sampled, 1 = sampled, '?' = deferred
        :param dict data: arbitrary extra fields
        """
        self._root = root
        self._parent = parent
        self._sampled = None
        self._data = data

        if sampled is None:
            return
        # normalize the incoming decision to 0, 1 or the literal '?'
        if sampled == '?':
            self._sampled = sampled
        if sampled is True or sampled == '1' or sampled == 1:
            self._sampled = 1
        if sampled is False or sampled == '0' or sampled == 0:
            self._sampled = 0

    @classmethod
    def from_header_str(cls, header):
        """
        Parse a tracing header string extracted from incoming HTTP
        request headers into a TraceHeader object. Malformed headers
        are logged and yield an empty TraceHeader.
        """
        if not header:
            return cls()

        try:
            known = {}
            extra = {}
            for part in header.strip().split(HEADER_DELIMITER):
                fields = part.split('=')
                key = fields[0]
                if key in (ROOT, PARENT, SAMPLE):
                    known[key] = fields[1]
                elif key != SELF:
                    # "Self=" entries are injected by ALB and dropped;
                    # every other field is preserved as arbitrary data
                    extra[key] = fields[1]
            return cls(
                root=known.get(ROOT),
                parent=known.get(PARENT),
                sampled=known.get(SAMPLE),
                data=extra,
            )
        except Exception:
            log.warning("malformed tracing header %s, ignore.", header)
            return cls()

    def to_header_str(self):
        """
        Render this object back into a header string suitable for
        injection into outgoing HTTP request headers.
        """
        rendered = []
        if self.root:
            rendered.append(ROOT + '=' + self.root)
        if self.parent:
            rendered.append(PARENT + '=' + self.parent)
        if self.sampled is not None:
            rendered.append(SAMPLE + '=' + str(self.sampled))
        if self.data:
            for key in self.data:
                rendered.append(key + '=' + self.data[key])
        return HEADER_DELIMITER.join(rendered)

    @property
    def root(self):
        """Trace id carried by the header."""
        return self._root

    @property
    def parent(self):
        """Parent segment id carried by the header."""
        return self._parent

    @property
    def sampled(self):
        """Sampling decision carried by the header: 0, 1 or '?'."""
        return self._sampled

    @property
    def data(self):
        """Arbitrary extra fields carried by the header."""
        return self._data
================================================
FILE: aws_xray_sdk/core/models/traceid.py
================================================
import os
import time
import binascii


class TraceId:
    """
    A trace ID groups every segment generated by a single request as
    it travels through the application; each segment requires one.

    Canonical form: ``1-<start time in hex>-<96 random bits in hex>``.
    """

    VERSION = '1'
    DELIMITER = '-'

    def __init__(self):
        """Create a new random trace id anchored at the current time."""
        self.start_time = int(time.time())
        self.__number = binascii.b2a_hex(os.urandom(12)).decode('utf-8')

    def to_id(self):
        """Render this trace id as its canonical string form."""
        return TraceId.DELIMITER.join((
            TraceId.VERSION,
            format(self.start_time, 'x'),
            self.__number,
        ))
================================================
FILE: aws_xray_sdk/core/patcher.py
================================================
import importlib
import inspect
import logging
import os
import pkgutil
import re
import sys
import wrapt
from aws_xray_sdk import global_sdk_config
from .utils.compat import is_classmethod, is_instance_method
log = logging.getLogger(__name__)
# Libraries that have built-in instrumentation hooks under aws_xray_sdk.ext.
SUPPORTED_MODULES = (
    'aiobotocore',
    'botocore',
    'pynamodb',
    'requests',
    'sqlite3',
    'mysql',
    'httplib',
    'pymongo',
    'pymysql',
    'psycopg2',
    'psycopg',
    'pg8000',
    'sqlalchemy_core',
    'httpx',
)

# Subset used by patch_all(double_patch=False). Differs from
# SUPPORTED_MODULES only by omitting 'httplib' — presumably so libraries
# that go through httplib internally are not instrumented twice (see
# patch_all's double_patch parameter).
NO_DOUBLE_PATCH = (
    'aiobotocore',
    'botocore',
    'pynamodb',
    'requests',
    'sqlite3',
    'mysql',
    'pymongo',
    'pymysql',
    'psycopg2',
    'psycopg',
    'pg8000',
    'sqlalchemy_core',
    'httpx',
)

# Names of modules already patched in this process; guards against
# double-patching across patch()/_patch()/_external_module_patch() calls.
_PATCHED_MODULES = set()
def patch_all(double_patch=False):
    """
    Patch every supported library (aioboto3, aiobotocore, boto3,
    botocore, pynamodb, requests, sqlite3, mysql, httplib, pymongo,
    pymysql, psycopg2, pg8000, sqlalchemy_core, httpx and
    mysql-connector)::

        from aws_xray_sdk.core import patch_all

        patch_all()

    :param bool double_patch: enable or disable patching of indirect
        dependencies (httplib).
    """
    targets = SUPPORTED_MODULES if double_patch else NO_DOUBLE_PATCH
    patch(targets, raise_errors=False)
def _is_valid_import(module):
module = module.replace('.', '/')
realpath = os.path.realpath(module)
is_module = os.path.isdir(realpath) and (
os.path.isfile('{}/__init__.py'.format(module)) or os.path.isfile('{}/__init__.pyc'.format(module))
)
is_file = not is_module and (
os.path.isfile('{}.py'.format(module)) or os.path.isfile('{}.pyc'.format(module))
)
return is_module or is_file
def patch(modules_to_patch, raise_errors=True, ignore_module_patterns=None):
    """
    Patch a specific set of modules::

        from aws_xray_sdk.core import patch

        i_want_to_patch = ('botocore')  # a tuple that contains the libs you want to patch
        patch(i_want_to_patch)

    :param tuple modules_to_patch: a tuple containing the list of libraries to be patched
    """
    if not global_sdk_config.sdk_enabled():
        # Patching is a no-op while the SDK is globally disabled.
        log.debug("Skipped patching modules %s because the SDK is currently disabled." % ', '.join(modules_to_patch))
        return

    modules = set()
    for target in modules_to_patch:
        if target == 'boto3':
            # boto3 depends on botocore and patching botocore is sufficient
            modules.add('botocore')
        elif target == 'aioboto3':
            # aioboto3 depends on aiobotocore and patching aiobotocore is sufficient
            modules.add('aiobotocore')
        elif target == 'pynamodb':
            # pynamodb requires botocore to be patched as well
            modules.add('botocore')
            modules.add(target)
        else:
            modules.add(target)

    # Split into natively supported modules, importable user modules,
    # and everything else (which is an error).
    native_modules = {m for m in modules if m in SUPPORTED_MODULES}
    remaining = modules - native_modules
    external_modules = {m for m in remaining if _is_valid_import(m)}
    unsupported_modules = remaining - external_modules

    if unsupported_modules:
        raise Exception('modules %s are currently not supported for patching'
                        % ', '.join(unsupported_modules))

    for m in native_modules:
        _patch_module(m, raise_errors)

    compiled_patterns = [re.compile(pattern) for pattern in ignore_module_patterns or []]
    for m in external_modules:
        _external_module_patch(m, compiled_patterns)
def _patch_module(module_to_patch, raise_errors=True):
    """
    Patch a single natively supported module.

    :param str module_to_patch: name understood by _patch().
    :param bool raise_errors: when True, patching failures propagate;
        when False they are logged at debug level and swallowed.
    """
    try:
        _patch(module_to_patch)
    except Exception:
        if raise_errors:
            raise
        log.debug('failed to patch module %s', module_to_patch)
def _patch(module_to_patch):
    """
    Import the SDK's ext hook for `module_to_patch` and invoke its
    patch() entry point; no-op if the module is already patched.
    """
    if module_to_patch in _PATCHED_MODULES:
        log.debug('%s already patched', module_to_patch)
        return

    hook_path = 'aws_xray_sdk.ext.%s' % module_to_patch
    importlib.import_module(hook_path).patch()

    _PATCHED_MODULES.add(module_to_patch)
    log.info('successfully patched module %s', module_to_patch)
def _patch_func(parent, func_name, func, modifier=lambda x: x):
    """
    Wrap `func` with xray_recorder.capture and install it on `parent`.

    Names not defined directly in `parent.__dict__` (i.e. inherited
    members) are left untouched.

    :param modifier: optional wrapper (e.g. staticmethod) applied to
        the captured function before installation.
    """
    if func_name not in parent.__dict__:
        # Ignore functions not directly defined in parent, i.e. exclude inherited ones
        return

    from aws_xray_sdk.core import xray_recorder

    capture_name = func_name
    if capture_name.startswith('__') and capture_name.endswith('__'):
        # qualify dunder names so their subsegments stay identifiable
        capture_name = '{}.{}'.format(parent.__name__, capture_name)

    captured = xray_recorder.capture(name=capture_name)(func)
    setattr(parent, func_name, modifier(captured))
def _patch_class(module, cls):
    """
    Recursively wrap the members of `cls` that are defined in `module`.

    Runs in three passes: nested classes first, then bound methods
    (where classmethods surface), then plain functions (instance and
    static methods).
    """
    # Pass 1: nested classes declared in the same module.
    for member_name, member in inspect.getmembers(cls, inspect.isclass):
        if member.__module__ == module.__name__:
            _patch_class(module, member)

    # Pass 2: bound methods defined in the module.
    for member_name, member in inspect.getmembers(cls, inspect.ismethod):
        if member.__module__ != module.__name__:
            continue
        if is_classmethod(member):
            # classmethods are internally generated through descriptors. The classmethod
            # decorator must be the last applied, so we cannot apply another one on top
            log.warning('Cannot automatically patch classmethod %s.%s, '
                        'please apply decorator manually', cls.__name__, member_name)
        else:
            _patch_func(cls, member_name, member)

    # Pass 3: plain functions — instance methods and staticmethods.
    for member_name, member in inspect.getmembers(cls, inspect.isfunction):
        if member.__module__ != module.__name__:
            continue
        if is_instance_method(cls, member_name, member):
            _patch_func(cls, member_name, member)
        else:
            _patch_func(cls, member_name, member, modifier=staticmethod)
def _on_import(module):
    """
    Wrap every function and class defined at the top level of `module`
    (imports from other modules are skipped).
    """
    for member_name, member in inspect.getmembers(module, inspect.isfunction):
        if member.__module__ == module.__name__:
            _patch_func(module, member_name, member)
    for member_name, member in inspect.getmembers(module, inspect.isclass):
        if member.__module__ == module.__name__:
            _patch_class(module, member)
def _external_module_patch(module, ignore_module_patterns):
    # Recursively instrument a user (non-natively-supported) module and
    # all of its submodules, honoring the compiled ignore patterns and
    # the _PATCHED_MODULES registry.
    if module.startswith('.'):
        raise Exception('relative packages not supported for patching: {}'.format(module))
    if module in _PATCHED_MODULES:
        log.debug('%s already patched', module)
    elif any(pattern.match(module) for pattern in ignore_module_patterns):
        log.debug('%s ignored due to rules: %s', module, ignore_module_patterns)
    else:
        if module in sys.modules:
            # already imported: patch its members in place
            _on_import(sys.modules[module])
        else:
            # not imported yet: defer patching until first import
            wrapt.importer.when_imported(module)(_on_import)

    # Walk the package directory for submodules/subpackages.
    for loader, submodule_name, is_module in pkgutil.iter_modules([module.replace('.', '/')]):
        submodule = '.'.join([module, submodule_name])
        if is_module:
            # a subpackage: recurse
            _external_module_patch(submodule, ignore_module_patterns)
        else:
            if submodule in _PATCHED_MODULES:
                log.debug('%s already patched', submodule)
                continue
            elif any(pattern.match(submodule) for pattern in ignore_module_patterns):
                log.debug('%s ignored due to rules: %s', submodule, ignore_module_patterns)
                continue
            if submodule in sys.modules:
                _on_import(sys.modules[submodule])
            else:
                wrapt.importer.when_imported(submodule)(_on_import)
            _PATCHED_MODULES.add(submodule)
            log.info('successfully patched module %s', submodule)

    # Record the package itself once its tree has been handled.
    if module not in _PATCHED_MODULES:
        _PATCHED_MODULES.add(module)
        log.info('successfully patched module %s', module)
================================================
FILE: aws_xray_sdk/core/plugins/__init__.py
================================================
================================================
FILE: aws_xray_sdk/core/plugins/ec2_plugin.py
================================================
import json
import logging
from urllib.request import Request, urlopen
log = logging.getLogger(__name__)
SERVICE_NAME = 'ec2'  # key for this plugin's data under the segment's aws metadata
ORIGIN = 'AWS::EC2::Instance'  # segment origin reported when this plugin is active
IMDS_URL = 'http://169.254.169.254/latest/'  # EC2 instance metadata service base URL
def initialize():
    """
    Populate the module-level `runtime_context` with EC2 instance
    identity data by querying the instance metadata service
    (http://169.254.169.254/latest/meta-data/). Failures leave an
    empty context.
    """
    global runtime_context

    # token with a 60 seconds TTL so it is not lying around for long
    session_token = get_token()
    runtime_context = get_metadata(session_token)
def get_token():
    """
    Fetch an IMDSv2 session token valid for 60 seconds (set via the
    X-aws-ec2-metadata-token-ttl-seconds header). Returns None when
    the token endpoint is unreachable.
    """
    try:
        return do_request(url=IMDS_URL + "api/token",
                          headers={"X-aws-ec2-metadata-token-ttl-seconds": "60"},
                          method="PUT")
    except Exception:
        log.warning("Failed to get token for IMDSv2")
        return None
def get_metadata(token=None):
    """
    Fetch and parse the EC2 instance identity document. A token
    switches the request to IMDSv2; any failure yields an empty dict.
    """
    try:
        request_headers = {"X-aws-ec2-metadata-token": token} if token else None
        document = do_request(url=IMDS_URL + "dynamic/instance-identity/document",
                              headers=request_headers,
                              method="GET")
        return parse_metadata_json(document)
    except Exception:
        log.warning("Failed to get EC2 metadata")
        return {}
def parse_metadata_json(json_str):
    """
    Extract the fields X-Ray records from an EC2 instance identity
    document.

    :param str json_str: raw JSON text of the identity document.
    :return: dict with keys instance_id, availability_zone,
        instance_type and ami_id.
    :raises ValueError: if json_str is not valid JSON.
    :raises KeyError: if an expected field is missing.
    """
    data = json.loads(json_str)
    # fixed: local previously shadowed the builtin `dict`
    metadata = {
        'instance_id': data['instanceId'],
        'availability_zone': data['availabilityZone'],
        'instance_type': data['instanceType'],
        'ami_id': data['imageId']
    }
    return metadata
def do_request(url, headers=None, method="GET"):
    """
    Issue a bare urllib request with a 1-second timeout and return the
    body decoded as UTF-8. Returns None when no url is given.
    """
    if url is None:
        return None

    request = Request(url=url)
    request.headers = headers if headers is not None else {}
    request.method = method
    response = urlopen(request, timeout=1)
    return response.read().decode('utf-8')
================================================
FILE: aws_xray_sdk/core/plugins/ecs_plugin.py
================================================
import socket
import logging

log = logging.getLogger(__name__)

SERVICE_NAME = 'ecs'  # key for this plugin's data under the segment's aws metadata
ORIGIN = 'AWS::ECS::Container'  # segment origin reported when this plugin is active


def initialize():
    """
    Record the container hostname in the module-level
    `runtime_context`; on failure the context is set to None.
    """
    global runtime_context
    try:
        runtime_context = {}
        container_name = socket.gethostname()
        if container_name:
            runtime_context['container'] = container_name
    except Exception:
        runtime_context = None
        log.warning("failed to get ecs container metadata")
================================================
FILE: aws_xray_sdk/core/plugins/elasticbeanstalk_plugin.py
================================================
import logging
import json

log = logging.getLogger(__name__)

# Well-known location of the Elastic Beanstalk environment description.
CONF_PATH = '/var/elasticbeanstalk/xray/environment.conf'
SERVICE_NAME = 'elastic_beanstalk'  # key under the segment's aws metadata
ORIGIN = 'AWS::ElasticBeanstalk::Environment'  # reported segment origin


def initialize():
    """
    Load the Elastic Beanstalk environment description into the
    module-level `runtime_context`; set it to None when the config
    file cannot be read.
    """
    global runtime_context
    try:
        with open(CONF_PATH) as conf_file:
            runtime_context = json.load(conf_file)
    except Exception:
        runtime_context = None
        log.warning("failed to load Elastic Beanstalk environment config file")
================================================
FILE: aws_xray_sdk/core/plugins/utils.py
================================================
import importlib
from ..exceptions.exceptions import MissingPluginNames
# Prefix used to import bundled plugin modules by their short name.
module_prefix = 'aws_xray_sdk.core.plugins.'

# Maps user-facing (case-insensitive) plugin names to module basenames.
PLUGIN_MAPPING = {
    'elasticbeanstalkplugin': 'elasticbeanstalk_plugin',
    'ec2plugin': 'ec2_plugin',
    'ecsplugin': 'ecs_plugin'
}
def get_plugin_modules(plugins):
    """
    Resolve plugin names to their imported plugin modules.

    :param tuple plugins: a tuple of plugin names in str
    :return: tuple of imported plugin modules
    :raises MissingPluginNames: when no plugin names are given
    """
    if not plugins:
        raise MissingPluginNames("input plugin names are required")

    return tuple(
        importlib.import_module(
            '%s%s' % (module_prefix, PLUGIN_MAPPING.get(name.lower(), name.lower()))
        )
        for name in plugins
    )
================================================
FILE: aws_xray_sdk/core/recorder.py
================================================
import copy
import json
import logging
import os
import platform
import time
from aws_xray_sdk import global_sdk_config
from aws_xray_sdk.version import VERSION
from .models.segment import Segment, SegmentContextManager
from .models.subsegment import Subsegment, SubsegmentContextManager
from .models.default_dynamic_naming import DefaultDynamicNaming
from .models.dummy_entities import DummySegment, DummySubsegment
from .emitters.udp_emitter import UDPEmitter
from .streaming.default_streaming import DefaultStreaming
from .context import Context
from .daemon_config import DaemonConfig
from .plugins.utils import get_plugin_modules
from .lambda_launcher import check_in_lambda
from .exceptions.exceptions import SegmentNameMissingException, SegmentNotFoundException
from .utils import stacktrace
log = logging.getLogger(__name__)
# Environment variables that override programmatic configuration.
TRACING_NAME_KEY = 'AWS_XRAY_TRACING_NAME'
DAEMON_ADDR_KEY = 'AWS_XRAY_DAEMON_ADDRESS'
CONTEXT_MISSING_KEY = 'AWS_XRAY_CONTEXT_MISSING'

# SDK identification recorded under each segment's `aws` section.
XRAY_META = {
    'xray': {
        'sdk': 'X-Ray for Python',
        'sdk_version': VERSION
    }
}

# Python runtime details recorded under each segment's `service` section.
SERVICE_INFO = {
    'runtime': platform.python_implementation(),
    'runtime_version': platform.python_version()
}
class AWSXRayRecorder:
"""
A global AWS X-Ray recorder that will begin/end segments/subsegments
and send them to the X-Ray daemon. This recorder is initialized during
loading time so you can use::
from aws_xray_sdk.core import xray_recorder
in your module to access it
"""
def __init__(self):
self._streaming = DefaultStreaming()
context = check_in_lambda()
if context:
# Special handling when running on AWS Lambda.
from .sampling.local.sampler import LocalSampler
self._context = context
self.streaming_threshold = 0
self._sampler = LocalSampler()
else:
from .sampling.sampler import DefaultSampler
self._context = Context()
self._sampler = DefaultSampler()
self._emitter = UDPEmitter()
self._sampling = True
self._max_trace_back = 10
self._plugins = None
self._service = os.getenv(TRACING_NAME_KEY)
self._dynamic_naming = None
self._aws_metadata = copy.deepcopy(XRAY_META)
self._origin = None
self._stream_sql = True
if type(self.sampler).__name__ == 'DefaultSampler':
self.sampler.load_settings(DaemonConfig(), self.context)
def configure(self, sampling=None, plugins=None,
context_missing=None, sampling_rules=None,
daemon_address=None, service=None,
context=None, emitter=None, streaming=None,
dynamic_naming=None, streaming_threshold=None,
max_trace_back=None, sampler=None,
stream_sql=True):
"""Configure global X-Ray recorder.
Configure needs to run before patching thrid party libraries
to avoid creating dangling subsegment.
:param bool sampling: If sampling is enabled, every time the recorder
creates a segment it decides whether to send this segment to
the X-Ray daemon. This setting is not used if the recorder
is running in AWS Lambda. The recorder always respect the incoming
sampling decisions regardless of this setting.
:param sampling_rules: Pass a set of local custom sampling rules.
Can be an absolute path of the sampling rule config json file
or a dictionary that defines those rules. This will also be the
fallback rules in case of centralized sampling opted-in while
the cetralized sampling rules are not available.
:param sampler: The sampler used to make sampling decisions. The SDK
provides two built-in samplers. One is centralized rules based and
the other is local rules based. The former is the default.
:param tuple plugins: plugins that add extra metadata to each segment.
Currently available plugins are EC2Plugin, ECS plugin and
ElasticBeanstalkPlugin.
If you want to disable all previously enabled plugins,
pass an empty tuple ``()``.
:param str context_missing: recorder behavior when it tries to mutate
a segment or add a subsegment but there is no active segment.
RUNTIME_ERROR means the recorder will raise an exception.
LOG_ERROR means the recorder will only log the error and
do nothing.
IGNORE_ERROR means the recorder will do nothing
:param str daemon_address: The X-Ray daemon address where the recorder
sends data to.
:param str service: default segment name if creating a segment without
providing a name.
:param context: You can pass your own implementation of context storage
for active segment/subsegment by overriding the default
``Context`` class.
:param emitter: The emitter that sends a segment/subsegment to
the X-Ray daemon. You can override ``UDPEmitter`` class.
:param dynamic_naming: a string that defines a pattern that host names
should match. Alternatively you can pass a module which
overrides ``DefaultDynamicNaming`` module.
:param streaming: The streaming module to stream out trace documents
when they grow too large. You can override ``DefaultStreaming``
class to have your own implementation of the streaming process.
:param streaming_threshold: If breaks within a single segment it will
start streaming out children subsegments. By default it is the
maximum number of subsegments within a segment.
:param int max_trace_back: The maxinum number of stack traces recorded
by auto-capture. Lower this if a single document becomes too large.
:param bool stream_sql: Whether SQL query texts should be streamed.
Environment variables AWS_XRAY_DAEMON_ADDRESS, AWS_XRAY_CONTEXT_MISSING
and AWS_XRAY_TRACING_NAME respectively overrides arguments
daemon_address, context_missing and service.
"""
if sampling is not None:
self.sampling = sampling
if sampler:
self.sampler = sampler
if service:
self.service = os.getenv(TRACING_NAME_KEY, service)
if sampling_rules:
self._load_sampling_rules(sampling_rules)
if emitter:
self.emitter = emitter
if daemon_address:
self.emitter.set_daemon_address(os.getenv(DAEMON_ADDR_KEY, daemon_address))
if context:
self.context = context
if context_missing:
self.context.context_missing = os.getenv(CONTEXT_MISSING_KEY, context_missing)
if dynamic_naming:
self.dynamic_naming = dynamic_naming
if streaming:
self.streaming = streaming
if streaming_threshold is not None:
self.streaming_threshold = streaming_threshold
if type(max_trace_back) == int and max_trace_back >= 0:
self.max_trace_back = max_trace_back
if stream_sql is not None:
self.stream_sql = stream_sql
if plugins:
plugin_modules = get_plugin_modules(plugins)
for plugin in plugin_modules:
plugin.initialize()
if plugin.runtime_context:
self._aws_metadata[plugin.SERVICE_NAME] = plugin.runtime_context
self._origin = plugin.ORIGIN
# handling explicitly using empty list to clean up plugins.
elif plugins is not None:
self._aws_metadata = copy.deepcopy(XRAY_META)
self._origin = None
if type(self.sampler).__name__ == 'DefaultSampler':
self.sampler.load_settings(DaemonConfig(daemon_address),
self.context, self._origin)
def in_segment(self, name=None, **segment_kwargs):
"""
Return a segment context manager.
:param str name: the name of the segment
:param dict segment_kwargs: remaining arguments passed directly to `begin_segment`
"""
return SegmentContextManager(self, name=name, **segment_kwargs)
def in_subsegment(self, name=None, **subsegment_kwargs):
"""
Return a subsegment context manager.
:param str name: the name of the subsegment
:param dict subsegment_kwargs: remaining arguments passed directly to `begin_subsegment`
"""
return SubsegmentContextManager(self, name=name, **subsegment_kwargs)
def begin_segment(self, name=None, traceid=None,
parent_id=None, sampling=None):
"""
Begin a segment on the current thread and return it. The recorder
only keeps one segment at a time. Create the second one without
closing existing one will overwrite it.
:param str name: the name of the segment
:param str traceid: trace id of the segment
:param int sampling: 0 means not sampled, 1 means sampled
"""
# Disable the recorder; return a generated dummy segment.
if not global_sdk_config.sdk_enabled():
return DummySegment(global_sdk_config.DISABLED_ENTITY_NAME)
seg_name = name or self.service
if not seg_name:
raise SegmentNameMissingException("Segment name is required.")
# Sampling decision is None if not sampled.
# In a sampled case it could be either a string or 1
# depending on if centralized or local sampling rule takes effect.
decision = True
# we respect the input sampling decision
# regardless of recorder configuration.
if sampling == 0:
decision = False
elif sampling:
decision = sampling
elif self.sampling:
decision = self._sampler.should_trace({'service': seg_name})
if not decision:
segment = DummySegment(seg_name)
else:
segment = Segment(name=seg_name, traceid=traceid,
parent_id=parent_id)
self._populate_runtime_context(segment, decision)
self.context.put_segment(segment)
return segment
def end_segment(self, end_time=None):
"""
End the current segment and send it to X-Ray daemon
if it is ready to send. Ready means segment and
all its subsegments are closed.
:param float end_time: segment completion in unix epoch in seconds.
"""
# When the SDK is disabled we return
if not global_sdk_config.sdk_enabled():
return
self.context.end_segment(end_time)
segment = self.current_segment()
if segment and segment.ready_to_send():
self._send_segment()
def current_segment(self):
"""
Return the currently active segment. In a multithreading environment,
this will make sure the segment returned is the one created by the
same thread.
"""
entity = self.get_trace_entity()
if self._is_subsegment(entity):
return entity.parent_segment
else:
return entity
def _begin_subsegment_helper(self, name, namespace='local', beginWithoutSampling=False):
'''
Helper method to begin_subsegment and begin_subsegment_without_sampling
'''
# Generating the parent dummy segment is necessary.
# We don't need to store anything in context. Assumption here
# is that we only work with recorder-level APIs.
if not global_sdk_config.sdk_enabled():
return DummySubsegment(DummySegment(global_sdk_config.DISABLED_ENTITY_NAME))
segment = self.current_segment()
if not segment:
log.warning("No segment found, cannot begin subsegment %s." % name)
return None
current_entity = self.get_trace_entity()
if not current_entity.sampled or beginWithoutSampling:
subsegment = DummySubsegment(segment, name)
else:
subsegment = Subsegment(name, namespace, segment)
self.context.put_subsegment(subsegment)
return subsegment
def begin_subsegment(self, name, namespace='local'):
"""
Begin a new subsegment.
If there is open subsegment, the newly created subsegment will be the
child of latest opened subsegment.
If not, it will be the child of the current open segment.
:param str name: the name of the subsegment.
:param str namespace: currently can only be 'local', 'remote', 'aws'.
"""
return self._begin_subsegment_helper(name, namespace)
def begin_subsegment_without_sampling(self, name):
"""
Begin a new unsampled subsegment.
If there is open subsegment, the newly created subsegment will be the
child of latest opened subsegment.
If not, it will be the child of the current open segment.
:param str name: the name of the subsegment.
"""
return self._begin_subsegment_helper(name, beginWithoutSampling=True)
def current_subsegment(self):
"""
Return the latest opened subsegment. In a multithreading environment,
this will make sure the subsegment returned is one created
by the same thread.
"""
if not global_sdk_config.sdk_enabled():
return DummySubsegment(DummySegment(global_sdk_config.DISABLED_ENTITY_NAME))
entity = self.get_trace_entity()
if self._is_subsegment(entity):
return entity
else:
return None
def end_subsegment(self, end_time=None):
"""
End the current active subsegment. If this is the last one open
under its parent segment, the entire segment will be sent.
:param float end_time: subsegment compeletion in unix epoch in seconds.
"""
if not global_sdk_config.sdk_enabled():
return
if not self.context.end_subsegment(end_time):
return
# if segment is already close, we check if we can send entire segment
# otherwise we check if we need to stream some subsegments
if self.current_segment().ready_to_send():
self._send_segment()
else:
self.stream_subsegments()
def put_annotation(self, key, value):
"""
Annotate current active trace entity with a key-value pair.
Annotations will be indexed for later search query.
:param str key: annotation key
:param object value: annotation value. Any type other than
string/number/bool will be dropped
"""
if not global_sdk_config.sdk_enabled():
return
entity = self.get_trace_entity()
if entity and entity.sampled:
entity.put_annotation(key, value)
def put_metadata(self, key, value, namespace='default'):
"""
Add metadata to the current active trace entity.
Metadata is not indexed but can be later retrieved
by BatchGetTraces API.
:param str namespace: optional. Default namespace is `default`.
It must be a string and prefix `AWS.` is reserved.
:param str key: metadata key under specified namespace
:param object value: any object that can be serialized into JSON string
"""
if not global_sdk_config.sdk_enabled():
return
entity = self.get_trace_entity()
if entity and entity.sampled:
entity.put_metadata(key, value, namespace)
def is_sampled(self):
"""
Check if the current trace entity is sampled or not.
Return `False` if no active entity found.
"""
if not global_sdk_config.sdk_enabled():
# Disabled SDK is never sampled
return False
entity = self.get_trace_entity()
if entity:
return entity.sampled
return False
def get_trace_entity(self):
"""
A pass through method to ``context.get_trace_entity()``.
"""
return self.context.get_trace_entity()
def set_trace_entity(self, trace_entity):
"""
A pass through method to ``context.set_trace_entity()``.
"""
self.context.set_trace_entity(trace_entity)
def clear_trace_entities(self):
"""
A pass through method to ``context.clear_trace_entities()``.
"""
self.context.clear_trace_entities()
def stream_subsegments(self):
"""
Stream all closed subsegments to the daemon
and remove reference to the parent segment.
No-op for a not sampled segment.
"""
segment = self.current_segment()
if self.streaming.is_eligible(segment):
self.streaming.stream(segment, self._stream_subsegment_out)
def capture(self, name=None):
"""
A decorator that records enclosed function in a subsegment.
It only works with synchronous functions.
params str name: The name of the subsegment. If not specified
the function name will be used.
"""
return self.in_subsegment(name=name)
def record_subsegment(self, wrapped, instance, args, kwargs, name,
namespace, meta_processor):
subsegment = self.begin_subsegment(name, namespace)
exception = None
stack = None
return_value = None
try:
return_value = wrapped(*args, **kwargs)
return return_value
except Exception as e:
exception = e
stack = stacktrace.get_stacktrace(limit=self.max_trace_back)
raise
finally:
# No-op if subsegment is `None` due to `LOG_ERROR`.
if subsegment is not None:
end_time = time.time()
if callable(meta_processor):
meta_processor(
wrapped=wrapped,
instance=instance,
args=args,
kwargs=kwargs,
return_value=return_value,
exception=exception,
subsegment=subsegment,
stack=stack,
)
elif exception:
subsegment.add_exception(exception, stack)
self.end_subsegment(end_time)
def _populate_runtime_context(self, segment, sampling_decision):
if self._origin:
setattr(segment, 'origin', self._origin)
segment.set_aws(copy.deepcopy(self._aws_metadata))
segment.set_service(SERVICE_INFO)
if isinstance(sampling_decision, str):
segment.set_rule_name(sampling_decision)
def _send_segment(self):
"""
Send the current segment to X-Ray daemon if it is present and
sampled, then clean up context storage.
The emitter will handle failures.
"""
segment = self.current_segment()
if not segment:
return
if segment.sampled:
self.emitter.send_entity(segment)
self.clear_trace_entities()
def _stream_subsegment_out(self, subsegment):
log.debug("streaming subsegments...")
if subsegment.sampled:
self.emitter.send_entity(subsegment)
def _load_sampling_rules(self, sampling_rules):
if not sampling_rules:
return
if isinstance(sampling_rules, dict):
self.sampler.load_local_rules(sampling_rules)
else:
with open(sampling_rules) as f:
self.sampler.load_local_rules(json.load(f))
def _is_subsegment(self, entity):
return (hasattr(entity, 'type') and entity.type == 'subsegment')
@property
def enabled(self):
return self._enabled
@enabled.setter
def enabled(self, value):
self._enabled = value
@property
def sampling(self):
return self._sampling
@sampling.setter
def sampling(self, value):
self._sampling = value
@property
def sampler(self):
return self._sampler
@sampler.setter
def sampler(self, value):
self._sampler = value
@property
def service(self):
return self._service
@service.setter
def service(self, value):
self._service = value
@property
def dynamic_naming(self):
return self._dynamic_naming
@dynamic_naming.setter
def dynamic_naming(self, value):
if isinstance(value, str):
self._dynamic_naming = DefaultDynamicNaming(value, self.service)
else:
self._dynamic_naming = value
    @property
    def context(self):
        # Context storage backing this recorder — presumably holds the
        # active segment/subsegment; confirm with the Context class.
        return self._context

    @context.setter
    def context(self, cxt):
        self._context = cxt

    @property
    def emitter(self):
        # Entity emitter; ``_send_segment`` and ``_stream_subsegment_out``
        # call its ``send_entity``.
        return self._emitter

    @emitter.setter
    def emitter(self, value):
        self._emitter = value

    @property
    def streaming(self):
        # Streaming module; owns the ``streaming_threshold`` value that
        # the recorder proxies.
        return self._streaming

    @streaming.setter
    def streaming(self, value):
        self._streaming = value
    @property
    def streaming_threshold(self):
        """
        Proxy method to Streaming module's `streaming_threshold` property.
        """
        # Delegates storage so recorder and streaming module stay in sync.
        return self.streaming.streaming_threshold

    @streaming_threshold.setter
    def streaming_threshold(self, value):
        """
        Proxy method to Streaming module's `streaming_threshold` property.
        """
        self.streaming.streaming_threshold = value
    @property
    def max_trace_back(self):
        # Maximum number of stack frames to capture — presumably used
        # when recording exception stack traces; confirm at call sites.
        return self._max_trace_back

    @max_trace_back.setter
    def max_trace_back(self, value):
        self._max_trace_back = value

    @property
    def stream_sql(self):
        # NOTE(review): name suggests whether SQL text is recorded on
        # database subsegments — confirm against the dbapi2 extension.
        return self._stream_sql

    @stream_sql.setter
    def stream_sql(self, value):
        self._stream_sql = value
================================================
FILE: aws_xray_sdk/core/sampling/__init__.py
================================================
================================================
FILE: aws_xray_sdk/core/sampling/connector.py
================================================
import binascii
import os
import time
from datetime import datetime
import botocore.session
from botocore import UNSIGNED
from botocore.client import Config
from .sampling_rule import SamplingRule
from aws_xray_sdk.core.models.dummy_entities import DummySegment
from aws_xray_sdk.core.context import Context
class ServiceConnector:
    """
    Connector class that translates Centralized Sampling poller functions to
    actual X-Ray back-end APIs and communicates with X-Ray daemon as the
    signing proxy.
    """
    def __init__(self):
        self._xray_client = self._create_xray_client()
        # Random hex string identifying this SDK instance in the
        # sampling statistics documents reported to the service.
        self._client_id = binascii.b2a_hex(os.urandom(12)).decode('utf-8')
        self._context = Context()

    def _context_wrapped(func):
        """
        Wrap boto calls with a dummy segment. This is because botocore
        has two dependencies (requests and httplib) that might be
        monkey-patched in user code to capture subsegments. The wrapper
        makes sure there is always a non-sampled segment present when
        the connector makes an AWS API call using botocore.
        This context wrapper doesn't work with asyncio based context
        as event loop is not thread-safe.
        """
        def wrapper(self, *args, **kargs):
            if type(self.context).__name__ == 'AsyncContext':
                return func(self, *args, **kargs)
            segment = DummySegment()
            self.context.set_trace_entity(segment)
            # Clean up even when the wrapped call raises, so a stale
            # dummy segment never leaks into the thread-local context.
            try:
                return func(self, *args, **kargs)
            finally:
                self.context.clear_trace_entities()
        return wrapper

    @_context_wrapped
    def fetch_sampling_rules(self):
        """
        Use X-Ray botocore client to get the centralized sampling rules
        from X-Ray service. The call is proxied and signed by X-Ray Daemon.

        :return: list of ``SamplingRule`` objects built from valid records.
        """
        new_rules = []
        resp = self._xray_client.get_sampling_rules()
        records = resp['SamplingRuleRecords']
        for record in records:
            rule_def = record['SamplingRule']
            if self._is_rule_valid(rule_def):
                rule = SamplingRule(name=rule_def['RuleName'],
                                    priority=rule_def['Priority'],
                                    rate=rule_def['FixedRate'],
                                    reservoir_size=rule_def['ReservoirSize'],
                                    host=rule_def['Host'],
                                    service=rule_def['ServiceName'],
                                    method=rule_def['HTTPMethod'],
                                    path=rule_def['URLPath'],
                                    service_type=rule_def['ServiceType'])
                new_rules.append(rule)
        return new_rules

    @_context_wrapped
    def fetch_sampling_target(self, rules):
        """
        Report the current statistics of sampling rules and
        get back the newly assigned quota/TTL from the X-Ray service.
        The call is proxied and signed via X-Ray Daemon.

        :return: tuple of (mapping of rule name to target dict, POSIX
            epoch of the last rule modification on the service side).
        """
        now = int(time.time())
        report_docs = self._generate_reporting_docs(rules, now)
        resp = self._xray_client.get_sampling_targets(
            SamplingStatisticsDocuments=report_docs
        )
        new_docs = resp['SamplingTargetDocuments']
        targets_mapping = {}
        for doc in new_docs:
            # TTL may be absent when no quota was assigned.
            TTL = self._dt_to_epoch(doc['ReservoirQuotaTTL']) if doc.get('ReservoirQuotaTTL', None) else None
            target = {
                'rate': doc['FixedRate'],
                'quota': doc.get('ReservoirQuota', None),
                'TTL': TTL,
                'interval': doc.get('Interval', None),
            }
            targets_mapping[doc['RuleName']] = target
        return targets_mapping, self._dt_to_epoch(resp['LastRuleModification'])

    def setup_xray_client(self, ip, port, client):
        """
        Setup the xray client based on ip and port.
        If a preset client is specified, ip and port
        will be ignored.
        """
        if not client:
            client = self._create_xray_client(ip, port)
        self._xray_client = client

    @property
    def context(self):
        return self._context

    @context.setter
    def context(self, v):
        self._context = v

    def _generate_reporting_docs(self, rules, now):
        # Build one SamplingStatisticsDocument per rule from the rule's
        # statistics snapshot.
        report_docs = []
        for rule in rules:
            statistics = rule.snapshot_statistics()
            doc = {
                'RuleName': rule.name,
                'ClientID': self._client_id,
                'RequestCount': statistics['request_count'],
                'BorrowCount': statistics['borrow_count'],
                'SampledCount': statistics['sampled_count'],
                'Timestamp': now,
            }
            report_docs.append(doc)
        return report_docs

    def _dt_to_epoch(self, dt):
        """
        Convert an offset-aware datetime to POSIX time.
        """
        # `datetime.timestamp()` (Python 3.3+) directly returns POSIX time.
        return int(dt.timestamp())

    def _is_rule_valid(self, record):
        # We currently only handle v1 sampling rules with a wildcard
        # resource ARN, a non-empty service type, and no attribute
        # matchers.
        return record.get('Version', None) == 1 and \
            record.get('ResourceARN', None) == '*' and \
            record.get('ServiceType', None) and \
            not record.get('Attributes', None)

    def _create_xray_client(self, ip='127.0.0.1', port='2000'):
        # The daemon signs and proxies the call, so the client itself is
        # unsigned and region/credentials are placeholders.
        session = botocore.session.get_session()
        url = 'http://%s:%s' % (ip, port)
        return session.create_client('xray', endpoint_url=url,
                                     region_name='us-west-2',
                                     config=Config(signature_version=UNSIGNED),
                                     aws_access_key_id='', aws_secret_access_key=''
                                     )
================================================
FILE: aws_xray_sdk/core/sampling/local/__init__.py
================================================
================================================
FILE: aws_xray_sdk/core/sampling/local/reservoir.py
================================================
import time
import threading
class Reservoir:
    """
    Tracks how many segments have been sampled inside the current
    one-second window. All bookkeeping is done under a lock so the
    count stays accurate across threads.
    """
    def __init__(self, traces_per_sec=0):
        """
        :param int traces_per_sec: number of guaranteed
            sampled segments per second.
        """
        self._lock = threading.Lock()
        self.traces_per_sec = traces_per_sec
        self.used_this_sec = 0
        self.this_sec = int(time.time())

    def take(self):
        """
        Claim one sampled segment from this second's budget.
        Returns True while the budget lasts, False once exhausted.
        """
        with self._lock:
            current = int(time.time())
            # Entering a new second resets the spent budget.
            if current != self.this_sec:
                self.used_this_sec = 0
                self.this_sec = current
            if self.used_this_sec < self.traces_per_sec:
                self.used_this_sec += 1
                return True
            return False
================================================
FILE: aws_xray_sdk/core/sampling/local/sampler.py
================================================
import json
import pkgutil
from random import Random
from .sampling_rule import SamplingRule
from ...exceptions.exceptions import InvalidSamplingManifestError
# `.decode('utf-8')` needed for Python 3.4, 3.5.
# Default manifest shipped with the SDK (sampling_rule.json alongside
# this module).
local_sampling_rule = json.loads(pkgutil.get_data(__name__, 'sampling_rule.json').decode('utf-8'))
# Local manifest versions this sampler understands.
SUPPORTED_RULE_VERSION = (1, 2)
class LocalSampler:
    """
    The local sampler that holds either custom sampling rules
    or default sampling rules defined locally. The X-Ray recorder
    uses it to calculate if this segment should be sampled or not
    when local rules are necessary.
    """
    def __init__(self, rules=local_sampling_rule):
        """
        :param dict rules: a dict that defines custom sampling rules.
        An example configuration:
        {
            "version": 2,
            "rules": [
                {
                    "description": "Player moves.",
                    "host": "*",
                    "http_method": "*",
                    "url_path": "/api/move/*",
                    "fixed_target": 0,
                    "rate": 0.05
                }
            ],
            "default": {
                "fixed_target": 1,
                "rate": 0.1
            }
        }
        This example defines one custom rule and a default rule.
        The custom rule applies a five-percent sampling rate with no minimum
        number of requests to trace for paths under /api/move/. The default
        rule traces the first request each second and 10 percent of additional requests.
        The SDK applies custom rules in the order in which they are defined.
        If a request matches multiple custom rules, the SDK applies only the first rule.
        """
        self.load_local_rules(rules)
        self._random = Random()

    def should_trace(self, sampling_req=None):
        """
        Return True if the sampler decides to sample based on input
        information and sampling rules. It will first check if any
        custom rule should be applied, if not it falls back to the
        default sampling rule.
        All optional arguments are extracted from incoming requests by
        X-Ray middleware to perform path based sampling.
        """
        if sampling_req is None:
            return self._should_trace(self._default_rule)
        host = sampling_req.get('host', None)
        method = sampling_req.get('method', None)
        path = sampling_req.get('path', None)
        # Custom rules are evaluated in definition order; first match wins.
        for rule in self._rules:
            if rule.applies(host, method, path):
                return self._should_trace(rule)
        return self._should_trace(self._default_rule)

    def load_local_rules(self, rules):
        """
        Validate and install a sampling rule manifest.

        :raises InvalidSamplingManifestError: if the manifest version is
            unsupported or no default rule is present.
        """
        version = rules.get('version', None)
        if version not in SUPPORTED_RULE_VERSION:
            # Interpolate eagerly — exception constructors do not apply
            # logging-style lazy %-formatting, so passing the value as a
            # second argument never produced a formatted message.
            raise InvalidSamplingManifestError(
                'Manifest version: %s is not supported.' % version)
        if 'default' not in rules:
            raise InvalidSamplingManifestError('A default rule must be provided.')
        self._default_rule = SamplingRule(rule_dict=rules['default'],
                                          version=version,
                                          default=True)
        self._rules = []
        if 'rules' in rules:
            for rule in rules['rules']:
                self._rules.append(SamplingRule(rule, version))

    def _should_trace(self, sampling_rule):
        # Guaranteed reservoir quota first; otherwise a Bernoulli trial
        # at the rule's configured rate.
        if sampling_rule.reservoir.take():
            return True
        else:
            return self._random.random() < sampling_rule.rate
================================================
FILE: aws_xray_sdk/core/sampling/local/sampling_rule.json
================================================
{
"version": 2,
"default": {
"fixed_target": 1,
"rate": 0.05
},
"rules": [
]
}
================================================
FILE: aws_xray_sdk/core/sampling/local/sampling_rule.py
================================================
from .reservoir import Reservoir
from ...exceptions.exceptions import InvalidSamplingManifestError
from aws_xray_sdk.core.utils.search_pattern import wildcard_match
class SamplingRule:
    """
    One SamplingRule represents one rule defined from local rule json file
    or from a dictionary. It can be either a custom rule or default rule.
    """
    # Keys used in the incoming rule dictionary.
    FIXED_TARGET = 'fixed_target'
    RATE = 'rate'
    HOST = 'host'
    METHOD = 'http_method'
    PATH = 'url_path'
    SERVICE_NAME = 'service_name'

    def __init__(self, rule_dict, version=2, default=False):
        """
        :param dict rule_dict: The dictionary that defines a single rule.
        :param int version: version of the sampling manifest. A v1
            manifest uses ``service_name`` where a v2 manifest uses
            ``host``.
        :param bool default: Indicates if this is the default rule. A default
            rule cannot have `host`, `http_method` or `url_path`.
        :raises InvalidSamplingManifestError: if the rule is malformed.
        """
        if version == 2:
            self._host_key = self.HOST
        elif version == 1:
            self._host_key = self.SERVICE_NAME
        # Remember the manifest version; previously the `version`
        # property raised AttributeError because this was never stored.
        self._version = version
        self._fixed_target = rule_dict.get(self.FIXED_TARGET, None)
        self._rate = rule_dict.get(self.RATE, None)
        self._host = rule_dict.get(self._host_key, None)
        self._method = rule_dict.get(self.METHOD, None)
        self._path = rule_dict.get(self.PATH, None)
        self._default = default
        self._validate()
        self._reservoir = Reservoir(self.fixed_target)

    def applies(self, host, method, path):
        """
        Determines whether or not this sampling rule applies to
        the incoming request based on some of the request's parameters.
        Any None parameters provided will be considered an implicit match.
        """
        return (not host or wildcard_match(self.host, host)) \
            and (not method or wildcard_match(self.method, method)) \
            and (not path or wildcard_match(self.path, path))

    @property
    def fixed_target(self):
        """
        Defines fixed number of sampled segments per second.
        This doesn't count for sampling rate.
        """
        return self._fixed_target

    @property
    def rate(self):
        """
        A float number less than 1.0 defines the sampling rate.
        """
        return self._rate

    @property
    def host(self):
        """
        The host name of the request to sample.
        """
        return self._host

    @property
    def method(self):
        """
        HTTP method of the request to sample.
        """
        return self._method

    @property
    def path(self):
        """
        The url path of the request to sample.
        """
        return self._path

    @property
    def reservoir(self):
        """
        Keeps track of used sampled targets within the second.
        """
        return self._reservoir

    @property
    def version(self):
        """
        Version of the sampling manifest this rule was loaded from.
        """
        return self._version

    def _validate(self):
        # Error messages are interpolated eagerly — exceptions don't
        # support logging-style lazy %-formatting.
        if self.fixed_target < 0 or self.rate < 0:
            raise InvalidSamplingManifestError('All rules must have non-negative values for '
                                               'fixed_target and rate')
        if self._default:
            # A default rule acts as a catch-all and must not match on
            # request attributes.
            if self.host or self.method or self.path:
                raise InvalidSamplingManifestError('The default rule must not specify values for '
                                                   'url_path, %s, or http_method' % self._host_key)
        else:
            if not self.host or not self.method or not self.path:
                raise InvalidSamplingManifestError('All non-default rules must have values for '
                                                   'url_path, %s, and http_method' % self._host_key)
================================================
FILE: aws_xray_sdk/core/sampling/reservoir.py
================================================
import threading
from enum import Enum
class Reservoir:
    """
    Centralized thread-safe reservoir which holds fixed sampling
    quota, borrowed count and TTL.
    """
    def __init__(self):
        self._lock = threading.Lock()
        self._quota = None
        self._TTL = None
        # Per-second usage bookkeeping.
        self._this_sec = 0
        self._taken_this_sec = 0
        self._borrowed_this_sec = 0
        # How many polling ticks between statistics reports.
        self._report_interval = 1
        self._report_elapsed = 0

    def borrow_or_take(self, now, can_borrow):
        """
        Decide whether to borrow or take one quota from
        the reservoir. Returns ``ReservoirDecision.NO`` if it can
        neither borrow nor take. This method is thread-safe.
        """
        with self._lock:
            return self._borrow_or_take(now, can_borrow)

    def load_quota(self, quota, TTL, interval):
        """
        Load new quota with a TTL. If the input is None,
        the reservoir will continue using old quota until it
        expires or has a non-None quota/TTL in a future load.
        """
        if quota is not None:
            self._quota = quota
        if TTL is not None:
            self._TTL = TTL
        if interval is not None:
            self._report_interval = interval / 10

    @property
    def quota(self):
        return self._quota

    @property
    def TTL(self):
        return self._TTL

    def _time_to_report(self):
        # Count elapsed ticks and fire once per report interval.
        if self._report_elapsed + 1 >= self._report_interval:
            self._report_elapsed = 0
            return True
        self._report_elapsed += 1
        # Previously an implicit None; callers treat it as falsy either way.
        return False

    def _borrow_or_take(self, now, can_borrow):
        self._adjust_this_sec(now)
        # Don't borrow if the quota is available and fresh.
        if (self._quota is not None and self._quota >= 0 and
                self._TTL is not None and self._TTL >= now):
            if self._taken_this_sec >= self._quota:
                return ReservoirDecision.NO
            self._taken_this_sec = self._taken_this_sec + 1
            return ReservoirDecision.TAKE
        # Otherwise try to borrow if the quota is not present or expired.
        if can_borrow:
            if self._borrowed_this_sec >= 1:
                return ReservoirDecision.NO
            self._borrowed_this_sec = self._borrowed_this_sec + 1
            return ReservoirDecision.BORROW
        # Previously this path fell through and returned None; NO behaves
        # identically under the equality checks callers perform.
        return ReservoirDecision.NO

    def _adjust_this_sec(self, now):
        # Reset the per-second counters when entering a new second.
        if now != self._this_sec:
            self._taken_this_sec = 0
            self._borrowed_this_sec = 0
            self._this_sec = now
class ReservoirDecision(Enum):
    """
    An Enum of decisions the reservoir could make based on
    assigned quota with TTL and the current timestamp/usage.
    """
    # Consume one unit of the fresh, assigned quota.
    TAKE = 'take'
    # No fresh quota available; borrow one trace for this second.
    BORROW = 'borrow'
    # Neither taking nor borrowing is possible.
    NO = 'no'
================================================
FILE: aws_xray_sdk/core/sampling/rule_cache.py
================================================
import threading
from operator import attrgetter
TTL = 60 * 60 # The cache expires 1 hour after the last refresh time.
class RuleCache:
"""
Cache sampling rules and quota retrieved by ``TargetPoller``
and ``RulePoller``. It will not return anything if it expires.
"""
def __init__(self):
self._last_updated = None
self._rules = []
self._lock = threading.Lock()
def get_matched_rule(self, sampling_req, now):
if self._is_expired(now):
return None
matched_rule = None
for rule in self.rules:
if(not matched_rule and rule.match(sampling_req)):
matched_rule = rule
if(not matched_rule and rule.is_default()):
matched_rule = rule
return matched_rule
def load_rules(self, rules):
# Record the old rules for later merging.
with self._lock:
self._load_rules(rules)
def load_targets(self, targets_dict):
with self._lock:
self._load_targets(targets_dict)
def _load_rules(self, rules):
oldRules = {}
for rule in self.rules:
oldRules[rule.name] = rule
# Update the rules in the cache.
self.rules = rules
# Transfer state information to refreshed rules.
for rule in self.rules:
old = oldRules.get(rule.name, None)
if old:
rule.merge(old)
# The cache should maintain the order of the rules based on
# priority. If priority is the same we sort name by alphabet
# as rule name is unique.
self.rules.sort(key=attrgetter('priority', 'name'))
def _load_targets(self, targets_dict):
for rule in self.rules:
target = targets_dict.get(rule.name, None)
if target:
rule.reservoir.load_quota(target['quota'],
gitextract_js1ahssn/ ├── .github/ │ ├── CODEOWNERS │ ├── PULL_REQUEST_TEMPLATE.md │ ├── dependency-check-suppressions.xml │ ├── stale.yml │ ├── trivy/ │ │ └── daily-scan.trivyignore.yaml │ └── workflows/ │ ├── IntegrationTesting.yaml │ ├── Release.yaml │ ├── UnitTesting.yaml │ ├── continuous-monitoring.yml │ └── daily-scan.yml ├── .gitignore ├── CHANGELOG.rst ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── MANIFEST.in ├── NOTICE ├── README.md ├── __init__.py ├── aws_xray_sdk/ │ ├── __init__.py │ ├── core/ │ │ ├── __init__.py │ │ ├── async_context.py │ │ ├── async_recorder.py │ │ ├── context.py │ │ ├── daemon_config.py │ │ ├── emitters/ │ │ │ ├── __init__.py │ │ │ └── udp_emitter.py │ │ ├── exceptions/ │ │ │ ├── __init__.py │ │ │ └── exceptions.py │ │ ├── lambda_launcher.py │ │ ├── models/ │ │ │ ├── __init__.py │ │ │ ├── default_dynamic_naming.py │ │ │ ├── dummy_entities.py │ │ │ ├── entity.py │ │ │ ├── facade_segment.py │ │ │ ├── http.py │ │ │ ├── noop_traceid.py │ │ │ ├── segment.py │ │ │ ├── subsegment.py │ │ │ ├── throwable.py │ │ │ ├── trace_header.py │ │ │ └── traceid.py │ │ ├── patcher.py │ │ ├── plugins/ │ │ │ ├── __init__.py │ │ │ ├── ec2_plugin.py │ │ │ ├── ecs_plugin.py │ │ │ ├── elasticbeanstalk_plugin.py │ │ │ └── utils.py │ │ ├── recorder.py │ │ ├── sampling/ │ │ │ ├── __init__.py │ │ │ ├── connector.py │ │ │ ├── local/ │ │ │ │ ├── __init__.py │ │ │ │ ├── reservoir.py │ │ │ │ ├── sampler.py │ │ │ │ ├── sampling_rule.json │ │ │ │ └── sampling_rule.py │ │ │ ├── reservoir.py │ │ │ ├── rule_cache.py │ │ │ ├── rule_poller.py │ │ │ ├── sampler.py │ │ │ ├── sampling_rule.py │ │ │ └── target_poller.py │ │ ├── streaming/ │ │ │ ├── __init__.py │ │ │ └── default_streaming.py │ │ └── utils/ │ │ ├── __init__.py │ │ ├── atomic_counter.py │ │ ├── compat.py │ │ ├── conversion.py │ │ ├── search_pattern.py │ │ ├── sqs_message_helper.py │ │ └── stacktrace.py │ ├── ext/ │ │ ├── __init__.py │ │ ├── aiobotocore/ │ │ │ ├── __init__.py │ │ │ └── patch.py │ │ ├── 
aiohttp/ │ │ │ ├── __init__.py │ │ │ ├── client.py │ │ │ └── middleware.py │ │ ├── boto_utils.py │ │ ├── botocore/ │ │ │ ├── __init__.py │ │ │ └── patch.py │ │ ├── bottle/ │ │ │ ├── __init__.py │ │ │ └── middleware.py │ │ ├── dbapi2.py │ │ ├── django/ │ │ │ ├── __init__.py │ │ │ ├── apps.py │ │ │ ├── conf.py │ │ │ ├── db.py │ │ │ ├── middleware.py │ │ │ └── templates.py │ │ ├── flask/ │ │ │ ├── __init__.py │ │ │ └── middleware.py │ │ ├── flask_sqlalchemy/ │ │ │ ├── __init__.py │ │ │ └── query.py │ │ ├── httplib/ │ │ │ ├── __init__.py │ │ │ └── patch.py │ │ ├── httpx/ │ │ │ ├── __init__.py │ │ │ └── patch.py │ │ ├── mysql/ │ │ │ ├── __init__.py │ │ │ └── patch.py │ │ ├── pg8000/ │ │ │ ├── README.md │ │ │ ├── __init__.py │ │ │ └── patch.py │ │ ├── psycopg/ │ │ │ ├── __init__.py │ │ │ └── patch.py │ │ ├── psycopg2/ │ │ │ ├── __init__.py │ │ │ └── patch.py │ │ ├── pymongo/ │ │ │ ├── __init__.py │ │ │ └── patch.py │ │ ├── pymysql/ │ │ │ ├── __init__.py │ │ │ └── patch.py │ │ ├── pynamodb/ │ │ │ ├── __init__.py │ │ │ └── patch.py │ │ ├── requests/ │ │ │ ├── __init__.py │ │ │ └── patch.py │ │ ├── resources/ │ │ │ └── aws_para_whitelist.json │ │ ├── sqlalchemy/ │ │ │ ├── __init__.py │ │ │ ├── query.py │ │ │ └── util/ │ │ │ ├── __init__.py │ │ │ └── decorators.py │ │ ├── sqlalchemy_core/ │ │ │ ├── __init__.py │ │ │ └── patch.py │ │ ├── sqlite3/ │ │ │ ├── __init__.py │ │ │ └── patch.py │ │ └── util.py │ ├── sdk_config.py │ └── version.py ├── docs/ │ ├── .gitignore │ ├── Makefile │ ├── _templates/ │ │ └── layout.html │ ├── aws_xray_sdk.core.emitters.rst │ ├── aws_xray_sdk.core.exceptions.rst │ ├── aws_xray_sdk.core.models.rst │ ├── aws_xray_sdk.core.plugins.rst │ ├── aws_xray_sdk.core.rst │ ├── aws_xray_sdk.core.sampling.rst │ ├── aws_xray_sdk.core.streaming.rst │ ├── aws_xray_sdk.core.utils.rst │ ├── aws_xray_sdk.ext.aiobotocore.rst │ ├── aws_xray_sdk.ext.aiohttp.rst │ ├── aws_xray_sdk.ext.botocore.rst │ ├── aws_xray_sdk.ext.django.rst │ ├── aws_xray_sdk.ext.flask.rst │ ├── 
aws_xray_sdk.ext.flask_sqlalchemy.rst │ ├── aws_xray_sdk.ext.httplib.rst │ ├── aws_xray_sdk.ext.httpx.rst │ ├── aws_xray_sdk.ext.mysql.rst │ ├── aws_xray_sdk.ext.pg8000.rst │ ├── aws_xray_sdk.ext.psycopg2.rst │ ├── aws_xray_sdk.ext.pymongo.rst │ ├── aws_xray_sdk.ext.pymysql.rst │ ├── aws_xray_sdk.ext.pynamodb.rst │ ├── aws_xray_sdk.ext.requests.rst │ ├── aws_xray_sdk.ext.rst │ ├── aws_xray_sdk.ext.sqlalchemy.rst │ ├── aws_xray_sdk.ext.sqlalchemy.util.rst │ ├── aws_xray_sdk.ext.sqlalchemy_core.rst │ ├── aws_xray_sdk.ext.sqlite3.rst │ ├── aws_xray_sdk.rst │ ├── basic.rst │ ├── changes.rst │ ├── conf.py │ ├── configurations.rst │ ├── frameworks.rst │ ├── index.rst │ ├── license.rst │ ├── make.bat │ ├── modules.rst │ └── thirdparty.rst ├── sample-apps/ │ ├── LICENSE │ └── flask/ │ ├── Dockerfile │ ├── application.py │ └── requirements.txt ├── setup.cfg ├── setup.py ├── terraform/ │ ├── eb.tf │ ├── fixtures.us-west-2.tfvars │ └── variables.tf ├── tests/ │ ├── __init__.py │ ├── distributioncheck/ │ │ ├── __init__.py │ │ └── test_sanity.py │ ├── ext/ │ │ ├── __init__.py │ │ ├── aiobotocore/ │ │ │ ├── __init__.py │ │ │ └── test_aiobotocore.py │ │ ├── aiohttp/ │ │ │ ├── __init__.py │ │ │ ├── test_client.py │ │ │ └── test_middleware.py │ │ ├── botocore/ │ │ │ ├── __init__.py │ │ │ └── test_botocore.py │ │ ├── bottle/ │ │ │ ├── __init__.py │ │ │ ├── test_bottle.py │ │ │ └── views/ │ │ │ └── index.tpl │ │ ├── django/ │ │ │ ├── __init__.py │ │ │ ├── app/ │ │ │ │ ├── __init__.py │ │ │ │ ├── settings.py │ │ │ │ ├── templates/ │ │ │ │ │ ├── block.html │ │ │ │ │ ├── block_user.html │ │ │ │ │ └── index.html │ │ │ │ └── views.py │ │ │ ├── test_db.py │ │ │ ├── test_middleware.py │ │ │ └── test_settings.py │ │ ├── flask/ │ │ │ ├── __init__.py │ │ │ └── test_flask.py │ │ ├── flask_sqlalchemy/ │ │ │ ├── __init__.py │ │ │ └── test_query.py │ │ ├── httplib/ │ │ │ ├── __init__.py │ │ │ └── test_httplib.py │ │ ├── httpx/ │ │ │ ├── __init__.py │ │ │ ├── test_httpx.py │ │ │ └── 
test_httpx_async.py │ │ ├── pg8000/ │ │ │ ├── __init__.py │ │ │ └── test_pg8000.py │ │ ├── psycopg/ │ │ │ ├── __init__.py │ │ │ └── test_psycopg.py │ │ ├── psycopg2/ │ │ │ ├── __init__.py │ │ │ └── test_psycopg2.py │ │ ├── pymysql/ │ │ │ ├── __init__.py │ │ │ └── test_pymysql.py │ │ ├── pynamodb/ │ │ │ ├── __init__.py │ │ │ └── test_pynamodb.py │ │ ├── requests/ │ │ │ ├── __init__.py │ │ │ └── test_requests.py │ │ ├── sqlalchemy/ │ │ │ ├── __init__.py │ │ │ └── test_query.py │ │ ├── sqlalchemy_core/ │ │ │ ├── __init__.py │ │ │ ├── test_base.py │ │ │ ├── test_dburl.py │ │ │ ├── test_postgres.py │ │ │ ├── test_sqlalchemy_core.py │ │ │ └── test_sqlalchemy_core_2.py │ │ └── sqlite3/ │ │ ├── __init__.py │ │ └── test_sqlite3.py │ ├── mock_module/ │ │ ├── __init__.py │ │ ├── mock_file.py │ │ └── mock_submodule/ │ │ ├── __init__.py │ │ └── mock_subfile.py │ ├── mock_sampling_rule.json │ ├── test_async_local_storage.py │ ├── test_async_recorder.py │ ├── test_daemon_config.py │ ├── test_dummy_entites.py │ ├── test_facade_segment.py │ ├── test_lambda_context.py │ ├── test_local_sampling.py │ ├── test_local_sampling_benchmark.py │ ├── test_patcher.py │ ├── test_plugins.py │ ├── test_recorder.py │ ├── test_sampling_rule_cache.py │ ├── test_sdk_config.py │ ├── test_serialize_entities.py │ ├── test_sqs_message_helper.py │ ├── test_throwable.py │ ├── test_trace_entities.py │ ├── test_trace_header.py │ ├── test_traceid.py │ ├── test_utils.py │ ├── test_wildcard_match.py │ └── util.py ├── tox-distributioncheck.ini └── tox.ini
SYMBOL INDEX (926 symbols across 125 files)
FILE: aws_xray_sdk/core/async_context.py
class AsyncContext (line 7) | class AsyncContext(_Context):
method __init__ (line 15) | def __init__(self, *args, loop=None, use_task_factory=True, **kwargs):
method clear_trace_entities (line 27) | def clear_trace_entities(self):
class TaskLocalStorage (line 35) | class TaskLocalStorage:
method __init__ (line 39) | def __init__(self, loop=None):
method __setattr__ (line 44) | def __setattr__(self, name, value):
method __getattribute__ (line 60) | def __getattribute__(self, item):
method clear (line 74) | def clear(self):
function task_factory (line 81) | def task_factory(loop, coro):
FILE: aws_xray_sdk/core/async_recorder.py
class AsyncSegmentContextManager (line 9) | class AsyncSegmentContextManager(SegmentContextManager):
method __aenter__ (line 10) | async def __aenter__(self):
method __aexit__ (line 13) | async def __aexit__(self, exc_type, exc_val, exc_tb):
class AsyncSubsegmentContextManager (line 16) | class AsyncSubsegmentContextManager(SubsegmentContextManager):
method __call__ (line 19) | async def __call__(self, wrapped, instance, args, kwargs):
method __aenter__ (line 36) | async def __aenter__(self):
method __aexit__ (line 39) | async def __aexit__(self, exc_type, exc_val, exc_tb):
class AsyncAWSXRayRecorder (line 43) | class AsyncAWSXRayRecorder(AWSXRayRecorder):
method capture_async (line 44) | def capture_async(self, name=None):
method in_segment_async (line 54) | def in_segment_async(self, name=None, **segment_kwargs):
method in_subsegment_async (line 63) | def in_subsegment_async(self, name=None, **subsegment_kwargs):
method record_subsegment_async (line 72) | async def record_subsegment_async(self, wrapped, instance, args, kwarg...
FILE: aws_xray_sdk/core/context.py
class Context (line 17) | class Context:
method __init__ (line 30) | def __init__(self, context_missing='LOG_ERROR'):
method put_segment (line 36) | def put_segment(self, segment):
method end_segment (line 43) | def end_segment(self, end_time=None):
method put_subsegment (line 59) | def put_subsegment(self, subsegment):
method end_subsegment (line 73) | def end_subsegment(self, end_time=None):
method get_trace_entity (line 92) | def get_trace_entity(self):
method set_trace_entity (line 105) | def set_trace_entity(self, trace_entity):
method clear_trace_entities (line 112) | def clear_trace_entities(self):
method handle_context_missing (line 120) | def handle_context_missing(self):
method _is_subsegment (line 129) | def _is_subsegment(self, entity):
method context_missing (line 134) | def context_missing(self):
method context_missing (line 138) | def context_missing(self, value):
FILE: aws_xray_sdk/core/daemon_config.py
class DaemonConfig (line 9) | class DaemonConfig:
method __init__ (line 20) | def __init__(self, daemon_address=DEFAULT_ADDRESS):
method _parse_single_form (line 33) | def _parse_single_form(self, val):
method _parse_double_form (line 43) | def _parse_double_form(self, val1, val2, origin):
method udp_ip (line 63) | def udp_ip(self):
method udp_port (line 67) | def udp_port(self):
method tcp_ip (line 71) | def tcp_ip(self):
method tcp_port (line 75) | def tcp_port(self):
FILE: aws_xray_sdk/core/emitters/udp_emitter.py
class UDPEmitter (line 15) | class UDPEmitter:
method __init__ (line 22) | def __init__(self, daemon_address=DEFAULT_DAEMON_ADDRESS):
method send_entity (line 28) | def send_entity(self, entity):
method set_daemon_address (line 45) | def set_daemon_address(self, address):
method ip (line 55) | def ip(self):
method port (line 59) | def port(self):
method _send_data (line 62) | def _send_data(self, data):
method _parse_address (line 65) | def _parse_address(self, daemon_address):
FILE: aws_xray_sdk/core/exceptions/exceptions.py
class InvalidSamplingManifestError (line 1) | class InvalidSamplingManifestError(Exception):
class SegmentNotFoundException (line 5) | class SegmentNotFoundException(Exception):
class InvalidDaemonAddressException (line 9) | class InvalidDaemonAddressException(Exception):
class SegmentNameMissingException (line 13) | class SegmentNameMissingException(Exception):
class SubsegmentNameMissingException (line 17) | class SubsegmentNameMissingException(Exception):
class FacadeSegmentMutationException (line 21) | class FacadeSegmentMutationException(Exception):
class MissingPluginNames (line 25) | class MissingPluginNames(Exception):
class AlreadyEndedException (line 29) | class AlreadyEndedException(Exception):
FILE: aws_xray_sdk/core/lambda_launcher.py
function check_in_lambda (line 20) | def check_in_lambda():
class LambdaContext (line 44) | class LambdaContext(Context):
method __init__ (line 51) | def __init__(self):
method put_segment (line 55) | def put_segment(self, segment):
method end_segment (line 61) | def end_segment(self, end_time=None):
method put_subsegment (line 67) | def put_subsegment(self, subsegment):
method set_trace_entity (line 82) | def set_trace_entity(self, trace_entity):
method get_trace_entity (line 94) | def get_trace_entity(self):
method _refresh_context (line 101) | def _refresh_context(self):
method context_missing (line 125) | def context_missing(self):
method context_missing (line 129) | def context_missing(self, value):
method handle_context_missing (line 132) | def handle_context_missing(self):
method _initialize_context (line 138) | def _initialize_context(self, trace_header):
FILE: aws_xray_sdk/core/models/default_dynamic_naming.py
class DefaultDynamicNaming (line 4) | class DefaultDynamicNaming:
method __init__ (line 12) | def __init__(self, pattern, fallback):
method get_name (line 24) | def get_name(self, host_name):
FILE: aws_xray_sdk/core/models/dummy_entities.py
class DummySegment (line 8) | class DummySegment(Segment):
method __init__ (line 18) | def __init__(self, name='dummy'):
method set_aws (line 26) | def set_aws(self, aws_meta):
method put_http_meta (line 32) | def put_http_meta(self, key, value):
method put_annotation (line 38) | def put_annotation(self, key, value):
method put_metadata (line 44) | def put_metadata(self, key, value, namespace='default'):
method set_user (line 50) | def set_user(self, user):
method set_service (line 56) | def set_service(self, service_info):
method apply_status_code (line 62) | def apply_status_code(self, status_code):
method add_exception (line 68) | def add_exception(self, exception, stack, remote=False):
method serialize (line 74) | def serialize(self):
class DummySubsegment (line 81) | class DummySubsegment(Subsegment):
method __init__ (line 89) | def __init__(self, segment, name='dummy'):
method set_aws (line 98) | def set_aws(self, aws_meta):
method put_http_meta (line 104) | def put_http_meta(self, key, value):
method put_annotation (line 110) | def put_annotation(self, key, value):
method put_metadata (line 116) | def put_metadata(self, key, value, namespace='default'):
method set_sql (line 122) | def set_sql(self, sql):
method apply_status_code (line 128) | def apply_status_code(self, status_code):
method add_exception (line 134) | def add_exception(self, exception, stack, remote=False):
method serialize (line 140) | def serialize(self):
FILE: aws_xray_sdk/core/models/entity.py
class Entity (line 24) | class Entity:
method __init__ (line 30) | def __init__(self, name, entity_id=None):
method close (line 62) | def close(self, end_time=None):
method add_subsegment (line 78) | def add_subsegment(self, subsegment):
method remove_subsegment (line 90) | def remove_subsegment(self, subsegment):
method put_http_meta (line 96) | def put_http_meta(self, key, value):
method put_annotation (line 131) | def put_annotation(self, key, value):
method put_metadata (line 156) | def put_metadata(self, key, value, namespace='default'):
method set_aws (line 181) | def set_aws(self, aws_meta):
method add_throttle_flag (line 191) | def add_throttle_flag(self):
method add_fault_flag (line 194) | def add_fault_flag(self):
method add_error_flag (line 197) | def add_error_flag(self):
method apply_status_code (line 200) | def apply_status_code(self, status_code):
method add_exception (line 218) | def add_exception(self, exception, stack, remote=False):
method save_origin_trace_header (line 250) | def save_origin_trace_header(self, trace_header):
method get_origin_trace_header (line 258) | def get_origin_trace_header(self):
method serialize (line 264) | def serialize(self):
method to_dict (line 271) | def to_dict(self):
method _check_ended (line 304) | def _check_ended(self):
method _generate_random_id (line 308) | def _generate_random_id(self):
FILE: aws_xray_sdk/core/models/facade_segment.py
class FacadeSegment (line 8) | class FacadeSegment(Segment):
method __init__ (line 17) | def __init__(self, name, entityid, traceid, sampled):
method close (line 32) | def close(self, end_time=None):
method put_http_meta (line 38) | def put_http_meta(self, key, value):
method put_annotation (line 44) | def put_annotation(self, key, value):
method put_metadata (line 50) | def put_metadata(self, key, value, namespace='default'):
method set_aws (line 56) | def set_aws(self, aws_meta):
method set_user (line 62) | def set_user(self, user):
method add_throttle_flag (line 68) | def add_throttle_flag(self):
method add_fault_flag (line 74) | def add_fault_flag(self):
method add_error_flag (line 80) | def add_error_flag(self):
method add_exception (line 86) | def add_exception(self, exception, stack, remote=False):
method apply_status_code (line 92) | def apply_status_code(self, status_code):
method serialize (line 98) | def serialize(self):
method ready_to_send (line 104) | def ready_to_send(self):
method increment (line 111) | def increment(self):
method decrement_ref_counter (line 117) | def decrement_ref_counter(self):
method _is_initializing (line 123) | def _is_initializing(self, entityid, traceid, sampled):
FILE: aws_xray_sdk/core/models/noop_traceid.py
class NoOpTraceId (line 1) | class NoOpTraceId:
method __init__ (line 10) | def __init__(self):
method to_id (line 17) | def to_id(self):
FILE: aws_xray_sdk/core/models/segment.py
class SegmentContextManager (line 12) | class SegmentContextManager:
method __init__ (line 17) | def __init__(self, recorder, name=None, **segment_kwargs):
method __enter__ (line 23) | def __enter__(self):
method __exit__ (line 28) | def __exit__(self, exc_type, exc_val, exc_tb):
class Segment (line 43) | class Segment(Entity):
method __init__ (line 49) | def __init__(self, name, entityid=None, traceid=None,
method add_subsegment (line 83) | def add_subsegment(self, subsegment):
method increment (line 91) | def increment(self):
method decrement_ref_counter (line 100) | def decrement_ref_counter(self):
method ready_to_send (line 106) | def ready_to_send(self):
method get_total_subsegments_size (line 113) | def get_total_subsegments_size(self):
method decrement_subsegments_size (line 119) | def decrement_subsegments_size(self):
method remove_subsegment (line 126) | def remove_subsegment(self, subsegment):
method set_user (line 133) | def set_user(self, user):
method set_service (line 141) | def set_service(self, service_info):
method set_rule_name (line 148) | def set_rule_name(self, rule_name):
method to_dict (line 158) | def to_dict(self):
FILE: aws_xray_sdk/core/models/subsegment.py
function set_as_recording (line 14) | def set_as_recording(decorated_func, wrapped):
function is_already_recording (line 19) | def is_already_recording(func):
function subsegment_decorator (line 26) | def subsegment_decorator(wrapped, instance, args, kwargs):
class SubsegmentContextManager (line 32) | class SubsegmentContextManager:
method __init__ (line 37) | def __init__(self, recorder, name=None, **subsegment_kwargs):
method __call__ (line 44) | def __call__(self, wrapped, instance, args, kwargs):
method __enter__ (line 61) | def __enter__(self):
method __exit__ (line 66) | def __exit__(self, exc_type, exc_val, exc_tb):
class Subsegment (line 81) | class Subsegment(Entity):
method __init__ (line 89) | def __init__(self, name, namespace, segment):
method add_subsegment (line 111) | def add_subsegment(self, subsegment):
method remove_subsegment (line 120) | def remove_subsegment(self, subsegment):
method close (line 130) | def close(self, end_time=None):
method set_sql (line 142) | def set_sql(self, sql):
method to_dict (line 152) | def to_dict(self):
FILE: aws_xray_sdk/core/models/throwable.py
class Throwable (line 9) | class Throwable:
method __init__ (line 15) | def __init__(self, exception, stack, remote=False):
method to_dict (line 48) | def to_dict(self):
method _normalize_stack_trace (line 61) | def _normalize_stack_trace(self, stack):
FILE: aws_xray_sdk/core/models/trace_header.py
class TraceHeader (line 13) | class TraceHeader:
method __init__ (line 21) | def __init__(self, root=None, parent=None, sampled=None, data=None):
method from_header_str (line 42) | def from_header_str(cls, header):
method to_header_str (line 75) | def to_header_str(self):
method root (line 94) | def root(self):
method parent (line 101) | def parent(self):
method sampled (line 108) | def sampled(self):
method data (line 116) | def data(self):
FILE: aws_xray_sdk/core/models/traceid.py
class TraceId (line 6) | class TraceId:
method __init__ (line 15) | def __init__(self):
method to_id (line 22) | def to_id(self):
FILE: aws_xray_sdk/core/patcher.py
function patch_all (line 51) | def patch_all(double_patch=False):
function _is_valid_import (line 70) | def _is_valid_import(module):
function patch (line 82) | def patch(modules_to_patch, raise_errors=True, ignore_module_patterns=No...
function _patch_module (line 130) | def _patch_module(module_to_patch, raise_errors=True):
function _patch (line 139) | def _patch(module_to_patch):
function _patch_func (line 154) | def _patch_func(parent, func_name, func, modifier=lambda x: x):
function _patch_class (line 167) | def _patch_class(module, cls):
function _on_import (line 193) | def _on_import(module):
function _external_module_patch (line 205) | def _external_module_patch(module, ignore_module_patterns):
FILE: aws_xray_sdk/core/plugins/ec2_plugin.py
function initialize (line 12) | def initialize():
function get_token (line 27) | def get_token():
function get_metadata (line 43) | def get_metadata(token=None):
function parse_metadata_json (line 59) | def parse_metadata_json(json_str):
function do_request (line 71) | def do_request(url, headers=None, method="GET"):
FILE: aws_xray_sdk/core/plugins/ecs_plugin.py
function initialize (line 10) | def initialize():
FILE: aws_xray_sdk/core/plugins/elasticbeanstalk_plugin.py
function initialize (line 11) | def initialize():
FILE: aws_xray_sdk/core/plugins/utils.py
function get_plugin_modules (line 13) | def get_plugin_modules(plugins):
FILE: aws_xray_sdk/core/recorder.py
class AWSXRayRecorder (line 42) | class AWSXRayRecorder:
method __init__ (line 52) | def __init__(self):
method configure (line 80) | def configure(self, sampling=None, plugins=None,
method in_segment (line 186) | def in_segment(self, name=None, **segment_kwargs):
method in_subsegment (line 195) | def in_subsegment(self, name=None, **subsegment_kwargs):
method begin_segment (line 204) | def begin_segment(self, name=None, traceid=None,
method end_segment (line 247) | def end_segment(self, end_time=None):
method current_segment (line 264) | def current_segment(self):
method _begin_subsegment_helper (line 277) | def _begin_subsegment_helper(self, name, namespace='local', beginWitho...
method begin_subsegment (line 303) | def begin_subsegment(self, name, namespace='local'):
method begin_subsegment_without_sampling (line 316) | def begin_subsegment_without_sampling(self, name):
method current_subsegment (line 327) | def current_subsegment(self):
method end_subsegment (line 342) | def end_subsegment(self, end_time=None):
method put_annotation (line 362) | def put_annotation(self, key, value):
method put_metadata (line 377) | def put_metadata(self, key, value, namespace='default'):
method is_sampled (line 394) | def is_sampled(self):
method get_trace_entity (line 407) | def get_trace_entity(self):
method set_trace_entity (line 413) | def set_trace_entity(self, trace_entity):
method clear_trace_entities (line 419) | def clear_trace_entities(self):
method stream_subsegments (line 425) | def stream_subsegments(self):
method capture (line 436) | def capture(self, name=None):
method record_subsegment (line 446) | def record_subsegment(self, wrapped, instance, args, kwargs, name,
method _populate_runtime_context (line 482) | def _populate_runtime_context(self, segment, sampling_decision):
method _send_segment (line 492) | def _send_segment(self):
method _stream_subsegment_out (line 507) | def _stream_subsegment_out(self, subsegment):
method _load_sampling_rules (line 512) | def _load_sampling_rules(self, sampling_rules):
method _is_subsegment (line 523) | def _is_subsegment(self, entity):
method enabled (line 528) | def enabled(self):
method enabled (line 532) | def enabled(self, value):
method sampling (line 536) | def sampling(self):
method sampling (line 540) | def sampling(self, value):
method sampler (line 544) | def sampler(self):
method sampler (line 548) | def sampler(self, value):
method service (line 552) | def service(self):
method service (line 556) | def service(self, value):
method dynamic_naming (line 560) | def dynamic_naming(self):
method dynamic_naming (line 564) | def dynamic_naming(self, value):
method context (line 571) | def context(self):
method context (line 575) | def context(self, cxt):
method emitter (line 579) | def emitter(self):
method emitter (line 583) | def emitter(self, value):
method streaming (line 587) | def streaming(self):
method streaming (line 591) | def streaming(self, value):
method streaming_threshold (line 595) | def streaming_threshold(self):
method streaming_threshold (line 602) | def streaming_threshold(self, value):
method max_trace_back (line 609) | def max_trace_back(self):
method max_trace_back (line 613) | def max_trace_back(self, value):
method stream_sql (line 617) | def stream_sql(self):
method stream_sql (line 621) | def stream_sql(self, value):
FILE: aws_xray_sdk/core/sampling/connector.py
class ServiceConnector (line 15) | class ServiceConnector:
method __init__ (line 21) | def __init__(self):
method _context_wrapped (line 26) | def _context_wrapped(func):
method fetch_sampling_rules (line 48) | def fetch_sampling_rules(self):
method fetch_sampling_target (line 75) | def fetch_sampling_target(self, rules):
method setup_xray_client (line 101) | def setup_xray_client(self, ip, port, client):
method context (line 112) | def context(self):
method context (line 116) | def context(self, v):
method _generate_reporting_docs (line 119) | def _generate_reporting_docs(self, rules, now):
method _dt_to_epoch (line 135) | def _dt_to_epoch(self, dt):
method _is_rule_valid (line 142) | def _is_rule_valid(self, record):
method _create_xray_client (line 149) | def _create_xray_client(self, ip='127.0.0.1', port='2000'):
FILE: aws_xray_sdk/core/sampling/local/reservoir.py
class Reservoir (line 5) | class Reservoir:
method __init__ (line 11) | def __init__(self, traces_per_sec=0):
method take (line 21) | def take(self):
FILE: aws_xray_sdk/core/sampling/local/sampler.py
class LocalSampler (line 14) | class LocalSampler:
method __init__ (line 21) | def __init__(self, rules=local_sampling_rule):
method should_trace (line 52) | def should_trace(self, sampling_req=None):
method load_local_rules (line 75) | def load_local_rules(self, rules):
method _should_trace (line 92) | def _should_trace(self, sampling_rule):
FILE: aws_xray_sdk/core/sampling/local/sampling_rule.py
class SamplingRule (line 6) | class SamplingRule:
method __init__ (line 19) | def __init__(self, rule_dict, version=2, default=False):
method applies (line 43) | def applies(self, host, method, path):
method fixed_target (line 54) | def fixed_target(self):
method rate (line 62) | def rate(self):
method host (line 69) | def host(self):
method method (line 76) | def method(self):
method path (line 83) | def path(self):
method reservoir (line 90) | def reservoir(self):
method version (line 97) | def version(self):
method _validate (line 103) | def _validate(self):
FILE: aws_xray_sdk/core/sampling/reservoir.py
class Reservoir (line 5) | class Reservoir:
method __init__ (line 10) | def __init__(self):
method borrow_or_take (line 23) | def borrow_or_take(self, now, can_borrow):
method load_quota (line 32) | def load_quota(self, quota, TTL, interval):
method quota (line 46) | def quota(self):
method TTL (line 50) | def TTL(self):
method _time_to_report (line 53) | def _time_to_report(self):
method _borrow_or_take (line 60) | def _borrow_or_take(self, now, can_borrow):
method _adjust_this_sec (line 79) | def _adjust_this_sec(self, now):
class ReservoirDecision (line 86) | class ReservoirDecision(Enum):
FILE: aws_xray_sdk/core/sampling/rule_cache.py
class RuleCache (line 7) | class RuleCache:
method __init__ (line 12) | def __init__(self):
method get_matched_rule (line 18) | def get_matched_rule(self, sampling_req, now):
method load_rules (line 29) | def load_rules(self, rules):
method load_targets (line 34) | def load_targets(self, targets_dict):
method _load_rules (line 38) | def _load_rules(self, rules):
method _load_targets (line 57) | def _load_targets(self, targets_dict):
method _is_expired (line 66) | def _is_expired(self, now):
method rules (line 73) | def rules(self):
method rules (line 77) | def rules(self, v):
method last_updated (line 81) | def last_updated(self):
method last_updated (line 85) | def last_updated(self, v):
FILE: aws_xray_sdk/core/sampling/rule_poller.py
class RulePoller (line 11) | class RulePoller:
method __init__ (line 13) | def __init__(self, cache, connector):
method start (line 21) | def start(self):
method _worker (line 26) | def _worker(self):
method wake_up (line 37) | def wake_up(self):
method _refresh_cache (line 45) | def _refresh_cache(self):
method _reset_time_to_wait (line 55) | def _reset_time_to_wait(self):
FILE: aws_xray_sdk/core/sampling/sampler.py
class DefaultSampler (line 17) | class DefaultSampler:
method __init__ (line 22) | def __init__(self):
method start (line 36) | def start(self):
method should_trace (line 50) | def should_trace(self, sampling_req=None):
method load_local_rules (line 77) | def load_local_rules(self, rules):
method load_settings (line 83) | def load_settings(self, daemon_config, context, origin=None):
method _process_matched_rule (line 98) | def _process_matched_rule(self, rule, now):
method xray_client (line 121) | def xray_client(self):
method xray_client (line 125) | def xray_client(self, v):
FILE: aws_xray_sdk/core/sampling/sampling_rule.py
class SamplingRule (line 7) | class SamplingRule:
method __init__ (line 11) | def __init__(self, name, priority, rate, reservoir_size,
method match (line 30) | def match(self, sampling_req):
method is_default (line 51) | def is_default(self):
method snapshot_statistics (line 55) | def snapshot_statistics(self):
method merge (line 71) | def merge(self, rule):
method ever_matched (line 82) | def ever_matched(self):
method time_to_report (line 89) | def time_to_report(self):
method increment_request_count (line 96) | def increment_request_count(self):
method increment_borrow_count (line 100) | def increment_borrow_count(self):
method increment_sampled_count (line 104) | def increment_sampled_count(self):
method _reset_statistics (line 108) | def _reset_statistics(self):
method rate (line 114) | def rate(self):
method rate (line 118) | def rate(self, v):
method name (line 122) | def name(self):
method priority (line 126) | def priority(self):
method reservoir (line 130) | def reservoir(self):
method reservoir (line 134) | def reservoir(self, v):
method can_borrow (line 138) | def can_borrow(self):
method request_count (line 142) | def request_count(self):
method borrow_count (line 146) | def borrow_count(self):
method sampled_count (line 150) | def sampled_count(self):
FILE: aws_xray_sdk/core/sampling/target_poller.py
class TargetPoller (line 9) | class TargetPoller:
method __init__ (line 15) | def __init__(self, cache, rule_poller, connector):
method start (line 22) | def start(self):
method _worker (line 27) | def _worker(self):
method _do_work (line 35) | def _do_work(self):
method _get_candidates (line 47) | def _get_candidates(self, all_rules):
method _get_jitter (line 59) | def _get_jitter(self):
FILE: aws_xray_sdk/core/streaming/default_streaming.py
class DefaultStreaming (line 4) | class DefaultStreaming:
method __init__ (line 10) | def __init__(self, streaming_threshold=30):
method is_eligible (line 14) | def is_eligible(self, segment):
method stream (line 24) | def stream(self, entity, callback):
method _stream (line 35) | def _stream(self, entity, callback):
method streaming_threshold (line 57) | def streaming_threshold(self):
method streaming_threshold (line 61) | def streaming_threshold(self, value):
FILE: aws_xray_sdk/core/utils/atomic_counter.py
class AtomicCounter (line 4) | class AtomicCounter:
method __init__ (line 8) | def __init__(self, initial=0):
method increment (line 14) | def increment(self, num=1):
method decrement (line 20) | def decrement(self, num=1):
method get_current (line 26) | def get_current(self):
method reset (line 31) | def reset(self):
FILE: aws_xray_sdk/core/utils/compat.py
function is_classmethod (line 6) | def is_classmethod(func):
function is_instance_method (line 10) | def is_instance_method(parent_class, func_name, func):
FILE: aws_xray_sdk/core/utils/conversion.py
function metadata_to_dict (line 5) | def metadata_to_dict(obj):
FILE: aws_xray_sdk/core/utils/search_pattern.py
function wildcard_match (line 1) | def wildcard_match(pattern, text, case_insensitive=True):
FILE: aws_xray_sdk/core/utils/sqs_message_helper.py
class SqsMessageHelper (line 2) | class SqsMessageHelper:
method isSampled (line 5) | def isSampled(sqs_message):
FILE: aws_xray_sdk/core/utils/stacktrace.py
function get_stacktrace (line 5) | def get_stacktrace(limit=None):
FILE: aws_xray_sdk/ext/aiobotocore/patch.py
function patch (line 8) | def patch():
function _xray_traced_aiobotocore (line 30) | async def _xray_traced_aiobotocore(wrapped, instance, args, kwargs):
FILE: aws_xray_sdk/ext/aiohttp/client.py
function begin_subsegment (line 24) | async def begin_subsegment(session, trace_config_ctx, params):
function end_subsegment (line 38) | async def end_subsegment(session, trace_config_ctx, params):
function end_subsegment_with_exception (line 47) | async def end_subsegment_with_exception(session, trace_config_ctx, params):
function aws_xray_trace_config (line 63) | def aws_xray_trace_config(name=None):
FILE: aws_xray_sdk/ext/aiohttp/middleware.py
function middleware (line 15) | async def middleware(request, handler):
FILE: aws_xray_sdk/ext/boto_utils.py
function inject_header (line 16) | def inject_header(wrapped, instance, args, kwargs):
function aws_meta_processor (line 35) | def aws_meta_processor(wrapped, instance, args, kwargs,
function _aws_error_handler (line 68) | def _aws_error_handler(exception, stack, subsegment, aws_meta):
function _extract_whitelisted_params (line 86) | def _extract_whitelisted_params(service, operation,
function _record_params (line 115) | def _record_params(whitelisted, actual, aws_meta):
function _record_special_params (line 123) | def _record_special_params(whitelisted, actual, aws_meta):
function _process_descriptor (line 130) | def _process_descriptor(descriptor, value, aws_meta):
FILE: aws_xray_sdk/ext/botocore/patch.py
function patch (line 8) | def patch():
function _xray_traced_botocore (line 30) | def _xray_traced_botocore(wrapped, instance, args, kwargs):
FILE: aws_xray_sdk/ext/bottle/middleware.py
class XRayMiddleware (line 10) | class XRayMiddleware:
method __init__ (line 17) | def __init__(self, recorder):
method apply (line 26) | def apply(self, callback, route):
function _patch_render (line 101) | def _patch_render(recorder):
FILE: aws_xray_sdk/ext/dbapi2.py
class XRayTracedConn (line 7) | class XRayTracedConn(wrapt.ObjectProxy):
method __init__ (line 11) | def __init__(self, conn, meta={}):
method cursor (line 16) | def cursor(self, *args, **kwargs):
class XRayTracedCursor (line 22) | class XRayTracedCursor(wrapt.ObjectProxy):
method __init__ (line 26) | def __init__(self, cursor, meta={}):
method __enter__ (line 36) | def __enter__(self):
method execute (line 44) | def execute(self, query, *args, **kwargs):
method executemany (line 50) | def executemany(self, query, *args, **kwargs):
method callproc (line 56) | def callproc(self, proc, args):
function add_sql_meta (line 62) | def add_sql_meta(meta):
FILE: aws_xray_sdk/ext/django/apps.py
class XRayConfig (line 15) | class XRayConfig(AppConfig):
method ready (line 18) | def ready(self):
FILE: aws_xray_sdk/ext/django/conf.py
class XRaySettings (line 33) | class XRaySettings:
method __init__ (line 39) | def __init__(self, user_settings=None):
method user_settings (line 47) | def user_settings(self):
method __getattr__ (line 54) | def __getattr__(self, attr):
function reload_settings (line 73) | def reload_settings(*args, **kwargs):
FILE: aws_xray_sdk/ext/django/db.py
function patch_db (line 13) | def patch_db():
class DjangoXRayTracedCursor (line 19) | class DjangoXRayTracedCursor(XRayTracedCursor):
method execute (line 20) | def execute(self, query, *args, **kwargs):
method executemany (line 29) | def executemany(self, query, *args, **kwargs):
method callproc (line 38) | def callproc(self, proc, args):
function _patch_cursor (line 48) | def _patch_cursor(cursor_name, conn):
function _patch_conn (line 87) | def _patch_conn(conn):
FILE: aws_xray_sdk/ext/django/middleware.py
class XRayMiddleware (line 22) | class XRayMiddleware:
method __init__ (line 26) | def __init__(self, get_response):
method _urls_as_annotation (line 34) | def _urls_as_annotation(self):
method __call__ (line 43) | def __call__(self, request):
method process_exception (line 117) | def process_exception(self, request, exception):
FILE: aws_xray_sdk/ext/django/templates.py
function patch_template (line 11) | def patch_template():
FILE: aws_xray_sdk/ext/flask/middleware.py
class XRayMiddleware (line 11) | class XRayMiddleware:
method __init__ (line 13) | def __init__(self, app, recorder):
method _before_request (line 28) | def _before_request(self):
method _after_request (line 69) | def _after_request(self, response):
method _teardown_request (line 86) | def _teardown_request(self, exception):
function _patch_render (line 109) | def _patch_render(recorder):
FILE: aws_xray_sdk/ext/flask_sqlalchemy/query.py
class XRayBaseQuery (line 10) | class XRayBaseQuery(BaseQuery):
class XRaySignallingSession (line 14) | class XRaySignallingSession(XRaySession):
method __init__ (line 27) | def __init__(self, db, autocommit=False, autoflush=True, **options):
method get_bind (line 42) | def get_bind(self, mapper=None, clause=None):
class XRayFlaskSqlAlchemy (line 53) | class XRayFlaskSqlAlchemy(SQLAlchemy):
method __init__ (line 54) | def __init__(self, app=None, use_native_unicode=True, session_options=...
method create_session (line 59) | def create_session(self, options):
FILE: aws_xray_sdk/ext/httplib/patch.py
function add_ignored (line 25) | def add_ignored(subclass=None, hostname=None, urls=None):
function reset_ignored (line 32) | def reset_ignored():
function _ignored_add_default (line 38) | def _ignored_add_default():
function http_response_processor (line 47) | def http_response_processor(wrapped, instance, args, kwargs, return_value,
function _xray_traced_http_getresponse (line 67) | def _xray_traced_http_getresponse(wrapped, instance, args, kwargs):
function http_send_request_processor (line 80) | def http_send_request_processor(wrapped, instance, args, kwargs, return_...
function _ignore_request (line 94) | def _ignore_request(instance, hostname, url):
function _send_request (line 110) | def _send_request(wrapped, instance, args, kwargs):
function http_read_processor (line 151) | def http_read_processor(wrapped, instance, args, kwargs, return_value,
function _xray_traced_http_client_read (line 166) | def _xray_traced_http_client_read(wrapped, instance, args, kwargs):
function patch (line 179) | def patch():
function unpatch (line 207) | def unpatch():
FILE: aws_xray_sdk/ext/httpx/patch.py
function patch (line 8) | def patch():
class _InstrumentedClient (line 14) | class _InstrumentedClient(httpx.Client):
method __init__ (line 15) | def __init__(self, *args, **kwargs):
class _InstrumentedAsyncClient (line 22) | class _InstrumentedAsyncClient(httpx.AsyncClient):
method __init__ (line 23) | def __init__(self, *args, **kwargs):
class SyncInstrumentedTransport (line 30) | class SyncInstrumentedTransport(httpx.BaseTransport):
method __init__ (line 31) | def __init__(self, transport: httpx.BaseTransport):
method handle_request (line 34) | def handle_request(self, request: httpx.Request) -> httpx.Response:
class AsyncInstrumentedTransport (line 52) | class AsyncInstrumentedTransport(httpx.AsyncBaseTransport):
method __init__ (line 53) | def __init__(self, transport: httpx.AsyncBaseTransport):
method handle_async_request (line 56) | async def handle_async_request(self, request: httpx.Request) -> httpx....
FILE: aws_xray_sdk/ext/mysql/patch.py
function patch (line 13) | def patch():
function _xray_traced_connect (line 26) | def _xray_traced_connect(wrapped, instance, args, kwargs):
function sanitize_db_ver (line 43) | def sanitize_db_ver(raw):
FILE: aws_xray_sdk/ext/pg8000/patch.py
function patch (line 9) | def patch():
function _xray_traced_connect (line 18) | def _xray_traced_connect(wrapped, instance, args, kwargs):
function unpatch (line 35) | def unpatch():
FILE: aws_xray_sdk/ext/psycopg/patch.py
function patch (line 7) | def patch():
function _xray_traced_connect (line 21) | def _xray_traced_connect(wrapped, instance, args, kwargs):
FILE: aws_xray_sdk/ext/psycopg2/patch.py
function patch (line 9) | def patch():
function _xray_traced_connect (line 33) | def _xray_traced_connect(wrapped, instance, args, kwargs):
function _xray_register_type_fix (line 52) | def _xray_register_type_fix(wrapped, instance, args, kwargs):
function _xray_register_default_jsonb_fix (line 61) | def _xray_register_default_jsonb_fix(wrapped, instance, args, kwargs):
FILE: aws_xray_sdk/ext/pymongo/patch.py
class XrayCommandListener (line 6) | class XrayCommandListener(monitoring.CommandListener):
method __init__ (line 17) | def __init__(self, record_full_documents):
method started (line 21) | def started(self, event):
method succeeded (line 35) | def succeeded(self, event):
method failed (line 42) | def failed(self, event):
function patch (line 50) | def patch(record_full_documents=False):
FILE: aws_xray_sdk/ext/pymysql/patch.py
function patch (line 9) | def patch():
function _xray_traced_connect (line 22) | def _xray_traced_connect(wrapped, instance, args, kwargs):
function sanitize_db_ver (line 39) | def sanitize_db_ver(raw):
function unpatch (line 47) | def unpatch():
FILE: aws_xray_sdk/ext/pynamodb/patch.py
function patch (line 17) | def patch():
function _xray_traced_pynamodb (line 40) | def _xray_traced_pynamodb(wrapped, instance, args, kwargs):
function pynamodb_meta_processor (line 58) | def pynamodb_meta_processor(wrapped, instance, args, kwargs, return_value,
FILE: aws_xray_sdk/ext/requests/patch.py
function patch (line 8) | def patch():
function _xray_traced_requests (line 23) | def _xray_traced_requests(wrapped, instance, args, kwargs):
function _inject_header (line 35) | def _inject_header(wrapped, instance, args, kwargs):
function requests_processor (line 44) | def requests_processor(wrapped, instance, args, kwargs,
FILE: aws_xray_sdk/ext/sqlalchemy/query.py
class XRaySession (line 8) | class XRaySession(Session):
class XRayQuery (line 13) | class XRayQuery(Query):
class XRaySessionMaker (line 18) | class XRaySessionMaker(sessionmaker):
method __init__ (line 19) | def __init__(self, bind=None, class_=XRaySession, autoflush=True,
FILE: aws_xray_sdk/ext/sqlalchemy/util/decorators.py
function decorate_all_functions (line 11) | def decorate_all_functions(function_decorator):
function xray_on_call (line 27) | def xray_on_call(cls, func):
function parse_bind (line 93) | def parse_bind(bind):
FILE: aws_xray_sdk/ext/sqlalchemy_core/patch.py
function _sql_meta (line 14) | def _sql_meta(engine_instance, args):
function _xray_traced_sqlalchemy_execute (line 57) | def _xray_traced_sqlalchemy_execute(wrapped, instance, args, kwargs):
function _xray_traced_sqlalchemy_session (line 61) | def _xray_traced_sqlalchemy_session(wrapped, instance, args, kwargs):
function _process_request (line 65) | def _process_request(wrapped, engine_instance, args, kwargs):
function patch (line 86) | def patch():
function unpatch (line 100) | def unpatch():
FILE: aws_xray_sdk/ext/sqlite3/patch.py
function patch (line 7) | def patch():
function _xray_traced_connect (line 16) | def _xray_traced_connect(wrapped, instance, args, kwargs):
class XRayTracedSQLite (line 29) | class XRayTracedSQLite(XRayTracedConn):
method execute (line 31) | def execute(self, *args, **kwargs):
method executemany (line 34) | def executemany(self, *args, **kwargs):
FILE: aws_xray_sdk/ext/util.py
function inject_trace_header (line 14) | def inject_trace_header(headers, entity):
function calculate_sampling_decision (line 44) | def calculate_sampling_decision(trace_header, recorder, sampling_req):
function construct_xray_header (line 62) | def construct_xray_header(headers):
function calculate_segment_name (line 76) | def calculate_segment_name(host_name, recorder):
function prepare_response_header (line 88) | def prepare_response_header(origin_header, segment):
function to_snake_case (line 102) | def to_snake_case(name):
function strip_url (line 112) | def strip_url(url):
function get_hostname (line 121) | def get_hostname(url):
function unwrap (line 131) | def unwrap(obj, attr):
FILE: aws_xray_sdk/sdk_config.py
class SDKConfig (line 7) | class SDKConfig:
method __get_enabled_from_env (line 35) | def __get_enabled_from_env(cls):
method sdk_enabled (line 52) | def sdk_enabled(cls):
method set_sdk_enabled (line 61) | def set_sdk_enabled(cls, value):
FILE: sample-apps/flask/application.py
class User (line 20) | class User(db.Model):
function callHTTP (line 29) | def callHTTP():
function callAWSSDK (line 36) | def callAWSSDK():
function callSQL (line 45) | def callSQL():
function default (line 55) | def default():
FILE: tests/distributioncheck/test_sanity.py
function test_create_segment (line 3) | def test_create_segment():
FILE: tests/ext/aiobotocore/test_aiobotocore.py
function recorder (line 15) | def recorder(event_loop):
function test_describe_table (line 27) | async def test_describe_table(event_loop, recorder):
function test_s3_parameter_capture (line 50) | async def test_s3_parameter_capture(event_loop, recorder):
function test_list_parameter_counting (line 75) | async def test_list_parameter_counting(event_loop, recorder):
function test_map_parameter_grouping (line 108) | async def test_map_parameter_grouping(event_loop, recorder):
function test_context_missing_not_swallow_return (line 136) | async def test_context_missing_not_swallow_return(event_loop, recorder):
function test_context_missing_not_suppress_exception (line 152) | async def test_context_missing_not_suppress_exception(event_loop, record...
FILE: tests/ext/aiohttp/test_client.py
function recorder (line 20) | def recorder(loop):
function test_ok (line 30) | async def test_ok(loop, recorder):
function test_ok_name (line 49) | async def test_ok_name(loop, recorder):
function test_error (line 62) | async def test_error(loop, recorder):
function test_throttle (line 81) | async def test_throttle(loop, recorder):
function test_fault (line 101) | async def test_fault(loop, recorder):
function test_invalid_url (line 120) | async def test_invalid_url(loop, recorder):
function test_no_segment_raise (line 139) | async def test_no_segment_raise(loop, recorder):
function test_no_segment_log_error (line 150) | async def test_no_segment_log_error(loop, recorder, caplog):
function test_no_segment_ignore_error (line 165) | async def test_no_segment_ignore_error(loop, recorder, caplog):
FILE: tests/ext/aiohttp/test_middleware.py
class CustomStubbedEmitter (line 22) | class CustomStubbedEmitter(UDPEmitter):
method __init__ (line 27) | def __init__(self, daemon_address='127.0.0.1:2000'):
method send_entity (line 31) | def send_entity(self, entity):
method pop (line 34) | def pop(self):
class ServerTest (line 41) | class ServerTest:
method __init__ (line 47) | def __init__(self, loop):
method handle_ok (line 50) | async def handle_ok(self, request: web.Request) -> web.Response:
method handle_error (line 61) | async def handle_error(self, request: web.Request) -> web.Response:
method handle_unauthorized (line 67) | async def handle_unauthorized(self, request: web.Request) -> web.Respo...
method handle_exception (line 73) | async def handle_exception(self, request: web.Request) -> web.Response:
method handle_delay (line 80) | async def handle_delay(self, request: web.Request) -> web.Response:
method get_app (line 90) | def get_app(self) -> web.Application:
method app (line 101) | def app(cls, loop=None) -> web.Application:
function recorder (line 106) | def recorder(loop):
function test_ok (line 123) | async def test_ok(aiohttp_client, loop, recorder):
function test_ok_x_forwarded_for (line 147) | async def test_ok_x_forwarded_for(aiohttp_client, loop, recorder):
function test_ok_content_length (line 165) | async def test_ok_content_length(aiohttp_client, loop, recorder):
function test_error (line 182) | async def test_error(aiohttp_client, loop, recorder):
function test_exception (line 207) | async def test_exception(aiohttp_client, loop, recorder):
function test_unhauthorized (line 234) | async def test_unhauthorized(aiohttp_client, loop, recorder):
function test_response_trace_header (line 259) | async def test_response_trace_header(aiohttp_client, loop, recorder):
function test_concurrent (line 269) | async def test_concurrent(aiohttp_client, loop, recorder):
function test_disabled_sdk (line 295) | async def test_disabled_sdk(aiohttp_client, loop, recorder):
FILE: tests/ext/botocore/test_botocore.py
function construct_ctx (line 16) | def construct_ctx():
function test_ddb_table_name (line 29) | def test_ddb_table_name():
function test_s3_bucket_name_capture (line 53) | def test_s3_bucket_name_capture():
function test_list_parameter_counting (line 77) | def test_list_parameter_counting():
function test_map_parameter_grouping (line 107) | def test_map_parameter_grouping():
function test_pass_through_on_context_missing (line 131) | def test_pass_through_on_context_missing():
function test_sns_publish_parameters (line 155) | def test_sns_publish_parameters():
FILE: tests/ext/bottle/test_bottle.py
function ok (line 20) | def ok():
function error (line 28) | def error():
function faulty_client (line 34) | def faulty_client():
function faulty_server (line 44) | def faulty_server():
function fault (line 49) | def fault():
function template_ (line 54) | def template_():
function view_ (line 60) | def view_(name='bottle'):
function cleanup (line 75) | def cleanup():
function test_ok (line 85) | def test_ok():
function test_error (line 101) | def test_error():
function test_custom_client_error (line 119) | def test_custom_client_error():
function test_server_error (line 139) | def test_server_error():
function test_fault (line 156) | def test_fault():
function test_render_template (line 173) | def test_render_template():
function test_render_view (line 187) | def test_render_view():
function test_incoming_sampling_decision_respected (line 202) | def test_incoming_sampling_decision_respected():
function test_trace_header_data_perservation (line 214) | def test_trace_header_data_perservation():
function test_sampled_response_header (line 223) | def test_sampled_response_header():
function test_disabled_sdk (line 233) | def test_disabled_sdk():
function test_lambda_serverless (line 241) | def test_lambda_serverless():
function test_lambda_default_ctx (line 285) | def test_lambda_default_ctx():
FILE: tests/ext/django/app/views.py
class IndexView (line 8) | class IndexView(TemplateView):
class TemplateBlockView (line 12) | class TemplateBlockView(TemplateView):
function ok (line 16) | def ok(request):
function fault (line 20) | def fault(request):
function call_db (line 24) | def call_db(request):
FILE: tests/ext/django/test_db.py
function setup (line 11) | def setup():
function user_class (line 18) | def user_class(setup):
function func_setup (line 37) | def func_setup(request, user_class):
function _assert_query (line 52) | def _assert_query(sql_meta):
function test_all (line 64) | def test_all(user_class):
function test_filter (line 75) | def test_filter(user_class):
FILE: tests/ext/django/test_middleware.py
class XRayTestCase (line 14) | class XRayTestCase(TestCase):
method setUp (line 16) | def setUp(self):
method tearDown (line 22) | def tearDown(self):
method test_ok (line 25) | def test_ok(self):
method test_error (line 37) | def test_error(self):
method test_fault (line 49) | def test_fault(self):
method test_db (line 68) | def test_db(self):
method test_template (line 83) | def test_template(self):
method test_template_block (line 94) | def test_template_block(self):
method test_trace_header_data_perservation (line 105) | def test_trace_header_data_perservation(self):
method test_response_header (line 113) | def test_response_header(self):
method test_disabled_sdk (line 122) | def test_disabled_sdk(self):
method test_lambda_serverless (line 129) | def test_lambda_serverless(self):
method test_lambda_default_ctx (line 167) | def test_lambda_default_ctx(self):
FILE: tests/ext/django/test_settings.py
class XRayConfigurationTestCase (line 13) | class XRayConfigurationTestCase(TestCase):
method test_sampler_can_be_configured (line 14) | def test_sampler_can_be_configured(self):
FILE: tests/ext/flask/test_flask.py
function ok (line 17) | def ok():
function error (line 22) | def error():
function fault (line 27) | def fault():
function fault_no_exception (line 32) | def fault_no_exception():
function template (line 37) | def template():
function cleanup (line 57) | def cleanup():
function test_ok (line 67) | def test_ok():
function test_error (line 83) | def test_error():
function test_fault (line 98) | def test_fault():
function test_fault_no_exception (line 115) | def test_fault_no_exception():
function test_render_template (line 127) | def test_render_template():
function test_incoming_sampling_decision_respected (line 141) | def test_incoming_sampling_decision_respected():
function test_trace_header_data_perservation (line 152) | def test_trace_header_data_perservation():
function test_sampled_response_header (line 161) | def test_sampled_response_header():
function test_disabled_sdk (line 171) | def test_disabled_sdk():
function test_lambda_serverless (line 179) | def test_lambda_serverless():
function test_lambda_default_ctx (line 221) | def test_lambda_default_ctx():
FILE: tests/ext/flask_sqlalchemy/test_query.py
class User (line 15) | class User(db.Model):
function session (line 30) | def session(request):
function test_all (line 41) | def test_all(capsys, session):
function test_add (line 52) | def test_add(capsys, session):
FILE: tests/ext/httplib/test_httplib.py
function construct_ctx (line 15) | def construct_ctx():
function _do_req (line 34) | def _do_req(url, method='GET', use_https=True):
function test_ok (line 49) | def test_ok():
function test_error (line 62) | def test_error():
function test_throttle (line 76) | def test_throttle():
function test_fault (line 91) | def test_fault():
function test_invalid_url (line 105) | def test_invalid_url():
function test_correct_identify_http (line 118) | def test_correct_identify_http():
function test_correct_identify_https (line 129) | def test_correct_identify_https():
function test_ignore_url (line 140) | def test_ignore_url():
function test_ignore_hostname (line 149) | def test_ignore_hostname():
function test_ignore_hostname_glob (line 158) | def test_ignore_hostname_glob():
class CustomHttpsConnection (line 167) | class CustomHttpsConnection(httplib.HTTPSConnection):
function test_ignore_subclass (line 171) | def test_ignore_subclass():
function test_ignore_multiple_match (line 182) | def test_ignore_multiple_match():
function test_ignore_multiple_no_match (line 193) | def test_ignore_multiple_no_match():
FILE: tests/ext/httpx/test_httpx.py
function construct_ctx (line 17) | def construct_ctx():
function test_ok (line 31) | def test_ok(use_client):
function test_error (line 53) | def test_error(use_client):
function test_throttle (line 75) | def test_throttle(use_client):
function test_fault (line 98) | def test_fault(use_client):
function test_nonexistent_domain (line 120) | def test_nonexistent_domain(use_client):
function test_invalid_url (line 137) | def test_invalid_url(use_client):
function test_name_uses_hostname (line 159) | def test_name_uses_hostname(use_client):
function test_strip_http_url (line 201) | def test_strip_http_url(use_client):
FILE: tests/ext/httpx/test_httpx_async.py
function construct_ctx (line 17) | def construct_ctx():
function test_ok_async (line 31) | async def test_ok_async():
function test_error_async (line 50) | async def test_error_async():
function test_throttle_async (line 69) | async def test_throttle_async():
function test_fault_async (line 89) | async def test_fault_async():
function test_nonexistent_domain_async (line 108) | async def test_nonexistent_domain_async():
function test_invalid_url_async (line 122) | async def test_invalid_url_async():
function test_name_uses_hostname_async (line 141) | async def test_name_uses_hostname_async():
function test_strip_http_url_async (line 176) | async def test_strip_http_url_async():
FILE: tests/ext/pg8000/test_pg8000.py
function patch_module (line 13) | def patch_module():
function construct_ctx (line 20) | def construct_ctx():
function test_execute_dsn_kwargs (line 33) | def test_execute_dsn_kwargs():
function test_execute_bad_query (line 53) | def test_execute_bad_query():
FILE: tests/ext/psycopg/test_psycopg.py
function construct_ctx (line 16) | def construct_ctx():
function test_execute_dsn_kwargs (line 29) | def test_execute_dsn_kwargs():
function test_execute_dsn_string (line 51) | def test_execute_dsn_string():
function test_execute_in_pool (line 73) | def test_execute_in_pool():
function test_execute_bad_query (line 98) | def test_execute_bad_query():
function test_query_as_string (line 125) | def test_query_as_string():
FILE: tests/ext/psycopg2/test_psycopg2.py
function construct_ctx (line 17) | def construct_ctx():
function test_execute_dsn_kwargs (line 30) | def test_execute_dsn_kwargs():
function test_execute_dsn_kwargs_alt_dbname (line 52) | def test_execute_dsn_kwargs_alt_dbname():
function test_execute_dsn_string (line 78) | def test_execute_dsn_string():
function test_execute_in_pool (line 100) | def test_execute_in_pool():
function test_execute_bad_query (line 123) | def test_execute_bad_query():
function test_register_extensions (line 151) | def test_register_extensions():
function test_query_as_string (line 164) | def test_query_as_string():
function test_register_default_jsonb (line 178) | def test_register_default_jsonb():
FILE: tests/ext/pymysql/test_pymysql.py
function patch_module (line 17) | def patch_module():
function construct_ctx (line 24) | def construct_ctx():
function test_execute_dsn_kwargs (line 37) | def test_execute_dsn_kwargs():
function test_execute_bad_query (line 56) | def test_execute_bad_query():
FILE: tests/ext/pynamodb/test_pynamodb.py
function construct_ctx (line 18) | def construct_ctx():
function test_exception (line 31) | def test_exception():
function test_empty_response (line 57) | def test_empty_response():
function test_only_dynamodb_calls_are_traced (line 80) | def test_only_dynamodb_calls_are_traced():
FILE: tests/ext/requests/test_requests.py
function construct_ctx (line 17) | def construct_ctx():
function test_ok (line 30) | def test_ok():
function test_error (line 44) | def test_error():
function test_throttle (line 58) | def test_throttle():
function test_fault (line 73) | def test_fault():
function test_nonexistent_domain (line 87) | def test_nonexistent_domain():
function test_invalid_url (line 100) | def test_invalid_url():
function test_name_uses_hostname (line 118) | def test_name_uses_hostname():
function test_strip_http_url (line 148) | def test_strip_http_url():
FILE: tests/ext/sqlalchemy/test_query.py
class User (line 13) | class User(Base):
function engine (line 23) | def engine():
function session (line 28) | def session(engine):
function connection (line 43) | def connection(engine):
function test_all (line 57) | def test_all(capsys, session):
function test_supports_connection (line 68) | def test_supports_connection(capsys, connection):
function test_add (line 76) | def test_add(capsys, session):
function test_filter_first (line 87) | def test_filter_first(capsys, session):
FILE: tests/ext/sqlalchemy_core/test_base.py
class User (line 12) | class User(Base):
function db_url (line 22) | def db_url():
function engine (line 27) | def engine(db_url):
function connection (line 47) | def connection(engine):
function session (line 52) | def session(engine):
FILE: tests/ext/sqlalchemy_core/test_dburl.py
function construct_ctx (line 18) | def construct_ctx():
function test_db_url_with_special_char (line 31) | def test_db_url_with_special_char():
FILE: tests/ext/sqlalchemy_core/test_postgres.py
function postgres_db (line 15) | def postgres_db():
function db_url (line 21) | def db_url(postgres_db):
function sanitized_db_url (line 26) | def sanitized_db_url(postgres_db):
function test_all (line 36) | def test_all(session, sanitized_db_url):
function test_insert_on_conflict_renders (line 47) | def test_insert_on_conflict_renders(connection):
FILE: tests/ext/sqlalchemy_core/test_sqlalchemy_core.py
function test_all (line 5) | def test_all(session):
function test_filter_first (line 16) | def test_filter_first(session):
function test_connection_add (line 27) | def test_connection_add(connection):
function test_connection_query (line 38) | def test_connection_query(connection):
FILE: tests/ext/sqlalchemy_core/test_sqlalchemy_core_2.py
function test_orm_style_select_execution (line 7) | def test_orm_style_select_execution(session):
FILE: tests/ext/sqlite3/test_sqlite3.py
function db (line 11) | def db():
function construct_ctx (line 17) | def construct_ctx():
function test_execute (line 30) | def test_execute(db):
function test_invalid_syntax (line 42) | def test_invalid_syntax(db):
FILE: tests/mock_module/__init__.py
function mock_init (line 1) | def mock_init():
FILE: tests/mock_module/mock_file.py
function mock_func (line 1) | def mock_func():
FILE: tests/mock_module/mock_submodule/__init__.py
function mock_subinit (line 1) | def mock_subinit():
FILE: tests/mock_module/mock_submodule/mock_subfile.py
function mock_subfunc (line 4) | def mock_subfunc():
function mock_no_doublepatch (line 9) | def mock_no_doublepatch():
class MockClass (line 13) | class MockClass:
method __init__ (line 14) | def __init__(self):
method mock_method (line 17) | def mock_method(self):
method mock_classmethod (line 21) | def mock_classmethod(cls):
method mock_staticmethod (line 26) | def mock_staticmethod():
class MockSubclass (line 30) | class MockSubclass(MockClass):
method __init__ (line 31) | def __init__(self):
method mock_submethod (line 34) | def mock_submethod(self):
FILE: tests/test_async_local_storage.py
function test_localstorage_isolation (line 8) | def test_localstorage_isolation(event_loop):
FILE: tests/test_async_recorder.py
function async_method2 (line 13) | async def async_method2():
function async_method (line 18) | async def async_method():
function test_capture (line 22) | async def test_capture(event_loop):
function test_concurrent_calls (line 49) | async def test_concurrent_calls(event_loop):
function test_async_context_managers (line 74) | async def test_async_context_managers(event_loop):
FILE: tests/test_daemon_config.py
function test_default_config (line 11) | def test_default_config():
function test_single_address (line 20) | def test_single_address():
function test_set_tcp_udp_separately (line 30) | def test_set_tcp_udp_separately():
function test_invalid_address (line 48) | def test_invalid_address():
FILE: tests/test_dummy_entites.py
function test_not_sampled (line 5) | def test_not_sampled():
function test_no_ops (line 13) | def test_no_ops():
function test_structure_intact (line 42) | def test_structure_intact():
function test_invalid_entity_name (line 58) | def test_invalid_entity_name():
function test_dummy_segment_trace_id (line 66) | def test_dummy_segment_trace_id():
FILE: tests/test_facade_segment.py
function test_not_ready (line 9) | def test_not_ready():
function test_initializing (line 16) | def test_initializing():
function test_unsupported_operations (line 25) | def test_unsupported_operations():
function test_structure_intact (line 48) | def test_structure_intact():
function test_adding_unsampled_subsegment (line 59) | def test_adding_unsampled_subsegment():
FILE: tests/test_lambda_context.py
function setup (line 20) | def setup():
function test_facade_segment_generation (line 25) | def test_facade_segment_generation():
function test_put_subsegment (line 34) | def test_put_subsegment():
function test_disable (line 58) | def test_disable():
function test_non_initialized (line 70) | def test_non_initialized():
function test_lambda_passthrough (line 89) | def test_lambda_passthrough():
function test_set_trace_entity (line 114) | def test_set_trace_entity():
FILE: tests/test_local_sampling.py
function test_should_trace (line 35) | def test_should_trace():
function test_missing_version_num (line 43) | def test_missing_version_num():
function test_default_matching (line 51) | def test_default_matching():
function test_path_matching (line 56) | def test_path_matching():
function test_negative_rate (line 66) | def test_negative_rate():
function test_negative_fixed_target (line 74) | def test_negative_fixed_target():
function test_invalid_default (line 82) | def test_invalid_default():
function test_incomplete_path_rule (line 88) | def test_incomplete_path_rule():
FILE: tests/test_local_sampling_benchmark.py
function test_pkgutil_static_read (line 6) | def test_pkgutil_static_read(benchmark):
function test_pathlib_static_read (line 12) | def test_pathlib_static_read(benchmark):
FILE: tests/test_patcher.py
function construct_ctx (line 30) | def construct_ctx():
function _call_all_mock_functions (line 61) | def _call_all_mock_functions():
function test_incorrect_import_fails (line 81) | def test_incorrect_import_fails(modules):
function test_external_file (line 87) | def test_external_file():
function test_external_module (line 101) | def test_external_module():
function test_external_submodules_full (line 121) | def test_external_submodules_full():
function test_external_submodules_ignores_file (line 143) | def test_external_submodules_ignores_file():
function test_external_submodules_ignores_module (line 164) | def test_external_submodules_ignores_module():
function test_disable_sdk_disables_patching (line 179) | def test_disable_sdk_disables_patching():
FILE: tests/test_plugins.py
function test_runtime_context_available (line 12) | def test_runtime_context_available():
function test_ec2_plugin_imdsv2_success (line 21) | def test_ec2_plugin_imdsv2_success(mock_do_request):
function test_ec2_plugin_v2_fail_v1_success (line 38) | def test_ec2_plugin_v2_fail_v1_success(mock_do_request):
function test_ec2_plugin_v2_fail_v1_fail (line 55) | def test_ec2_plugin_v2_fail_v1_fail(mock_do_request):
FILE: tests/test_recorder.py
function construct_ctx (line 22) | def construct_ctx(monkeypatch):
function test_default_runtime_context (line 34) | def test_default_runtime_context():
function test_subsegment_parenting (line 45) | def test_subsegment_parenting():
function test_subsegments_streaming (line 68) | def test_subsegments_streaming():
function test_subsegment_streaming_set_zero (line 81) | def test_subsegment_streaming_set_zero():
function test_put_annotation_metadata (line 91) | def test_put_annotation_metadata():
function test_default_pass_through_with_missing_context (line 103) | def test_default_pass_through_with_missing_context():
function test_raise_runtime_error_with_missing_context (line 112) | def test_raise_runtime_error_with_missing_context():
function test_capture_not_suppress_exception (line 120) | def test_capture_not_suppress_exception():
function test_capture_not_swallow_return (line 132) | def test_capture_not_swallow_return():
function test_first_begin_segment_sampled (line 145) | def test_first_begin_segment_sampled():
function test_unsampled_subsegment_of_sampled_parent (line 152) | def test_unsampled_subsegment_of_sampled_parent():
function test_begin_subsegment_unsampled (line 161) | def test_begin_subsegment_unsampled():
function test_in_segment_closing (line 171) | def test_in_segment_closing():
function test_in_segment_exception (line 192) | def test_in_segment_exception():
function test_default_enabled (line 215) | def test_default_enabled():
function test_disable_is_dummy (line 223) | def test_disable_is_dummy():
function test_unsampled_subsegment_is_dummy (line 230) | def test_unsampled_subsegment_is_dummy():
function test_subsegment_respects_parent_sampling_decision (line 237) | def test_subsegment_respects_parent_sampling_decision():
function test_disabled_empty_context_current_calls (line 248) | def test_disabled_empty_context_current_calls():
function test_disabled_out_of_order_begins (line 254) | def test_disabled_out_of_order_begins():
function test_disabled_put_methods (line 264) | def test_disabled_put_methods():
function test_disabled_ends (line 272) | def test_disabled_ends():
function test_disabled_begin_subsegment (line 283) | def test_disabled_begin_subsegment():
function test_disabled_force_sampling (line 290) | def test_disabled_force_sampling():
function test_disabled_get_context_entity (line 300) | def test_disabled_get_context_entity():
function test_max_stack_trace_zero (line 306) | def test_max_stack_trace_zero():
class CustomSampler (line 329) | class CustomSampler(DefaultSampler):
method start (line 330) | def start(self):
method should_trace (line 333) | def should_trace(self, sampling_req=None):
function test_begin_segment_matches_sampling_rule_on_name (line 359) | def test_begin_segment_matches_sampling_rule_on_name():
FILE: tests/test_sampling_rule_cache.py
function reset_rules (line 25) | def reset_rules():
function test_rules_sorting (line 36) | def test_rules_sorting():
function test_evict_deleted_rules (line 48) | def test_evict_deleted_rules():
function test_rule_matching (line 58) | def test_rule_matching():
function test_preserving_sampling_statistics (line 90) | def test_preserving_sampling_statistics():
function test_correct_target_mapping (line 110) | def test_correct_target_mapping():
function test_expired_cache (line 124) | def test_expired_cache():
FILE: tests/test_sdk_config.py
function cleanup (line 10) | def cleanup():
function test_enable_key (line 22) | def test_enable_key():
function test_default_enabled (line 26) | def test_default_enabled():
function test_env_var_precedence (line 30) | def test_env_var_precedence():
function test_env_enable_case (line 49) | def test_env_enable_case():
function test_invalid_env_string (line 84) | def test_invalid_env_string():
FILE: tests/test_serialize_entities.py
function test_serialize_segment (line 14) | def test_serialize_segment():
function test_serialize_segment_with_aws (line 32) | def test_serialize_segment_with_aws():
function test_serialize_segment_with_services (line 66) | def test_serialize_segment_with_services():
function test_serialize_segment_with_annotation (line 96) | def test_serialize_segment_with_annotation():
function test_serialize_segment_with_metadata (line 120) | def test_serialize_segment_with_metadata():
function test_serialize_segment_with_http (line 217) | def test_serialize_segment_with_http():
function test_serialize_segment_with_exception (line 257) | def test_serialize_segment_with_exception():
function test_serialize_subsegment (line 363) | def test_serialize_subsegment():
function test_serialize_subsegment_with_http (line 386) | def test_serialize_subsegment_with_http():
function test_serialize_subsegment_with_sql (line 425) | def test_serialize_subsegment_with_sql():
function test_serialize_subsegment_with_aws (line 469) | def test_serialize_subsegment_with_aws():
function test_serialize_with_ast_metadata (line 515) | def test_serialize_with_ast_metadata():
FILE: tests/test_sqs_message_helper.py
function test_return_true_when_sampling_1 (line 61) | def test_return_true_when_sampling_1():
function test_return_false_when_sampling_0 (line 64) | def test_return_false_when_sampling_0():
function test_return_false_with_no_sampling_flag (line 67) | def test_return_false_with_no_sampling_flag():
FILE: tests/test_throwable.py
function test_message_and_type (line 4) | def test_message_and_type():
function test_stack_trace_parsing (line 13) | def test_stack_trace_parsing():
FILE: tests/test_trace_entities.py
function test_unicode_entity_name (line 18) | def test_unicode_entity_name():
function test_segment_user (line 29) | def test_segment_user():
function test_put_http_meta (line 37) | def test_put_http_meta():
function test_put_metadata (line 51) | def test_put_metadata():
function test_put_annotation (line 71) | def test_put_annotation():
function test_reference_counting (line 96) | def test_reference_counting():
function test_flags_on_status_code (line 121) | def test_flags_on_status_code():
function test_mutate_closed_entity (line 137) | def test_mutate_closed_entity():
function test_no_rule_name_pollution (line 155) | def test_no_rule_name_pollution():
function test_no_empty_properties (line 165) | def test_no_empty_properties():
function test_required_properties (line 179) | def test_required_properties():
function test_missing_segment_name (line 191) | def test_missing_segment_name():
function test_missing_parent_segment (line 197) | def test_missing_parent_segment():
function test_add_exception (line 203) | def test_add_exception():
function test_add_exception_referencing (line 221) | def test_add_exception_referencing():
function test_add_exception_cause_resetting (line 239) | def test_add_exception_cause_resetting():
function test_add_exception_appending_exceptions (line 256) | def test_add_exception_appending_exceptions():
function test_adding_subsegments_with_recorder (line 266) | def test_adding_subsegments_with_recorder():
FILE: tests/test_trace_header.py
function test_no_sample (line 8) | def test_no_sample():
function test_no_parent (line 16) | def test_no_parent():
function test_from_str (line 22) | def test_from_str():
function test_arbitrary_fields (line 45) | def test_arbitrary_fields():
function test_invalid_str (line 54) | def test_invalid_str():
FILE: tests/test_traceid.py
function cleanup (line 8) | def cleanup():
function test_id_format (line 19) | def test_id_format():
function test_id_generation_default_sampling_false (line 29) | def test_id_generation_default_sampling_false():
function test_id_generation_default_sampling_true (line 46) | def test_id_generation_default_sampling_true():
function test_id_generation_noop_true (line 63) | def test_id_generation_noop_true():
function test_id_generation_noop_false (line 81) | def test_id_generation_noop_false():
FILE: tests/test_utils.py
function test_to_snake_case (line 12) | def test_to_snake_case():
function test_get_hostname (line 26) | def test_get_hostname():
function test_strip_url (line 49) | def test_strip_url():
function test_inject_trace_header_unsampled (line 63) | def test_inject_trace_header_unsampled():
function test_inject_trace_header_respects_parent_subsegment (line 74) | def test_inject_trace_header_respects_parent_subsegment():
function test_inject_trace_header_sampled (line 86) | def test_inject_trace_header_sampled():
FILE: tests/test_wildcard_match.py
function test_match_exact_positive (line 4) | def test_match_exact_positive():
function test_match_exact_negative (line 10) | def test_match_exact_negative():
function test_single_wildcard_positive (line 16) | def test_single_wildcard_positive():
function test_single_wildcard_negative (line 22) | def test_single_wildcard_negative():
function test_multiple_wildcard_positive (line 28) | def test_multiple_wildcard_positive():
function test_multiple_wildcard_negative (line 34) | def test_multiple_wildcard_negative():
function test_glob_positive_zero_or_more (line 40) | def test_glob_positive_zero_or_more():
function test_glob_negative_zero_or_more (line 46) | def test_glob_negative_zero_or_more():
function test_glob_negative (line 52) | def test_glob_negative():
function test_glob_and_single_positive (line 58) | def test_glob_and_single_positive():
function test_glob_and_single_negative (line 64) | def test_glob_and_single_negative():
function test_pure_wildcard (line 70) | def test_pure_wildcard():
function test_exact_match (line 76) | def test_exact_match():
function test_misc (line 82) | def test_misc():
function test_case_insensitivity (line 105) | def test_case_insensitivity():
function test_no_globs (line 126) | def test_no_globs():
function test_edge_case_globs (line 130) | def test_edge_case_globs():
function test_multi_globs (line 149) | def test_multi_globs():
FILE: tests/util.py
class CircularReferenceClass (line 10) | class CircularReferenceClass:
method __init__ (line 12) | def __init__(self, name):
class StubbedEmitter (line 17) | class StubbedEmitter(UDPEmitter):
method __init__ (line 19) | def __init__(self, daemon_address='127.0.0.1:2000'):
method send_entity (line 23) | def send_entity(self, entity):
method pop (line 26) | def pop(self):
class StubbedSampler (line 36) | class StubbedSampler(DefaultSampler):
method start (line 38) | def start(self):
function get_new_stubbed_recorder (line 42) | def get_new_stubbed_recorder():
function entity_to_dict (line 53) | def entity_to_dict(trace_entity):
function _search_entity (line 59) | def _search_entity(entity, name):
function find_subsegment (line 75) | def find_subsegment(segment, name):
function find_subsegment_by_annotation (line 85) | def find_subsegment_by_annotation(segment, key, value):
function _search_entity_by_annotation (line 95) | def _search_entity_by_annotation(entity, key, value):
function test_metadata_to_dict_self_reference (line 112) | def test_metadata_to_dict_self_reference():
Condensed preview — 261 files, each showing its path, character count, and a content snippet. Download the .json file or copy it to your clipboard to get the full structured content (629K chars).
[
{
"path": ".github/CODEOWNERS",
"chars": 269,
"preview": "#####################################################\n#\n# List of approvers for this repository\n#\n######################"
},
{
"path": ".github/PULL_REQUEST_TEMPLATE.md",
"chars": 169,
"preview": "*Issue #, if available:*\n\n*Description of changes:*\n\n\nBy submitting this pull request, I confirm that my contribution is"
},
{
"path": ".github/dependency-check-suppressions.xml",
"chars": 154,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<suppressions xmlns=\"https://jeremylong.github.io/DependencyCheck/dependency-supp"
},
{
"path": ".github/stale.yml",
"chars": 834,
"preview": "# Number of days of inactivity before an issue becomes stale\ndaysUntilStale: 30\n# Number of days of inactivity before a "
},
{
"path": ".github/trivy/daily-scan.trivyignore.yaml",
"chars": 467,
"preview": "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n# SPDX-License-Identifier: Apache-2.0\n\n# Trivy igno"
},
{
"path": ".github/workflows/IntegrationTesting.yaml",
"chars": 5802,
"preview": "name: Integration Testing\non:\n push:\n branches:\n - master\n\npermissions:\n id-token: write\n contents: read\n\njob"
},
{
"path": ".github/workflows/Release.yaml",
"chars": 909,
"preview": "name: Release X-Ray Python SDK\n\non:\n workflow_dispatch:\n inputs:\n version:\n description: The version to "
},
{
"path": ".github/workflows/UnitTesting.yaml",
"chars": 3049,
"preview": "name: Unit Testing\npermissions:\n contents: read\non:\n push:\n branches:\n - master\n pull_request:\n branches:\n"
},
{
"path": ".github/workflows/continuous-monitoring.yml",
"chars": 1522,
"preview": "name: Continuous monitoring of distribution channels\non:\n workflow_dispatch:\n schedule:\n - cron: '*/10 * * * *'\n\np"
},
{
"path": ".github/workflows/daily-scan.yml",
"chars": 5545,
"preview": "## Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n## SPDX-License-Identifier: Apache-2.0\n# Performs "
},
{
"path": ".gitignore",
"chars": 181,
"preview": ".DS_Store\n*.pyc\n.Python\n.cache\n.pytest_cache\nman\n\nbuild\nbin\ninclude\nlib\ndist\n*.egg\n*.egg-info\n.tox\n.python-version\n.pyte"
},
{
"path": "CHANGELOG.rst",
"chars": 21828,
"preview": "=========\nCHANGELOG\n=========\n\nUnreleased\n==========\n\n2.15.0\n==========\n* bugfix: Fix log stack overflow if metadata con"
},
{
"path": "CODE_OF_CONDUCT.md",
"chars": 311,
"preview": "## Code of Conduct\nThis project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-condu"
},
{
"path": "CONTRIBUTING.md",
"chars": 3587,
"preview": "# Contributing Guidelines\n\nThank you for your interest in contributing to our project. Whether it's a bug report, new fe"
},
{
"path": "LICENSE",
"chars": 11357,
"preview": " Apache License\n Version 2.0, January 2004\n "
},
{
"path": "MANIFEST.in",
"chars": 139,
"preview": "include aws_xray_sdk/ext/resources/*.json\ninclude aws_xray_sdk/core/sampling/local/*.json\ninclude README.md\ninclude LICE"
},
{
"path": "NOTICE",
"chars": 67,
"preview": "Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n"
},
{
"path": "README.md",
"chars": 22255,
"preview": "\n[![codec"
},
{
"path": "__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "aws_xray_sdk/__init__.py",
"chars": 67,
"preview": "from .sdk_config import SDKConfig\n\nglobal_sdk_config = SDKConfig()\n"
},
{
"path": "aws_xray_sdk/core/__init__.py",
"chars": 254,
"preview": "from .async_recorder import AsyncAWSXRayRecorder\nfrom .patcher import patch, patch_all\nfrom .recorder import AWSXRayReco"
},
{
"path": "aws_xray_sdk/core/async_context.py",
"chars": 3698,
"preview": "import asyncio\nimport copy\n\nfrom .context import Context as _Context\n\n\nclass AsyncContext(_Context):\n \"\"\"\n Async C"
},
{
"path": "aws_xray_sdk/core/async_recorder.py",
"chars": 3832,
"preview": "import time\n\nfrom aws_xray_sdk.core.recorder import AWSXRayRecorder\nfrom aws_xray_sdk.core.utils import stacktrace\nfrom "
},
{
"path": "aws_xray_sdk/core/context.py",
"chars": 4988,
"preview": "import threading\nimport logging\nimport os\n\nfrom .exceptions.exceptions import SegmentNotFoundException\nfrom .models.dumm"
},
{
"path": "aws_xray_sdk/core/daemon_config.py",
"chars": 2516,
"preview": "import os\n\nfrom .exceptions.exceptions import InvalidDaemonAddressException\n\nDAEMON_ADDRESS_KEY = \"AWS_XRAY_DAEMON_ADDRE"
},
{
"path": "aws_xray_sdk/core/emitters/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "aws_xray_sdk/core/emitters/udp_emitter.py",
"chars": 2309,
"preview": "import logging\nimport socket\n\nfrom aws_xray_sdk.core.daemon_config import DaemonConfig\nfrom ..exceptions.exceptions impo"
},
{
"path": "aws_xray_sdk/core/exceptions/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "aws_xray_sdk/core/exceptions/exceptions.py",
"chars": 445,
"preview": "class InvalidSamplingManifestError(Exception):\n pass\n\n\nclass SegmentNotFoundException(Exception):\n pass\n\n\nclass In"
},
{
"path": "aws_xray_sdk/core/lambda_launcher.py",
"chars": 5489,
"preview": "import os\nimport logging\nimport threading\n\nfrom aws_xray_sdk import global_sdk_config\nfrom .models.dummy_entities import"
},
{
"path": "aws_xray_sdk/core/models/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "aws_xray_sdk/core/models/default_dynamic_naming.py",
"chars": 1210,
"preview": "from ..utils.search_pattern import wildcard_match\n\n\nclass DefaultDynamicNaming:\n \"\"\"\n Decides what name to use on "
},
{
"path": "aws_xray_sdk/core/models/dummy_entities.py",
"chars": 3164,
"preview": "import os\nfrom .noop_traceid import NoOpTraceId\nfrom .traceid import TraceId\nfrom .segment import Segment\nfrom .subsegme"
},
{
"path": "aws_xray_sdk/core/models/entity.py",
"chars": 10405,
"preview": "import logging\nimport os\nimport binascii\nimport time\nimport string\n\nimport json\n\nfrom ..utils.compat import annotation_v"
},
{
"path": "aws_xray_sdk/core/models/facade_segment.py",
"chars": 3801,
"preview": "from .segment import Segment\nfrom ..exceptions.exceptions import FacadeSegmentMutationException\n\n\nMUTATION_UNSUPPORTED_M"
},
{
"path": "aws_xray_sdk/core/models/http.py",
"chars": 382,
"preview": "URL = \"url\"\nMETHOD = \"method\"\nUSER_AGENT = \"user_agent\"\nCLIENT_IP = \"client_ip\"\nX_FORWARDED_FOR = \"x_forwarded_for\"\n\nSTA"
},
{
"path": "aws_xray_sdk/core/models/noop_traceid.py",
"chars": 708,
"preview": "class NoOpTraceId:\n \"\"\"\n A trace ID tracks the path of a request through your application.\n A trace collects al"
},
{
"path": "aws_xray_sdk/core/models/segment.py",
"chars": 5227,
"preview": "import copy\nimport traceback\n\nfrom .entity import Entity\nfrom .traceid import TraceId\nfrom ..utils.atomic_counter import"
},
{
"path": "aws_xray_sdk/core/models/subsegment.py",
"chars": 5138,
"preview": "import copy\nimport traceback\n\nimport wrapt\n\nfrom .entity import Entity\nfrom ..exceptions.exceptions import SegmentNotFou"
},
{
"path": "aws_xray_sdk/core/models/throwable.py",
"chars": 2354,
"preview": "import copy\nimport os\nimport binascii\nimport logging\n\nlog = logging.getLogger(__name__)\n\n\nclass Throwable:\n \"\"\"\n A"
},
{
"path": "aws_xray_sdk/core/models/trace_header.py",
"chars": 3504,
"preview": "import logging\n\nlog = logging.getLogger(__name__)\n\nROOT = 'Root'\nPARENT = 'Parent'\nSAMPLE = 'Sampled'\nSELF = 'Self'\n\nHEA"
},
{
"path": "aws_xray_sdk/core/models/traceid.py",
"chars": 774,
"preview": "import os\nimport time\nimport binascii\n\n\nclass TraceId:\n \"\"\"\n A trace ID tracks the path of a request through your "
},
{
"path": "aws_xray_sdk/core/patcher.py",
"chars": 8353,
"preview": "import importlib\nimport inspect\nimport logging\nimport os\nimport pkgutil\nimport re\nimport sys\nimport wrapt\n\nfrom aws_xray"
},
{
"path": "aws_xray_sdk/core/plugins/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "aws_xray_sdk/core/plugins/ec2_plugin.py",
"chars": 2123,
"preview": "import json\nimport logging\nfrom urllib.request import Request, urlopen\n\nlog = logging.getLogger(__name__)\n\nSERVICE_NAME "
},
{
"path": "aws_xray_sdk/core/plugins/ecs_plugin.py",
"chars": 432,
"preview": "import socket\nimport logging\n\nlog = logging.getLogger(__name__)\n\nSERVICE_NAME = 'ecs'\nORIGIN = 'AWS::ECS::Container'\n\n\nd"
},
{
"path": "aws_xray_sdk/core/plugins/elasticbeanstalk_plugin.py",
"chars": 469,
"preview": "import logging\nimport json\n\nlog = logging.getLogger(__name__)\n\nCONF_PATH = '/var/elasticbeanstalk/xray/environment.conf'"
},
{
"path": "aws_xray_sdk/core/plugins/utils.py",
"chars": 757,
"preview": "import importlib\nfrom ..exceptions.exceptions import MissingPluginNames\n\nmodule_prefix = 'aws_xray_sdk.core.plugins.'\n\nP"
},
{
"path": "aws_xray_sdk/core/recorder.py",
"chars": 22204,
"preview": "import copy\nimport json\nimport logging\nimport os\nimport platform\nimport time\n\nfrom aws_xray_sdk import global_sdk_config"
},
{
"path": "aws_xray_sdk/core/sampling/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "aws_xray_sdk/core/sampling/connector.py",
"chars": 5829,
"preview": "import binascii\nimport os\nimport time\nfrom datetime import datetime\n\nimport botocore.session\nfrom botocore import UNSIGN"
},
{
"path": "aws_xray_sdk/core/sampling/local/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "aws_xray_sdk/core/sampling/local/reservoir.py",
"chars": 1020,
"preview": "import time\nimport threading\n\n\nclass Reservoir:\n \"\"\"\n Keeps track of the number of sampled segments within\n a s"
},
{
"path": "aws_xray_sdk/core/sampling/local/sampler.py",
"chars": 3536,
"preview": "import json\nimport pkgutil\nfrom random import Random\n\nfrom .sampling_rule import SamplingRule\nfrom ...exceptions.excepti"
},
{
"path": "aws_xray_sdk/core/sampling/local/sampling_rule.json",
"chars": 104,
"preview": "{\n \"version\": 2,\n \"default\": {\n \"fixed_target\": 1,\n \"rate\": 0.05\n },\n \"rules\": [\n ]\n }"
},
{
"path": "aws_xray_sdk/core/sampling/local/sampling_rule.py",
"chars": 3691,
"preview": "from .reservoir import Reservoir\nfrom ...exceptions.exceptions import InvalidSamplingManifestError\nfrom aws_xray_sdk.cor"
},
{
"path": "aws_xray_sdk/core/sampling/reservoir.py",
"chars": 2727,
"preview": "import threading\nfrom enum import Enum\n\n\nclass Reservoir:\n \"\"\"\n Centralized thread-safe reservoir which holds fixe"
},
{
"path": "aws_xray_sdk/core/sampling/rule_cache.py",
"chars": 2548,
"preview": "import threading\nfrom operator import attrgetter\n\nTTL = 60 * 60 # The cache expires 1 hour after the last refresh time."
},
{
"path": "aws_xray_sdk/core/sampling/rule_poller.py",
"chars": 1880,
"preview": "import logging\nfrom random import Random\nimport time\nimport threading\n\nlog = logging.getLogger(__name__)\n\nDEFAULT_INTERV"
},
{
"path": "aws_xray_sdk/core/sampling/sampler.py",
"chars": 4626,
"preview": "import logging\nfrom random import Random\nimport time\nimport threading\n\nfrom .local.sampler import LocalSampler\nfrom .rul"
},
{
"path": "aws_xray_sdk/core/sampling/sampling_rule.py",
"chars": 4338,
"preview": "import threading\n\nfrom .reservoir import Reservoir\nfrom aws_xray_sdk.core.utils.search_pattern import wildcard_match\n\n\nc"
},
{
"path": "aws_xray_sdk/core/sampling/target_poller.py",
"chars": 2277,
"preview": "import logging\nfrom random import Random\nimport time\nimport threading\n\nlog = logging.getLogger(__name__)\n\n\nclass TargetP"
},
{
"path": "aws_xray_sdk/core/streaming/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "aws_xray_sdk/core/streaming/default_streaming.py",
"chars": 1975,
"preview": "import threading\n\n\nclass DefaultStreaming:\n \"\"\"\n The default streaming strategy. It uses the total count of a\n "
},
{
"path": "aws_xray_sdk/core/utils/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "aws_xray_sdk/core/utils/atomic_counter.py",
"chars": 688,
"preview": "import threading\n\n\nclass AtomicCounter:\n \"\"\"\n A helper class that implements a thread-safe counter.\n \"\"\"\n de"
},
{
"path": "aws_xray_sdk/core/utils/compat.py",
"chars": 590,
"preview": "import inspect\n\nannotation_value_types = (int, float, bool, str)\n\n\ndef is_classmethod(func):\n return getattr(func, '_"
},
{
"path": "aws_xray_sdk/core/utils/conversion.py",
"chars": 1259,
"preview": "import logging\n\nlog = logging.getLogger(__name__)\n\ndef metadata_to_dict(obj):\n \"\"\"\n Convert object to dict with al"
},
{
"path": "aws_xray_sdk/core/utils/search_pattern.py",
"chars": 1892,
"preview": "def wildcard_match(pattern, text, case_insensitive=True):\n \"\"\"\n Performs a case-insensitive wildcard match against"
},
{
"path": "aws_xray_sdk/core/utils/sqs_message_helper.py",
"chars": 292,
"preview": "SQS_XRAY_HEADER = \"AWSTraceHeader\"\nclass SqsMessageHelper:\n \n @staticmethod \n def isSampled(sqs_message):\n "
},
{
"path": "aws_xray_sdk/core/utils/stacktrace.py",
"chars": 1970,
"preview": "import sys\nimport traceback\n\n\ndef get_stacktrace(limit=None):\n \"\"\"\n Get a full stacktrace for the current state of"
},
{
"path": "aws_xray_sdk/ext/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "aws_xray_sdk/ext/aiobotocore/__init__.py",
"chars": 46,
"preview": "from .patch import patch\n\n__all__ = ['patch']\n"
},
{
"path": "aws_xray_sdk/ext/aiobotocore/patch.py",
"chars": 1028,
"preview": "import aiobotocore.client\nimport wrapt\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.ext.boto_utils imp"
},
{
"path": "aws_xray_sdk/ext/aiohttp/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "aws_xray_sdk/ext/aiohttp/client.py",
"chars": 2627,
"preview": "\"\"\"\nAioHttp Client tracing, only compatible with Aiohttp 3.X versions\n\"\"\"\nimport aiohttp\n\nfrom types import SimpleNamesp"
},
{
"path": "aws_xray_sdk/ext/aiohttp/middleware.py",
"chars": 3013,
"preview": "\"\"\"\nAioHttp Middleware\n\"\"\"\nfrom aiohttp import web\nfrom aiohttp.web_exceptions import HTTPException\n\nfrom aws_xray_sdk.c"
},
{
"path": "aws_xray_sdk/ext/boto_utils.py",
"chars": 4485,
"preview": "import json\nimport pkgutil\n\nfrom botocore.exceptions import ClientError\n\nfrom aws_xray_sdk.core import xray_recorder\nfro"
},
{
"path": "aws_xray_sdk/ext/botocore/__init__.py",
"chars": 46,
"preview": "from .patch import patch\n\n__all__ = ['patch']\n"
},
{
"path": "aws_xray_sdk/ext/botocore/patch.py",
"chars": 1217,
"preview": "import wrapt\nimport botocore.client\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.ext.boto_utils import"
},
{
"path": "aws_xray_sdk/ext/bottle/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "aws_xray_sdk/ext/bottle/middleware.py",
"chars": 3936,
"preview": "from bottle import request, response, SimpleTemplate\n\nfrom aws_xray_sdk.core.lambda_launcher import check_in_lambda, Lam"
},
{
"path": "aws_xray_sdk/ext/dbapi2.py",
"chars": 1844,
"preview": "import copy\nimport wrapt\n\nfrom aws_xray_sdk.core import xray_recorder\n\n\nclass XRayTracedConn(wrapt.ObjectProxy):\n\n _x"
},
{
"path": "aws_xray_sdk/ext/django/__init__.py",
"chars": 63,
"preview": "default_app_config = 'aws_xray_sdk.ext.django.apps.XRayConfig'\n"
},
{
"path": "aws_xray_sdk/ext/django/apps.py",
"chars": 2268,
"preview": "import logging\n\nfrom django.apps import AppConfig\n\nfrom .conf import settings\nfrom .db import patch_db\nfrom .templates i"
},
{
"path": "aws_xray_sdk/ext/django/conf.py",
"chars": 2394,
"preview": "import os\n\nfrom django.conf import settings as django_settings\nfrom django.test.signals import setting_changed\n\nDEFAULTS"
},
{
"path": "aws_xray_sdk/ext/django/db.py",
"chars": 2605,
"preview": "import copy\nimport logging\nimport importlib\n\nfrom django.db import connections\n\nfrom aws_xray_sdk.core import xray_recor"
},
{
"path": "aws_xray_sdk/ext/django/middleware.py",
"chars": 4955,
"preview": "import logging\nfrom .conf import settings\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.models imp"
},
{
"path": "aws_xray_sdk/ext/django/templates.py",
"chars": 1026,
"preview": "import logging\n\nfrom django.template import Template\nfrom django.utils.safestring import SafeString\n\nfrom aws_xray_sdk.c"
},
{
"path": "aws_xray_sdk/ext/flask/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "aws_xray_sdk/ext/flask/middleware.py",
"chars": 4005,
"preview": "import flask.templating\nfrom flask import request\n\nfrom aws_xray_sdk.core.models import http\nfrom aws_xray_sdk.core.util"
},
{
"path": "aws_xray_sdk/ext/flask_sqlalchemy/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "aws_xray_sdk/ext/flask_sqlalchemy/query.py",
"chars": 2491,
"preview": "from builtins import super\nfrom flask_sqlalchemy.model import Model\nfrom sqlalchemy.orm.session import sessionmaker\nfrom"
},
{
"path": "aws_xray_sdk/ext/httplib/__init__.py",
"chars": 126,
"preview": "from .patch import patch, unpatch, add_ignored, reset_ignored\n\n__all__ = ['patch', 'unpatch', 'add_ignored', 'reset_igno"
},
{
"path": "aws_xray_sdk/ext/httplib/patch.py",
"chars": 7506,
"preview": "import fnmatch\nfrom collections import namedtuple\n\nimport urllib3.connection\nimport wrapt\n\nfrom aws_xray_sdk.core import"
},
{
"path": "aws_xray_sdk/ext/httpx/__init__.py",
"chars": 46,
"preview": "from .patch import patch\n\n__all__ = ['patch']\n"
},
{
"path": "aws_xray_sdk/ext/httpx/patch.py",
"chars": 2723,
"preview": "import httpx\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.models import http\nfrom aws_xray_sdk.ex"
},
{
"path": "aws_xray_sdk/ext/mysql/__init__.py",
"chars": 47,
"preview": "from .patch import patch\n\n\n__all__ = ['patch']\n"
},
{
"path": "aws_xray_sdk/ext/mysql/patch.py",
"chars": 986,
"preview": "import wrapt\nimport mysql.connector\n\nfrom aws_xray_sdk.ext.dbapi2 import XRayTracedConn\n\n\nMYSQL_ATTR = {\n '_host': 'n"
},
{
"path": "aws_xray_sdk/ext/pg8000/README.md",
"chars": 57,
"preview": "## Requirements\n\nOnly compatible with `pg8000 <= 1.20.0`."
},
{
"path": "aws_xray_sdk/ext/pg8000/__init__.py",
"chars": 67,
"preview": "from .patch import patch, unpatch\n\n\n__all__ = ['patch', 'unpatch']\n"
},
{
"path": "aws_xray_sdk/ext/pg8000/patch.py",
"chars": 914,
"preview": "import pg8000\nimport wrapt\n\nfrom aws_xray_sdk.ext.dbapi2 import XRayTracedConn\nfrom aws_xray_sdk.core.patcher import _PA"
},
{
"path": "aws_xray_sdk/ext/psycopg/__init__.py",
"chars": 47,
"preview": "from .patch import patch\n\n\n__all__ = ['patch']\n"
},
{
"path": "aws_xray_sdk/ext/psycopg/patch.py",
"chars": 1086,
"preview": "import wrapt\nfrom operator import methodcaller\n\nfrom aws_xray_sdk.ext.dbapi2 import XRayTracedConn\n\n\ndef patch():\n wr"
},
{
"path": "aws_xray_sdk/ext/psycopg2/__init__.py",
"chars": 47,
"preview": "from .patch import patch\n\n\n__all__ = ['patch']\n"
},
{
"path": "aws_xray_sdk/ext/psycopg2/patch.py",
"chars": 2143,
"preview": "import copy\nimport re\nimport wrapt\nfrom operator import methodcaller\n\nfrom aws_xray_sdk.ext.dbapi2 import XRayTracedConn"
},
{
"path": "aws_xray_sdk/ext/pymongo/__init__.py",
"chars": 107,
"preview": "# Copyright © 2018 Clarity Movement Co. All rights reserved.\nfrom .patch import patch\n\n__all__ = ['patch']\n"
},
{
"path": "aws_xray_sdk/ext/pymongo/patch.py",
"chars": 2308,
"preview": "# Copyright © 2018 Clarity Movement Co. All rights reserved.\nfrom pymongo import monitoring\nfrom aws_xray_sdk.core impor"
},
{
"path": "aws_xray_sdk/ext/pymysql/__init__.py",
"chars": 67,
"preview": "from .patch import patch, unpatch\n\n\n__all__ = ['patch', 'unpatch']\n"
},
{
"path": "aws_xray_sdk/ext/pymysql/patch.py",
"chars": 1161,
"preview": "import pymysql\nimport wrapt\n\nfrom aws_xray_sdk.ext.dbapi2 import XRayTracedConn\nfrom aws_xray_sdk.core.patcher import _P"
},
{
"path": "aws_xray_sdk/ext/pynamodb/__init__.py",
"chars": 46,
"preview": "from .patch import patch\n\n__all__ = ['patch']\n"
},
{
"path": "aws_xray_sdk/ext/pynamodb/patch.py",
"chars": 2676,
"preview": "import json\nimport wrapt\nimport pynamodb\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.models impo"
},
{
"path": "aws_xray_sdk/ext/requests/__init__.py",
"chars": 46,
"preview": "from .patch import patch\n\n__all__ = ['patch']\n"
},
{
"path": "aws_xray_sdk/ext/requests/patch.py",
"chars": 1490,
"preview": "import wrapt\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.models import http\nfrom aws_xray_sdk.ex"
},
{
"path": "aws_xray_sdk/ext/resources/aws_para_whitelist.json",
"chars": 22036,
"preview": "{\n \"services\": {\n \"sns\": {\n \"operations\": {\n \"Publish\": {\n \"request_parameters\": [\n "
},
{
"path": "aws_xray_sdk/ext/sqlalchemy/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "aws_xray_sdk/ext/sqlalchemy/query.py",
"chars": 750,
"preview": "from builtins import super\nfrom sqlalchemy.orm.query import Query\nfrom sqlalchemy.orm.session import Session, sessionmak"
},
{
"path": "aws_xray_sdk/ext/sqlalchemy/util/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "aws_xray_sdk/ext/sqlalchemy/util/decorators.py",
"chars": 4248,
"preview": "import re\nimport types\nfrom urllib.parse import urlparse, uses_netloc\n\nfrom sqlalchemy.engine.base import Connection\n\nfr"
},
{
"path": "aws_xray_sdk/ext/sqlalchemy_core/__init__.py",
"chars": 65,
"preview": "from .patch import patch, unpatch\n\n__all__ = ['patch', 'unpatch']"
},
{
"path": "aws_xray_sdk/ext/sqlalchemy_core/patch.py",
"chars": 3983,
"preview": "import logging\nimport sys\nfrom urllib.parse import urlparse, uses_netloc, quote_plus\n\nimport wrapt\nfrom sqlalchemy.sql.e"
},
{
"path": "aws_xray_sdk/ext/sqlite3/__init__.py",
"chars": 47,
"preview": "from .patch import patch\n\n\n__all__ = ['patch']\n"
},
{
"path": "aws_xray_sdk/ext/sqlite3/patch.py",
"chars": 708,
"preview": "import wrapt\nimport sqlite3\n\nfrom aws_xray_sdk.ext.dbapi2 import XRayTracedConn\n\n\ndef patch():\n\n wrapt.wrap_function_"
},
{
"path": "aws_xray_sdk/ext/util.py",
"chars": 4285,
"preview": "import re\nfrom urllib.parse import urlparse\n\nimport wrapt\n\nfrom aws_xray_sdk.core.models import http\nfrom aws_xray_sdk.c"
},
{
"path": "aws_xray_sdk/sdk_config.py",
"chars": 3486,
"preview": "import os\nimport logging\n\nlog = logging.getLogger(__name__)\n\n\nclass SDKConfig:\n \"\"\"\n Global Configuration Class th"
},
{
"path": "aws_xray_sdk/version.py",
"chars": 19,
"preview": "VERSION = '2.15.0'\n"
},
{
"path": "docs/.gitignore",
"chars": 6,
"preview": "_build"
},
{
"path": "docs/Makefile",
"chars": 612,
"preview": "# Minimal makefile for Sphinx documentation\n#\n\n# You can set these variables from the command line.\nSPHINXOPTS =\nSPHI"
},
{
"path": "docs/_templates/layout.html",
"chars": 123,
"preview": "{% extends '!layout.html' %}\n\n{% block footer %}\n<script src=\"/SdkStatic/sdk-priv.js\" async=\"true\"></script>\n{% endblock"
},
{
"path": "docs/aws_xray_sdk.core.emitters.rst",
"chars": 444,
"preview": "aws\\_xray\\_sdk.core.emitters package\n====================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.core.em"
},
{
"path": "docs/aws_xray_sdk.core.exceptions.rst",
"chars": 451,
"preview": "aws\\_xray\\_sdk.core.exceptions package\n======================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.cor"
},
{
"path": "docs/aws_xray_sdk.core.models.rst",
"chars": 2261,
"preview": "aws\\_xray\\_sdk.core.models package\n==================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.core.models"
},
{
"path": "docs/aws_xray_sdk.core.plugins.rst",
"chars": 1075,
"preview": "aws\\_xray\\_sdk.core.plugins package\n===================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.core.plug"
},
{
"path": "docs/aws_xray_sdk.core.rst",
"chars": 1557,
"preview": "aws\\_xray\\_sdk.core package\n===========================\n\nSubpackages\n-----------\n\n.. toctree::\n\n aws_xray_sdk.core.em"
},
{
"path": "docs/aws_xray_sdk.core.sampling.rst",
"chars": 878,
"preview": "aws\\_xray\\_sdk.core.sampling package\n====================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.core.sa"
},
{
"path": "docs/aws_xray_sdk.core.streaming.rst",
"chars": 468,
"preview": "aws\\_xray\\_sdk.core.streaming package\n=====================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.core."
},
{
"path": "docs/aws_xray_sdk.core.utils.rst",
"chars": 833,
"preview": "aws\\_xray\\_sdk.core.utils package\n=================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.core.utils.at"
},
{
"path": "docs/aws_xray_sdk.ext.aiobotocore.rst",
"chars": 436,
"preview": "aws\\_xray\\_sdk.ext.aiobotocore package\n======================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext"
},
{
"path": "docs/aws_xray_sdk.ext.aiohttp.rst",
"chars": 616,
"preview": "aws\\_xray\\_sdk.ext.aiohttp package\n==================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.aiohttp"
},
{
"path": "docs/aws_xray_sdk.ext.botocore.rst",
"chars": 418,
"preview": "aws\\_xray\\_sdk.ext.botocore package\n===================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.botoc"
},
{
"path": "docs/aws_xray_sdk.ext.django.rst",
"chars": 1150,
"preview": "aws\\_xray\\_sdk.ext.django package\n=================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.django.ap"
},
{
"path": "docs/aws_xray_sdk.ext.flask.rst",
"chars": 415,
"preview": "aws\\_xray\\_sdk.ext.flask package\n================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.flask.middl"
},
{
"path": "docs/aws_xray_sdk.ext.flask_sqlalchemy.rst",
"chars": 470,
"preview": "aws\\_xray\\_sdk.ext.flask\\_sqlalchemy package\n============================================\n\nSubmodules\n----------\n\naws\\_x"
},
{
"path": "docs/aws_xray_sdk.ext.httplib.rst",
"chars": 412,
"preview": "aws\\_xray\\_sdk.ext.httplib package\n==================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.httplib"
},
{
"path": "docs/aws_xray_sdk.ext.httpx.rst",
"chars": 393,
"preview": "aws\\_xray\\_sdk.ext.httpx package\n================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.httpx.patch"
},
{
"path": "docs/aws_xray_sdk.ext.mysql.rst",
"chars": 400,
"preview": "aws\\_xray\\_sdk.ext.mysql package\n================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.mysql.patch"
},
{
"path": "docs/aws_xray_sdk.ext.pg8000.rst",
"chars": 399,
"preview": "aws\\_xray\\_sdk.ext.pg8000 package\n=================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.pg8000.pa"
},
{
"path": "docs/aws_xray_sdk.ext.psycopg2.rst",
"chars": 411,
"preview": "aws\\_xray\\_sdk.ext.psycopg2 package\n===================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.psyco"
},
{
"path": "docs/aws_xray_sdk.ext.pymongo.rst",
"chars": 405,
"preview": "aws\\_xray\\_sdk.ext.pymongo package\n==================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.pymongo"
},
{
"path": "docs/aws_xray_sdk.ext.pymysql.rst",
"chars": 405,
"preview": "aws\\_xray\\_sdk.ext.pymysql package\n==================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.pymysql"
},
{
"path": "docs/aws_xray_sdk.ext.pynamodb.rst",
"chars": 418,
"preview": "aws\\_xray\\_sdk.ext.pynamodb package\n===================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.pynam"
},
{
"path": "docs/aws_xray_sdk.ext.requests.rst",
"chars": 418,
"preview": "aws\\_xray\\_sdk.ext.requests package\n===================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.reque"
},
{
"path": "docs/aws_xray_sdk.ext.rst",
"chars": 1107,
"preview": "aws\\_xray\\_sdk.ext package\n==========================\n\nSubpackages\n-----------\n\n.. toctree::\n\n aws_xray_sdk.ext.aiobo"
},
{
"path": "docs/aws_xray_sdk.ext.sqlalchemy.rst",
"chars": 507,
"preview": "aws\\_xray\\_sdk.ext.sqlalchemy package\n=====================================\n\nSubpackages\n-----------\n\n.. toctree::\n\n "
},
{
"path": "docs/aws_xray_sdk.ext.sqlalchemy.util.rst",
"chars": 475,
"preview": "aws\\_xray\\_sdk.ext.sqlalchemy.util package\n==========================================\n\nSubmodules\n----------\n\naws\\_xray\\"
},
{
"path": "docs/aws_xray_sdk.ext.sqlalchemy_core.rst",
"chars": 457,
"preview": "aws\\_xray\\_sdk.ext.sqlalchemy\\_core package\n===========================================\n\nSubmodules\n----------\n\naws\\_xra"
},
{
"path": "docs/aws_xray_sdk.ext.sqlite3.rst",
"chars": 412,
"preview": "aws\\_xray\\_sdk.ext.sqlite3 package\n==================================\n\nSubmodules\n----------\n\naws\\_xray\\_sdk.ext.sqlite3"
},
{
"path": "docs/aws_xray_sdk.rst",
"chars": 429,
"preview": "aws\\_xray\\_sdk package\n======================\n\nSubpackages\n-----------\n\n.. toctree::\n\n aws_xray_sdk.core\n aws_xray"
},
{
"path": "docs/basic.rst",
"chars": 3564,
"preview": ".. _basic:\n\nBasic Usage\n===========\n\nThe SDK provides a global recorder, ``xray_recorder``, to generate segments and sub"
},
{
"path": "docs/changes.rst",
"chars": 43,
"preview": ".. _changes:\n\n.. include:: ../CHANGELOG.rst"
},
{
"path": "docs/conf.py",
"chars": 5678,
"preview": "# -*- coding: utf-8 -*-\n#\n# aws-xray-sdk documentation build configuration file, created by\n# sphinx-quickstart on Wed A"
},
{
"path": "docs/configurations.rst",
"chars": 7022,
"preview": ".. _configurations:\n\nConfigure Global Recorder\n=========================\n\nSampling\n--------\nSampling is enabled by defau"
},
{
"path": "docs/frameworks.rst",
"chars": 5425,
"preview": ".. _frameworks:\n\nDjango\n======\n\nConfigure X-Ray Recorder\n------------------------\nMake sure you add ``XRayMiddleWare`` a"
},
{
"path": "docs/index.rst",
"chars": 1817,
"preview": ".. aws-xray-sdk documentation master file, created by\n sphinx-quickstart on Wed Aug 2 15:33:56 2017.\n You can adapt"
},
{
"path": "docs/license.rst",
"chars": 121,
"preview": ".. _license:\n\nLicense\n=======\n\nPlease see Github page on https://github.com/aws/aws-xray-sdk-python/blob/master/LICENSE."
},
{
"path": "docs/make.bat",
"chars": 7581,
"preview": "@ECHO OFF\r\n\r\nREM Command file for Sphinx documentation\r\n\r\npushd %~dp0\r\n\r\nif \"%SPHINXBUILD%\" == \"\" (\r\n\tset SPHINXBUILD=py"
},
{
"path": "docs/modules.rst",
"chars": 73,
"preview": "aws_xray_sdk\n============\n\n.. toctree::\n :maxdepth: 4\n\n aws_xray_sdk\n"
},
{
"path": "docs/thirdparty.rst",
"chars": 2684,
"preview": ".. _thirdparty:\n\nThird Party Library Support\n===========================\n\nPatching Supported Libraries\n-----------------"
},
{
"path": "sample-apps/LICENSE",
"chars": 926,
"preview": "Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.\n\nPermission is hereby granted, free of charge, to any"
},
{
"path": "sample-apps/flask/Dockerfile",
"chars": 112,
"preview": "FROM python:3.6\n\nWORKDIR /app\n\nCOPY . ./\n\nRUN pip install -r requirements.txt\n\nCMD [\"python\", \"application.py\"]\n"
},
{
"path": "sample-apps/flask/application.py",
"chars": 1625,
"preview": "import boto3\nfrom flask import Flask\nfrom aws_xray_sdk.core import xray_recorder, patch_all\nfrom aws_xray_sdk.ext.flask."
},
{
"path": "sample-apps/flask/requirements.txt",
"chars": 181,
"preview": "boto3==1.34.26\ncertifi==2024.7.4\nchardet==5.2.0\nFlask==2.3.3\nidna==3.7\nrequests==2.32.0\nurllib3==1.26.19\nWerkzeug==3.0.6"
},
{
"path": "setup.cfg",
"chars": 25,
"preview": "[bdist_wheel]\nuniversal=1"
},
{
"path": "setup.py",
"chars": 1486,
"preview": "from setuptools import setup, find_packages\nfrom os import path\nfrom aws_xray_sdk.version import VERSION\n\nCURRENT_DIR = "
},
{
"path": "terraform/eb.tf",
"chars": 2023,
"preview": "terraform {\n required_providers {\n aws = {\n source = \"hashicorp/aws\"\n version = \"3.5.0\"\n }\n }\n}\n\nprov"
},
{
"path": "terraform/fixtures.us-west-2.tfvars",
"chars": 86,
"preview": "region = \"us-west-2\"\n\nbucket_key = \"beanstalk/deploy.zip\"\n\nsource_path = \"deploy.zip\"\n"
},
{
"path": "terraform/variables.tf",
"chars": 338,
"preview": "variable \"region\" {\n type = string\n description = \"AWS region for deployment of resources\"\n}\n\nvariable \"bucket_"
},
{
"path": "tests/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "tests/distributioncheck/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "tests/distributioncheck/test_sanity.py",
"chars": 145,
"preview": "from aws_xray_sdk.core.models.segment import Segment\n\ndef test_create_segment():\n segment = Segment('test')\n asser"
},
{
"path": "tests/ext/__init__.py",
"chars": 161,
"preview": "from aws_xray_sdk.core import xray_recorder\nfrom ..util import StubbedEmitter\n\n\nxray_recorder.configure(sampling=False)\n"
},
{
"path": "tests/ext/aiobotocore/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "tests/ext/aiobotocore/test_aiobotocore.py",
"chars": 5833,
"preview": "import pytest\n\nfrom aiobotocore.session import get_session\nfrom botocore.stub import Stubber, ANY\nfrom botocore.exceptio"
},
{
"path": "tests/ext/aiohttp/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "tests/ext/aiohttp/test_client.py",
"chars": 6576,
"preview": "import logging\n\nimport pytest\nfrom aiohttp import ClientSession\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xr"
},
{
"path": "tests/ext/aiohttp/test_middleware.py",
"chars": 9343,
"preview": "\"\"\"\nTests the middleware for aiohttp server\n\nExpects pytest-aiohttp\n\"\"\"\nimport asyncio\nimport sys\nfrom unittest.mock imp"
},
{
"path": "tests/ext/botocore/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "tests/ext/botocore/test_botocore.py",
"chars": 5671,
"preview": "import pytest\nimport botocore.session\nfrom botocore.stub import Stubber, ANY\n\nfrom aws_xray_sdk.core import patch\nfrom a"
},
{
"path": "tests/ext/bottle/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "tests/ext/bottle/test_bottle.py",
"chars": 8608,
"preview": "import pytest\nfrom bottle import Bottle, request, response, template, view, HTTPError, TEMPLATE_PATH\nfrom webtest import"
},
{
"path": "tests/ext/bottle/views/index.tpl",
"chars": 53,
"preview": "<h1>Hello {{name.title()}}!</h1>\n<p>How are you?</p>\n"
},
{
"path": "tests/ext/django/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "tests/ext/django/app/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "tests/ext/django/app/settings.py",
"chars": 1778,
"preview": "\"\"\"\nConfig file for a django app used by django testing client\n\"\"\"\nimport os\nfrom aws_xray_sdk.core.sampling.sampler imp"
},
{
"path": "tests/ext/django/app/templates/block.html",
"chars": 19,
"preview": "<p>Hello World</p>\n"
},
{
"path": "tests/ext/django/app/templates/block_user.html",
"chars": 101,
"preview": "<!DOCTYPE html>\n<html>\n<body>\n\n<h1>Django Test App</h1>\n\n{% include \"block.html\" %}\n\n</body>\n</html>\n"
},
{
"path": "tests/ext/django/app/templates/index.html",
"chars": 92,
"preview": "<!DOCTYPE html>\n<html>\n<body>\n\n<h1>Django Test App</h1>\n\n<p>Hello World</p>\n\n</body>\n</html>"
},
{
"path": "tests/ext/django/app/views.py",
"chars": 845,
"preview": "import sqlite3\n\nfrom django.http import HttpResponse\nfrom django.urls import path\nfrom django.views.generic import Templ"
},
{
"path": "tests/ext/django/test_db.py",
"chars": 2501,
"preview": "import django\n\nimport pytest\n\nfrom aws_xray_sdk.core import xray_recorder\nfrom aws_xray_sdk.core.context import Context\n"
},
{
"path": "tests/ext/django/test_middleware.py",
"chars": 5618,
"preview": "import django\nfrom aws_xray_sdk import global_sdk_config\nfrom django.urls import reverse\nfrom django.test import TestCas"
}
]
// ... and 61 more files (download for full content)
About this extraction
This page contains the full source code of the aws/aws-xray-sdk-python GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 261 files (574.9 KB), approximately 144.2k tokens, and a symbol index with 926 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — a free GitHub-repository-to-text converter for AI. Built by Nikandr Surkov.