Repository: aws/aws-xray-sdk-python
Branch: master
Commit: 48b6a8f2bb13
Files: 261
Total size: 574.9 KB
Directory structure:
gitextract_js1ahssn/
├── .github/
│ ├── CODEOWNERS
│ ├── PULL_REQUEST_TEMPLATE.md
│ ├── dependency-check-suppressions.xml
│ ├── stale.yml
│ ├── trivy/
│ │ └── daily-scan.trivyignore.yaml
│ └── workflows/
│ ├── IntegrationTesting.yaml
│ ├── Release.yaml
│ ├── UnitTesting.yaml
│ ├── continuous-monitoring.yml
│ └── daily-scan.yml
├── .gitignore
├── CHANGELOG.rst
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── MANIFEST.in
├── NOTICE
├── README.md
├── __init__.py
├── aws_xray_sdk/
│ ├── __init__.py
│ ├── core/
│ │ ├── __init__.py
│ │ ├── async_context.py
│ │ ├── async_recorder.py
│ │ ├── context.py
│ │ ├── daemon_config.py
│ │ ├── emitters/
│ │ │ ├── __init__.py
│ │ │ └── udp_emitter.py
│ │ ├── exceptions/
│ │ │ ├── __init__.py
│ │ │ └── exceptions.py
│ │ ├── lambda_launcher.py
│ │ ├── models/
│ │ │ ├── __init__.py
│ │ │ ├── default_dynamic_naming.py
│ │ │ ├── dummy_entities.py
│ │ │ ├── entity.py
│ │ │ ├── facade_segment.py
│ │ │ ├── http.py
│ │ │ ├── noop_traceid.py
│ │ │ ├── segment.py
│ │ │ ├── subsegment.py
│ │ │ ├── throwable.py
│ │ │ ├── trace_header.py
│ │ │ └── traceid.py
│ │ ├── patcher.py
│ │ ├── plugins/
│ │ │ ├── __init__.py
│ │ │ ├── ec2_plugin.py
│ │ │ ├── ecs_plugin.py
│ │ │ ├── elasticbeanstalk_plugin.py
│ │ │ └── utils.py
│ │ ├── recorder.py
│ │ ├── sampling/
│ │ │ ├── __init__.py
│ │ │ ├── connector.py
│ │ │ ├── local/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── reservoir.py
│ │ │ │ ├── sampler.py
│ │ │ │ ├── sampling_rule.json
│ │ │ │ └── sampling_rule.py
│ │ │ ├── reservoir.py
│ │ │ ├── rule_cache.py
│ │ │ ├── rule_poller.py
│ │ │ ├── sampler.py
│ │ │ ├── sampling_rule.py
│ │ │ └── target_poller.py
│ │ ├── streaming/
│ │ │ ├── __init__.py
│ │ │ └── default_streaming.py
│ │ └── utils/
│ │ ├── __init__.py
│ │ ├── atomic_counter.py
│ │ ├── compat.py
│ │ ├── conversion.py
│ │ ├── search_pattern.py
│ │ ├── sqs_message_helper.py
│ │ └── stacktrace.py
│ ├── ext/
│ │ ├── __init__.py
│ │ ├── aiobotocore/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── aiohttp/
│ │ │ ├── __init__.py
│ │ │ ├── client.py
│ │ │ └── middleware.py
│ │ ├── boto_utils.py
│ │ ├── botocore/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── bottle/
│ │ │ ├── __init__.py
│ │ │ └── middleware.py
│ │ ├── dbapi2.py
│ │ ├── django/
│ │ │ ├── __init__.py
│ │ │ ├── apps.py
│ │ │ ├── conf.py
│ │ │ ├── db.py
│ │ │ ├── middleware.py
│ │ │ └── templates.py
│ │ ├── flask/
│ │ │ ├── __init__.py
│ │ │ └── middleware.py
│ │ ├── flask_sqlalchemy/
│ │ │ ├── __init__.py
│ │ │ └── query.py
│ │ ├── httplib/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── httpx/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── mysql/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── pg8000/
│ │ │ ├── README.md
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── psycopg/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── psycopg2/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── pymongo/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── pymysql/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── pynamodb/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── requests/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── resources/
│ │ │ └── aws_para_whitelist.json
│ │ ├── sqlalchemy/
│ │ │ ├── __init__.py
│ │ │ ├── query.py
│ │ │ └── util/
│ │ │ ├── __init__.py
│ │ │ └── decorators.py
│ │ ├── sqlalchemy_core/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ ├── sqlite3/
│ │ │ ├── __init__.py
│ │ │ └── patch.py
│ │ └── util.py
│ ├── sdk_config.py
│ └── version.py
├── docs/
│ ├── .gitignore
│ ├── Makefile
│ ├── _templates/
│ │ └── layout.html
│ ├── aws_xray_sdk.core.emitters.rst
│ ├── aws_xray_sdk.core.exceptions.rst
│ ├── aws_xray_sdk.core.models.rst
│ ├── aws_xray_sdk.core.plugins.rst
│ ├── aws_xray_sdk.core.rst
│ ├── aws_xray_sdk.core.sampling.rst
│ ├── aws_xray_sdk.core.streaming.rst
│ ├── aws_xray_sdk.core.utils.rst
│ ├── aws_xray_sdk.ext.aiobotocore.rst
│ ├── aws_xray_sdk.ext.aiohttp.rst
│ ├── aws_xray_sdk.ext.botocore.rst
│ ├── aws_xray_sdk.ext.django.rst
│ ├── aws_xray_sdk.ext.flask.rst
│ ├── aws_xray_sdk.ext.flask_sqlalchemy.rst
│ ├── aws_xray_sdk.ext.httplib.rst
│ ├── aws_xray_sdk.ext.httpx.rst
│ ├── aws_xray_sdk.ext.mysql.rst
│ ├── aws_xray_sdk.ext.pg8000.rst
│ ├── aws_xray_sdk.ext.psycopg2.rst
│ ├── aws_xray_sdk.ext.pymongo.rst
│ ├── aws_xray_sdk.ext.pymysql.rst
│ ├── aws_xray_sdk.ext.pynamodb.rst
│ ├── aws_xray_sdk.ext.requests.rst
│ ├── aws_xray_sdk.ext.rst
│ ├── aws_xray_sdk.ext.sqlalchemy.rst
│ ├── aws_xray_sdk.ext.sqlalchemy.util.rst
│ ├── aws_xray_sdk.ext.sqlalchemy_core.rst
│ ├── aws_xray_sdk.ext.sqlite3.rst
│ ├── aws_xray_sdk.rst
│ ├── basic.rst
│ ├── changes.rst
│ ├── conf.py
│ ├── configurations.rst
│ ├── frameworks.rst
│ ├── index.rst
│ ├── license.rst
│ ├── make.bat
│ ├── modules.rst
│ └── thirdparty.rst
├── sample-apps/
│ ├── LICENSE
│ └── flask/
│ ├── Dockerfile
│ ├── application.py
│ └── requirements.txt
├── setup.cfg
├── setup.py
├── terraform/
│ ├── eb.tf
│ ├── fixtures.us-west-2.tfvars
│ └── variables.tf
├── tests/
│ ├── __init__.py
│ ├── distributioncheck/
│ │ ├── __init__.py
│ │ └── test_sanity.py
│ ├── ext/
│ │ ├── __init__.py
│ │ ├── aiobotocore/
│ │ │ ├── __init__.py
│ │ │ └── test_aiobotocore.py
│ │ ├── aiohttp/
│ │ │ ├── __init__.py
│ │ │ ├── test_client.py
│ │ │ └── test_middleware.py
│ │ ├── botocore/
│ │ │ ├── __init__.py
│ │ │ └── test_botocore.py
│ │ ├── bottle/
│ │ │ ├── __init__.py
│ │ │ ├── test_bottle.py
│ │ │ └── views/
│ │ │ └── index.tpl
│ │ ├── django/
│ │ │ ├── __init__.py
│ │ │ ├── app/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── settings.py
│ │ │ │ ├── templates/
│ │ │ │ │ ├── block.html
│ │ │ │ │ ├── block_user.html
│ │ │ │ │ └── index.html
│ │ │ │ └── views.py
│ │ │ ├── test_db.py
│ │ │ ├── test_middleware.py
│ │ │ └── test_settings.py
│ │ ├── flask/
│ │ │ ├── __init__.py
│ │ │ └── test_flask.py
│ │ ├── flask_sqlalchemy/
│ │ │ ├── __init__.py
│ │ │ └── test_query.py
│ │ ├── httplib/
│ │ │ ├── __init__.py
│ │ │ └── test_httplib.py
│ │ ├── httpx/
│ │ │ ├── __init__.py
│ │ │ ├── test_httpx.py
│ │ │ └── test_httpx_async.py
│ │ ├── pg8000/
│ │ │ ├── __init__.py
│ │ │ └── test_pg8000.py
│ │ ├── psycopg/
│ │ │ ├── __init__.py
│ │ │ └── test_psycopg.py
│ │ ├── psycopg2/
│ │ │ ├── __init__.py
│ │ │ └── test_psycopg2.py
│ │ ├── pymysql/
│ │ │ ├── __init__.py
│ │ │ └── test_pymysql.py
│ │ ├── pynamodb/
│ │ │ ├── __init__.py
│ │ │ └── test_pynamodb.py
│ │ ├── requests/
│ │ │ ├── __init__.py
│ │ │ └── test_requests.py
│ │ ├── sqlalchemy/
│ │ │ ├── __init__.py
│ │ │ └── test_query.py
│ │ ├── sqlalchemy_core/
│ │ │ ├── __init__.py
│ │ │ ├── test_base.py
│ │ │ ├── test_dburl.py
│ │ │ ├── test_postgres.py
│ │ │ ├── test_sqlalchemy_core.py
│ │ │ └── test_sqlalchemy_core_2.py
│ │ └── sqlite3/
│ │ ├── __init__.py
│ │ └── test_sqlite3.py
│ ├── mock_module/
│ │ ├── __init__.py
│ │ ├── mock_file.py
│ │ └── mock_submodule/
│ │ ├── __init__.py
│ │ └── mock_subfile.py
│ ├── mock_sampling_rule.json
│ ├── test_async_local_storage.py
│ ├── test_async_recorder.py
│ ├── test_daemon_config.py
│ ├── test_dummy_entites.py
│ ├── test_facade_segment.py
│ ├── test_lambda_context.py
│ ├── test_local_sampling.py
│ ├── test_local_sampling_benchmark.py
│ ├── test_patcher.py
│ ├── test_plugins.py
│ ├── test_recorder.py
│ ├── test_sampling_rule_cache.py
│ ├── test_sdk_config.py
│ ├── test_serialize_entities.py
│ ├── test_sqs_message_helper.py
│ ├── test_throwable.py
│ ├── test_trace_entities.py
│ ├── test_trace_header.py
│ ├── test_traceid.py
│ ├── test_utils.py
│ ├── test_wildcard_match.py
│ └── util.py
├── tox-distributioncheck.ini
└── tox.ini
================================================
FILE CONTENTS
================================================
================================================
FILE: .github/CODEOWNERS
================================================
#####################################################
#
# List of approvers for this repository
#
#####################################################
#
# Learn about CODEOWNERS file format:
# https://help.github.com/en/articles/about-code-owners
#
* @aws/aws-x-ray
================================================
FILE: .github/PULL_REQUEST_TEMPLATE.md
================================================
*Issue #, if available:*
*Description of changes:*
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
================================================
FILE: .github/dependency-check-suppressions.xml
================================================
================================================
FILE: .github/stale.yml
================================================
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 30
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Limit to only `issues` or `pulls`
only: issues
# Issues with these labels will never be considered stale
exemptLabels:
- pinned
- bug
- enhancement
- feature-request
- help wanted
- work-in-progress
- pending release
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs in the next 7 days. Thank you
for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false
================================================
FILE: .github/trivy/daily-scan.trivyignore.yaml
================================================
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
# Trivy ignore file for daily scans.
# This file is intentionally empty. Daily scans should flag all CVEs.
# See: https://aquasecurity.github.io/trivy/latest/docs/configuration/filtering/
# Format:
# - id:
# statement: " "
# expired_at:
vulnerabilities: []
================================================
FILE: .github/workflows/IntegrationTesting.yaml
================================================
name: Integration Testing
on:
push:
branches:
- master
permissions:
id-token: write
contents: read
jobs:
build_SDK:
name: Build X-Ray Python SDK
runs-on: ubuntu-latest
steps:
- name: Pull in source code from aws-xray-sdk-python Github repository
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0
- name: Setup python
uses: actions/setup-python@7f4fc3e22c37d6ff65e88745f38bd3157c663f7c #v4.9.1
with:
python-version: '3.8'
- name: Build X-Ray Python SDK
run: python setup.py sdist
- name: Upload SDK build artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2
with:
name: sdk-build-artifact
path: .
build_WebApp:
name: Build Web Application
needs: build_SDK
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0
- name: Setup python
uses: actions/setup-python@7f4fc3e22c37d6ff65e88745f38bd3157c663f7c #v4.9.1
with:
python-version: '3.8'
- name: Download X-Ray SDK build artifact
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 #v4.3.0
with:
name: sdk-build-artifact
path: ./sample-apps/flask
- name: Build WebApp with X-Ray Python SDK
run: pip3 install . -t .
working-directory: ./sample-apps/flask
- name: Zip up the deployment package
run: zip -r deploy.zip . -x '*.git*'
working-directory: ./sample-apps/flask
- name: Upload WebApp with X-Ray SDK build artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2
with:
name: sdk-flask-build-artifact
path: ./sample-apps/flask/deploy.zip
deploy_WebApp:
name: Deploy Web Application
needs: build_WebApp
runs-on: ubuntu-latest
steps:
- name: Checkout X-Ray SDK to get terraform source
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0
- name: Download WebApp with X-Ray SDK build artifact
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 #v4.3.0
with:
name: sdk-flask-build-artifact
- name: Copy deployment package to terraform directory
run: cp deploy.zip ./terraform
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a #v4.3.1
with:
role-to-assume: ${{ secrets.AWS_INTEG_TEST_ROLE_ARN }}
aws-region: us-west-2
- name: Setup Terraform
uses: hashicorp/setup-terraform@633666f66e0061ca3b725c73b2ec20cd13a8fdd1 #v2.0.3
- name: Terraform Init
run: terraform init
working-directory: ./terraform
- name: Terraform Validate
run: terraform validate -no-color
working-directory: ./terraform
- name: Terraform Plan
run: terraform plan -var-file="fixtures.us-west-2.tfvars" -no-color
env:
TF_VAR_resource_prefix: '${{ github.run_id }}-${{ github.run_number }}'
continue-on-error: true
working-directory: ./terraform
- name: Terraform Apply
run: terraform apply -var-file="fixtures.us-west-2.tfvars" -auto-approve
env:
TF_VAR_resource_prefix: '${{ github.run_id }}-${{ github.run_number }}'
working-directory: ./terraform
- name: Upload terraform state files for destroying resources
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2
with:
name: terraform-state-artifact
path: ./terraform
test_WebApp:
name: Test WebApp
needs: deploy_WebApp
runs-on: ubuntu-latest
steps:
- uses: actions/setup-java@17f84c3641ba7b8f6deff6309fc4c864478f5d62 #v3.14.1
with:
distribution: 'zulu'
java-version: 14
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a #v4.3.1
with:
role-to-assume: ${{ secrets.AWS_INTEG_TEST_ROLE_ARN }}
aws-region: us-west-2
- name: Checkout test framework
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0
with:
repository: aws-observability/aws-otel-test-framework
ref: terraform
- name: Run testing suite
run: ./gradlew :validator:run --args='-c default-xray-trace-validation.yml --endpoint http://${{ github.run_id }}-${{ github.run_number }}-eb-app-env.us-west-2.elasticbeanstalk.com'
cleanup:
name: Resource tear down
needs: test_WebApp
if: always()
runs-on: ubuntu-latest
steps:
- name: Download terraform state artifact
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 #v4.3.0
with:
name: terraform-state-artifact
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a #v4.3.1
with:
role-to-assume: ${{ secrets.AWS_INTEG_TEST_ROLE_ARN }}
aws-region: us-west-2
- name: Setup Terraform
uses: hashicorp/setup-terraform@633666f66e0061ca3b725c73b2ec20cd13a8fdd1 #v2.0.3
- name: Terraform Init
run: terraform init
- name: set permissions to terraform plugins
run: chmod -R a+x .terraform/*
- name: Destroy resources
run: terraform destroy -state="terraform.tfstate" -var-file="fixtures.us-west-2.tfvars" -auto-approve
env:
TF_VAR_resource_prefix: '${{ github.run_id }}-${{ github.run_number }}'
================================================
FILE: .github/workflows/Release.yaml
================================================
name: Release X-Ray Python SDK
on:
workflow_dispatch:
inputs:
version:
description: The version to tag the release with, e.g., 1.2.0, 1.3.0
required: true
jobs:
release:
permissions:
contents: write
runs-on: ubuntu-latest
steps:
- name: Checkout master branch
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0
- name: Create Release
id: create_release
uses: actions/create-release@0cb9c9b65d5d1901c1f53e5e66eaf4afd303e70e #v1.1.4
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: '${{ github.event.inputs.version }}'
release_name: '${{ github.event.inputs.version }} Release'
body: 'See details in [CHANGELOG](https://github.com/aws/aws-xray-sdk-python/blob/master/CHANGELOG.rst)'
draft: true
prerelease: false
================================================
FILE: .github/workflows/UnitTesting.yaml
================================================
name: Unit Testing
permissions:
contents: read
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
test:
runs-on: ubuntu-22.04
env:
py37: 3.7
py38: 3.8
py39: 3.9
py310: '3.10'
py311: '3.11'
py312: '3.12'
DB_DATABASE: test_db
DB_USER: root
DB_PASSWORD: root
strategy:
fail-fast: false
matrix:
python-version: [py37, py38, py39, py310, py311, py312]
testenv: [core, ext]
steps:
- name: Checkout repo
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0
- name: Start MySQL
if: ${{ matrix.testenv == 'ext' }}
run: |
sudo /etc/init.d/mysql start
mysql -e 'CREATE DATABASE ${{ env.DB_DATABASE }};' -u${{ env.DB_USER }} -p${{ env.DB_PASSWORD }}
mysql -e 'CREATE DATABASE test_dburl;' -u${{ env.DB_USER }} -p${{ env.DB_PASSWORD }}
mysql -e "CREATE USER test_dburl_user@localhost IDENTIFIED BY 'test]password';" -u${{ env.DB_USER }} -p${{ env.DB_PASSWORD }}
mysql -e "GRANT ALL PRIVILEGES ON test_dburl.* TO test_dburl_user@localhost;" -u${{ env.DB_USER }} -p${{ env.DB_PASSWORD }}
mysql -e "FLUSH PRIVILEGES;" -u${{ env.DB_USER }} -p${{ env.DB_PASSWORD }}
- name: Setup Python
uses: actions/setup-python@7f4fc3e22c37d6ff65e88745f38bd3157c663f7c #v4.9.1
with:
python-version: ${{ env[matrix.python-version] }}
- name: Install tox
run: pip install "tox<=3.27.1" -U tox-factor setuptools
- name: Cache tox environment
# Preserves .tox directory between runs for faster installs
uses: actions/cache@6f8efc29b200d32929f49075959781ed54ec270c #v3.5.0
with:
path: |
.tox
~/.cache/pip
key: tox-cache-${{ matrix.python-version }}-${{ matrix.testenv }}-${{ hashFiles('tox.ini') }}
- name: Run tox
run: |
tox -f ${{ matrix.python-version }}-${{ matrix.testenv }}
static-code-checks:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0
with:
fetch-depth: 0
- name: Check for versioned GitHub actions
if: always()
run: |
# Get changed GitHub workflow/action files
CHANGED_FILES=$(git diff --name-only origin/${{ github.base_ref }}..HEAD | grep -E "^\.github/(workflows|actions)/.*\.ya?ml$" || true)
if [ -n "$CHANGED_FILES" ]; then
# Check for any versioned actions, excluding comments and this validation script
VIOLATIONS=$(grep -Hn "uses:.*@v" $CHANGED_FILES | grep -v "grep.*uses:.*@v" | grep -v "#.*@v" || true)
if [ -n "$VIOLATIONS" ]; then
echo "Found versioned GitHub actions. Use commit SHAs instead:"
echo "$VIOLATIONS"
exit 1
fi
fi
echo "No versioned actions found in changed files"
================================================
FILE: .github/workflows/continuous-monitoring.yml
================================================
name: Continuous monitoring of distribution channels
on:
workflow_dispatch:
schedule:
- cron: '*/10 * * * *'
permissions:
id-token: write
contents: read
jobs:
smoke-tests:
name: Run smoke tests
runs-on: ubuntu-latest
steps:
- name: Checkout Repository
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 #v3.6.0
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@7474bc4690e29a8392af63c5b98e7449536d5c3a #v4.3.1
with:
role-to-assume: ${{ secrets.AWS_INTEG_TEST_ROLE_ARN }}
aws-region: us-east-1
- uses: actions/setup-python@7f4fc3e22c37d6ff65e88745f38bd3157c663f7c #v4.9.1
with:
python-version: '3.x'
- run: pip install tox
- name: Run smoke tests
id: distribution-availability
run: tox -c tox-distributioncheck.ini
- name: Publish metric on X-Ray Python SDK distribution availability
if: ${{ always() }}
run: |
if [[ "${{ steps.distribution-availability.outcome }}" == "failure" ]]; then
aws cloudwatch put-metric-data --metric-name XRayPythonSDKDistributionUnavailability --dimensions failure=rate --namespace MonitorSDK --value 1 --timestamp $(date +%s)
else
aws cloudwatch put-metric-data --metric-name XRayPythonSDKDistributionUnavailability --dimensions failure=rate --namespace MonitorSDK --value 0 --timestamp $(date +%s)
fi
================================================
FILE: .github/workflows/daily-scan.yml
================================================
## Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
## SPDX-License-Identifier: Apache-2.0
# Performs a daily scan of:
# * The X-Ray Python SDK published artifact dependencies, using Trivy
# * Project dependencies, using DependencyCheck
#
# Publishes results to CloudWatch Metrics.
name: Daily scan
on:
schedule: # scheduled to run every 6 hours
- cron: '20 */6 * * *' # "At minute 20 past every 6th hour."
workflow_dispatch: # be able to run the workflow on demand
env:
AWS_DEFAULT_REGION: us-east-1
permissions:
id-token: write
contents: read
jobs:
scan_and_report:
runs-on: ubuntu-latest
steps:
- name: Checkout repo for dependency scan
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0
with:
fetch-depth: 0
- name: Setup Python for dependency scan
uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b #v5.3.0
with:
python-version: '3.x'
- name: Install published package for scanning
run: |
mkdir -p scan-target
python -m venv scan-venv
source scan-venv/bin/activate
pip install aws-xray-sdk
pip freeze > scan-target/requirements.txt
- name: Install Java for dependency scan
uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0
with:
java-version: 17
distribution: 'temurin'
- name: Configure AWS credentials for dependency scan
uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0
with:
role-to-assume: ${{ secrets.SECRET_MANAGER_ROLE_ARN }}
aws-region: ${{ env.AWS_DEFAULT_REGION }}
- name: Get secrets for dependency scan
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 #v2.0.10
id: nvd_api_key
with:
secret-ids: |
${{ secrets.NVD_API_KEY_SECRET_ARN }}
OSS_INDEX, ${{ secrets.OSS_INDEX_SECRET_ARN }}
parse-json-secrets: true
# See http://jeremylong.github.io/DependencyCheck/dependency-check-cli/ for installation explanation
- name: Install and run dependency scan
id: dep_scan
if: always()
run: |
gpg --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 259A55407DD6C00299E6607EFFDE55BE73A2D1ED
VERSION=$(curl -s https://jeremylong.github.io/DependencyCheck/current.txt | head -n1 | cut -d" " -f1)
curl -Ls "https://github.com/dependency-check/DependencyCheck/releases/download/v$VERSION/dependency-check-$VERSION-release.zip" --output dependency-check.zip
curl -Ls "https://github.com/dependency-check/DependencyCheck/releases/download/v$VERSION/dependency-check-$VERSION-release.zip.asc" --output dependency-check.zip.asc
gpg --verify dependency-check.zip.asc
unzip dependency-check.zip
./dependency-check/bin/dependency-check.sh --enableExperimental --failOnCVSS 0 --nvdApiKey ${{ env.NVD_API_KEY_NVD_API_KEY }} --ossIndexUsername ${{ env.OSS_INDEX_USERNAME }} --ossIndexPassword ${{ env.OSS_INDEX_PASSWORD }} --suppression .github/dependency-check-suppressions.xml -s "scan-target/"
- name: Print dependency scan results on failure
if: ${{ steps.dep_scan.outcome != 'success' }}
run: less dependency-check-report.html
- name: Perform high severity scan on published artifact dependencies
if: always()
id: high_scan_latest
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # v0.34.2
with:
scan-type: 'fs'
scan-ref: 'scan-target/'
severity: 'CRITICAL,HIGH'
exit-code: '1'
scanners: 'vuln'
env:
TRIVY_IGNOREFILE: .github/trivy/daily-scan.trivyignore.yaml
- name: Perform low severity scan on published artifact dependencies
if: always()
id: low_scan_latest
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # v0.34.2
with:
scan-type: 'fs'
scan-ref: 'scan-target/'
severity: 'MEDIUM,LOW,UNKNOWN'
exit-code: '1'
scanners: 'vuln'
env:
TRIVY_IGNOREFILE: .github/trivy/daily-scan.trivyignore.yaml
- name: Configure AWS Credentials for emitting metrics
if: always()
uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0
with:
role-to-assume: ${{ secrets.AWS_INTEG_TEST_ROLE_ARN }}
aws-region: ${{ env.AWS_DEFAULT_REGION }}
- name: Publish high scan status
if: always()
run: |
value="${{ steps.high_scan_latest.outcome == 'success' && '1.0' || '0.0' }}"
aws cloudwatch put-metric-data --namespace 'MonitorSDK' \
--metric-name Success \
--dimensions repository=${{ github.repository }},branch=${{ github.ref_name }},workflow=daily_scan_high \
--value $value
- name: Publish low scan status
if: always()
run: |
value="${{ steps.low_scan_latest.outcome == 'success' && steps.dep_scan.outcome == 'success' && '1.0' || '0.0' }}"
aws cloudwatch put-metric-data --namespace 'MonitorSDK' \
--metric-name Success \
--dimensions repository=${{ github.repository }},branch=${{ github.ref_name }},workflow=daily_scan_low \
--value $value
================================================
FILE: .gitignore
================================================
.DS_Store
*.pyc
.Python
.cache
.pytest_cache
man
build
bin
include
lib
dist
*.egg
*.egg-info
.tox
.python-version
.pytest_cache
pip-selfcheck.json
.coverage*
htmlcov
venv
.idea
================================================
FILE: CHANGELOG.rst
================================================
=========
CHANGELOG
=========
Unreleased
==========
2.15.0
==========
* bugfix: Fix log stack overflow if metadata contains circular reference `https://github.com/aws/aws-xray-sdk-python/pull/464`
2.14.0
==========
* bugfix: Fix warning message condition for subsegment ending `https://github.com/aws/aws-xray-sdk-python/pull/434`
2.13.1
==========
* improvement: Bump idna from 3.6 to 3.7 in /sample-apps/flask `https://github.com/aws/aws-xray-sdk-python/pull/425`
* bugfix: Fix end_time param type docstring from int to float `https://github.com/aws/aws-xray-sdk-python/pull/426`
* improvement: Bump werkzeug from 3.0.1 to 3.0.3 in /sample-apps/flask `https://github.com/aws/aws-xray-sdk-python/pull/428`
* improvement: [LambdaContext] Create dummy segment when trace header is incomplete `https://github.com/aws/aws-xray-sdk-python/pull/429`
* bugfix: [LambdaContext] Fix logging to only happen inside lambda function `https://github.com/aws/aws-xray-sdk-python/pull/431`
2.13.0
==========
* bugfix: Fix passing multiple values in testenv.passenv in tox.ini `https://github.com/aws/aws-xray-sdk-python/pull/399`
* improvement: Pin flask < 3.x for flask sqlalchemy tests `https://github.com/aws/aws-xray-sdk-python/pull/412`
* improvement: Bump werkzeug from 2.2.3 to 3.0.1 in /sample-apps/flask `https://github.com/aws/aws-xray-sdk-python/pull/413`
* improvement: Fix typo in docs `https://github.com/aws/aws-xray-sdk-python/pull/419`
* bugfix: Fix sqlalchemy_core patch errors for unencoded special characters in db url `https://github.com/aws/aws-xray-sdk-python/pull/418`
* bugfix: Fix EB platform version for integration test `https://github.com/aws/aws-xray-sdk-python/pull/420`
2.12.1
==========
* bugfix: set_trace_entity() in lambda adds segment to thread `PR409 https://github.com/aws/aws-xray-sdk-python/pull/409`
* bugfix: Cleanup after drop of support for Python `PR387 https://github.com/aws/aws-xray-sdk-python/pull/387`
2.12.0
==========
* improvement: Default Context Missing Strategy set to Log Error `PR372 https://github.com/aws/aws-xray-sdk-python/pull/372`
* bugfix: Pin tox version to <=3.27.1 to fix CI tests `PR374 https://github.com/aws/aws-xray-sdk-python/pull/374`
* improvement: Sample app dependency update `PR373 https://github.com/aws/aws-xray-sdk-python/pull/373`
* bugfix: Fix pynamodb tests for Python < 3.6 `PR375 https://github.com/aws/aws-xray-sdk-python/pull/375`
* improvement: Use latest GH Actions versions in CI tests `PR365 https://github.com/aws/aws-xray-sdk-python/pull/365`
* improvement: Simplify setup script `PR363 https://github.com/aws/aws-xray-sdk-python/pull/363`
* bugfix: Fix deprecation warnings related to asyncio `PR364 https://github.com/aws/aws-xray-sdk-python/pull/364`
* improvement: Run tests against Python 3.10 and 3.11 `PR376 https://github.com/aws/aws-xray-sdk-python/pull/376`
* improvement: Sample app dependency update `PR380 https://github.com/aws/aws-xray-sdk-python/pull/380`
* bugfix: Pin sqlalchemy version to 1.x to fix tests `PR381 https://github.com/aws/aws-xray-sdk-python/pull/381`
* bugfix: Fix sample app dependencies incompatibility with XRay SDK `PR382 https://github.com/aws/aws-xray-sdk-python/pull/382`
* bugfix: Start MySQL from GH Actions, upgrade Ubuntu, and remove Python versions for unit tests `PR384 https://github.com/aws/aws-xray-sdk-python/pull/384`
2.11.0
==========
* bugfix: Fix TypeError by patching register_default_jsonb from psycopg2 `PR350 https://github.com/aws/aws-xray-sdk-python/pull/350`
* improvement: Add annotations `PR348 https://github.com/aws/aws-xray-sdk-python/pull/348`
* bugfix: Use service parameter to match centralized sampling rules `PR 353 https://github.com/aws/aws-xray-sdk-python/pull/353`
* bugfix: Implement PEP3134 to discover underlying problems with python3 `PR355 https://github.com/aws/aws-xray-sdk-python/pull/355`
* improvement: Allow list TopicArn for SNS PublishBatch request `PR358 https://github.com/aws/aws-xray-sdk-python/pull/358`
* bugfix: Version pinning flask-sqlalchemy version to 2.5.1 or less `PR360 https://github.com/aws/aws-xray-sdk-python/pull/360`
* bugfix: Fix UnboundLocalError when aiohttp server raises a CancelledError `PR356 https://github.com/aws/aws-xray-sdk-python/pull/356`
* improvement: Instrument httpx >= 0.20 `PR357 https://github.com/aws/aws-xray-sdk-python/pull/357`
* improvement: [LambdaContext] persist original trace header `PR362 https://github.com/aws/aws-xray-sdk-python/pull/362`
* bugfix: Run tests against Django 4.x `PR361 https://github.com/aws/aws-xray-sdk-python/pull/361`
* improvement: Oversampling Mitigation `PR366 https://github.com/aws/aws-xray-sdk-python/pull/366`
2.10.0
==========
* bugfix: Only import future for py2. `PR343 <https://github.com/aws/aws-xray-sdk-python/pull/343>`_.
* bugfix: Defensively copy context entities to async thread. `PR340 <https://github.com/aws/aws-xray-sdk-python/pull/340>`_.
* improvement: Added support for IGNORE_ERROR option when context is missing. `PR338 <https://github.com/aws/aws-xray-sdk-python/pull/338>`_.
2.9.0
==========
* bugfix: Change logging behavior to avoid overflow. `PR302 <https://github.com/aws/aws-xray-sdk-python/pull/302>`_.
* improvement: Lazy load samplers to speed up cold start in lambda. `PR312 <https://github.com/aws/aws-xray-sdk-python/pull/312>`_.
* improvement: Replace slow json file name resolver. `PR306 <https://github.com/aws/aws-xray-sdk-python/pull/306>`_.
2.8.0
==========
* improvement: feat(sqla-core): Add support for rendering Database Specific queries. `PR291 <https://github.com/aws/aws-xray-sdk-python/pull/291>`_.
* bugfix: Fixing broken instrumentation for sqlalchemy >= 1.4.0. `PR289 <https://github.com/aws/aws-xray-sdk-python/pull/289>`_.
* feature: no op trace id generation. `PR293 <https://github.com/aws/aws-xray-sdk-python/pull/293>`_.
* bugfix: Handle exception when sending entity to Daemon. `PR292 <https://github.com/aws/aws-xray-sdk-python/pull/292>`_.
* bugfix: Fixed serialization issue when cause is a string. `PR284 <https://github.com/aws/aws-xray-sdk-python/pull/284>`_.
* improvement: Publish metric on distribution availability. `PR279 <https://github.com/aws/aws-xray-sdk-python/pull/279>`_.
2.7.0
==========
* improvement: Only run integration tests on master. `PR277 <https://github.com/aws/aws-xray-sdk-python/pull/277>`_.
* improvement: Add distribution channel smoke test. `PR276 <https://github.com/aws/aws-xray-sdk-python/pull/276>`_.
* improvement: Replace jsonpickle with json to serialize entity. `PR275 <https://github.com/aws/aws-xray-sdk-python/pull/275>`_.
* bugfix: Always close segment in teardown_request handler. `PR272 <https://github.com/aws/aws-xray-sdk-python/pull/272>`_.
* improvement: Close segment in only _handle_exception in case of Internal Server Error. `PR271 <https://github.com/aws/aws-xray-sdk-python/pull/271>`_.
* bugfix: Handling condition where Entity.cause is not a dict. `PR267 <https://github.com/aws/aws-xray-sdk-python/pull/267>`_.
* improvement: Add ability to ignore some requests from httplib. `PR263 <https://github.com/aws/aws-xray-sdk-python/pull/263>`_.
* feature: Add support for SQLAlchemy Core. `PR264 <https://github.com/aws/aws-xray-sdk-python/pull/264>`_.
* improvement: Added always() to run clean up workflow. `PR259 <https://github.com/aws/aws-xray-sdk-python/pull/259>`_.
* improvement: Allow configuring different Sampler in Django App. `PR252 <https://github.com/aws/aws-xray-sdk-python/pull/252>`_.
* bugfix: Restore python2 compatibility of EC2 plugin. `PR249 <https://github.com/aws/aws-xray-sdk-python/pull/249>`_.
* bugfix: eb solution stack name. `PR251 <https://github.com/aws/aws-xray-sdk-python/pull/251>`_.
* improvement: Integration Test Workflow. `PR246 <https://github.com/aws/aws-xray-sdk-python/pull/246>`_.
* improvement: Include unicode type for annotation value. `PR235 <https://github.com/aws/aws-xray-sdk-python/pull/235>`_.
* improvement: Run tests against Django 3.1 instead of 1.11. `PR240 <https://github.com/aws/aws-xray-sdk-python/pull/240>`_.
* bugfix: Generalize error check for pymysql error type. `PR239 <https://github.com/aws/aws-xray-sdk-python/pull/239>`_.
* bugfix: SqlAlchemy: Close segment even if error was raised. `PR234 <https://github.com/aws/aws-xray-sdk-python/pull/234>`_.
2.6.0
==========
* bugfix: asyncio.Task.current_task PendingDeprecation fix. `PR217 <https://github.com/aws/aws-xray-sdk-python/pull/217>`_.
* bugfix: Added proper TraceID in dummy segments. `PR223 <https://github.com/aws/aws-xray-sdk-python/pull/223>`_.
* improvement: Add testing for current Django versions. `PR200 <https://github.com/aws/aws-xray-sdk-python/pull/200>`_.
* improvement: IMDSv2 support for EC2 plugin. `PR226 <https://github.com/aws/aws-xray-sdk-python/pull/226>`_.
* improvement: Using instance doc to fetch EC2 metadata. Added 2 additional fields. `PR227 <https://github.com/aws/aws-xray-sdk-python/pull/227>`_.
* improvement: Added StaleBot. `PR228 <https://github.com/aws/aws-xray-sdk-python/pull/228>`_.
2.5.0
==========
* bugfix: Downgrade Coverage to 4.5.4. `PR197 `_.
* bugfix: Unwrap context provided to psycopg2.extensions.quote_ident. `PR198 `_.
* feature: extension support as Bottle plugin. `PR204 `_.
* bugfix: streaming_threshold not None check. `PR205 `_.
* bugfix: Add support for Django 2.0 to 3.0. `PR206 `_.
* bugfix: add puttracesegments to boto whitelist avoid a catch 22. `PR210 `_.
* feature: Add patch support for pymysql. `PR215 `_.
2.4.3
==========
* bugfix: Downstream Http Calls should use hostname rather than full URL as subsegment name. `PR192 `_.
* improvement: Whitelist SageMakerRuntime InvokeEndpoint operation. `PR183 `_.
* bugfix: Fix patching for PynamoDB4 with botocore 1.13. `PR181 `_.
* bugfix: Add X-Ray client with default empty credentials. `PR180 `_.
* improvement: Faster implementation of Wildcard Matching. `PR178 `_.
* bugfix: Make patch compatible with PynamoDB4. `PR177 `_.
* bugfix: Fix unit tests for newer versions of psycopg2. `PR163 `_.
* improvement: Enable tests with python 3.7. `PR157 `_.
2.4.2
==========
* bugfix: Fix exception processing in Django running in Lambda. `PR145 `_.
* bugfix: Poller threads block main thread from exiting bug. `PR144 `_.
2.4.1
==========
* bugfix: Middlewares should create subsegments only when in the Lambda context running under a Lambda environment. `PR139 `_.
2.4.0
==========
* feature: Add ability to enable/disable the SDK. `PR119 `_.
* feature: Add Serverless Framework Support `PR127 `_.
* feature: Bring aiobotocore support back. `PR125 `_.
* bugfix: Fix httplib invalid scheme detection for HTTPS. `PR122 `_.
* bugfix: Max_trace_back = 0 returns full exception stack trace bug fix. `PR123 `_.
* bugfix: Rename incorrect config module name to the correct global name. `PR130 `_.
* bugfix: Correctly remove password component from SQLAlchemy URLs, preventing... `PR132 `_.
2.3.0
==========
* feature: Stream Django ORM SQL queries and add flag to toggle their streaming. `PR111 `_.
* feature: Recursively patch any given module functions with capture. `PR113 `_.
* feature: Add patch support for pg8000 (Pure Python Driver). `PR115 `_.
* improvement: Remove the dependency on Requests. `PR112 `_.
* bugfix: Fix psycopg2 register type. `PR95 `_.
2.2.0
=====
* feature: Added context managers on segment/subsegment capture. `PR97 `_.
* feature: Added AWS SNS topic ARN to the default whitelist file. `PR93 `_.
* bugfix: Fixed an issue on `psycopg2` to support all keywords. `PR91 `_.
* bugfix: Fixed an issue on `endSegment` when there is context missing. `ISSUE98 `_.
* bugfix: Fixed the package description rendered on PyPI. `PR101 `_.
* bugfix: Fixed an issue where `patch_all` could patch the same module multiple times. `ISSUE99 `_.
* bugfix: Fixed the `datetime` to `epoch` conversion on Windows OS. `ISSUE103 `_.
* bugfix: Fixed a wrong segment json key where it should be `sampling_rule_name` rather than `rule_name`.
2.1.0
=====
* feature: Added support for `psycopg2`. `PR83 `_.
* feature: Added support for `pynamodb` >= 3.3.1. `PR88 `_.
* improvement: Improved stack trace recording when exception is thrown in decorators. `PR70 `_.
* bugfix: Argument `sampling_req` in LocalSampler `should_trace` method now becomes optional. `PR89 `_.
* bugfix: Fixed a wrong test setup and leftover poller threads in recorder unit test.
2.0.1
=====
* bugfix: Fixed an issue where manually `begin_segment` might break when making sampling decisions. `PR82 `_.
2.0.0
=====
* **Breaking**: The default sampler now launches background tasks to poll sampling rules from X-Ray backend. See the new default sampling strategy in more details here: https://docs.aws.amazon.com/xray/latest/devguide/xray-sdk-python-configuration.html#xray-sdk-python-configuration-sampling.
* **Breaking**: The `should_trace` function in the sampler now takes a dictionary for sampling rule matching.
* **Breaking**: The original sampling modules for local defined rules are moved from `models.sampling` to `models.sampling.local`.
* **Breaking**: The default behavior of `patch_all` changed to selectively patches libraries to avoid double patching. You can use `patch_all(double_patch=True)` to force it to patch ALL supported libraries. See more details on `ISSUE63 `_
* **Breaking**: The latest `botocore` that has new X-Ray service API `GetSamplingRules` and `GetSamplingTargets` are required.
* **Breaking**: Version 2.x doesn't support pynamodb and aiobotocore as it requires botocore >= 1.11.3 which isn’t currently supported by the pynamodb and aiobotocore libraries. Please continue to use version 1.x if you’re using pynamodb or aiobotocore until those have been updated to use botocore >= 1.11.3.
* feature: Environment variable `AWS_XRAY_DAEMON_ADDRESS` now takes an additional notation in `tcp:127.0.0.1:2000 udp:127.0.0.2:2001` to set TCP and UDP destination separately. By default it assumes a X-Ray daemon listening to both UDP and TCP traffic on `127.0.0.1:2000`.
* feature: Added MongoDB python client support. `PR65 `_.
* bugfix: Support binding connection in sqlalchemy as well as engine. `PR78 `_.
* bugfix: Flask middleware safe request teardown. `ISSUE75 `_.
1.1.2
=====
* bugfix: Fixed an issue on PynamoDB patcher where the capture didn't handle client timeout.
1.1.1
=====
* bugfix: Handle Aiohttp Exceptions as valid responses `PR59 `_.
1.1
===
* feature: Added Sqlalchemy parameterized query capture. `PR34 `_
* bugfix: Allow standalone sqlalchemy integrations without flask_sqlalchemy. `PR53 `_
* bugfix: Give up aiohttp client tracing when there is no open segment and LOG_ERROR is configured. `PR58 `_
* bugfix: Handle missing subsegment when rendering a Django template. `PR54 `_
* Typo fixes on comments and docs.
1.0
===
* Changed development status to `5 - Production/Stable` and removed beta tag.
* feature: Added S3 API parameters to the default whitelist.
* feature: Added new recorder APIs to add annotations/metadata.
* feature: The recorder now adds more runtime and version information to sampled segments.
* feature: Django, Flask and Aiohttp middleware now inject trace header to response headers.
* feature: Added a new API to configure maximum captured stack trace.
* feature: Modularized subsegments streaming logic and now it can be overridden with custom implementation.
* bugfix(**Breaking**): Subsegment `set_user` API is removed since this attribute is not supported by X-Ray back-end.
* bugfix: Fixed an issue where arbitrary fields in trace header being dropped when calling downstream.
* bugfix: Fixed a compatibility issue between botocore and httplib patcher. `ISSUE48 `_.
* bugfix: Fixed a typo in sqlalchemy decorators. `PR50 `_.
* Updated `README` with more usage examples.
0.97
====
* feature: Support aiohttp client tracing for aiohttp 3.x. `PR42 `_.
* feature: Use the official middleware pattern for Aiohttp ext. `PR29 `_.
* bugfix: Aiohttp middleware serialized URL values incorrectly. `PR37 `_
* bugfix: Don't overwrite plugins list on each `.configure` call. `PR38 `_
* bugfix: Do not swallow `return_value` when context is missing and `LOG_ERROR` is set. `PR44 `_
* bugfix: Loose entity name validation. `ISSUE36 `_
* bugfix: Fix PyPI project page being rendered incorrectly. `ISSUE30 `_
0.96
====
* feature: Add support for SQLAlchemy and Flask-SQLAlchemy. `PR14 `_.
* feature: Add support for PynamoDB calls to DynamoDB. `PR13 `_.
* feature: Add support for httplib calls. `PR19 `_.
* feature: Make streaming threshold configurable through public interface. `ISSUE21 `_.
* bugfix: Drop invalid annotation keys and log a warning. `PR22 `_.
* bugfix: Respect `with` statement on cursor objects in dbapi2 patcher. `PR17 `_.
* bugfix: Don't throw error from built in subsegment capture when `LOG_ERROR` is set. `ISSUE4 `_.
0.95
====
* **Breaking**: AWS API parameter whitelist json file is moved to path `aws_xray_sdk/ext/resources/aws_para_whitelist.json` in `PR6 `_.
* Added aiobotocore/aioboto3 support and async function capture. `PR6 `_
* Added logic to removing segment/subsegment name invalid characters. `PR9 `_
* Temporarily disabled tests run on Django2.0. `PR10 `_
* Code cleanup. `PR11 `_
0.94
====
* Added aiohttp support. `PR3 `_
0.93
====
* The X-Ray SDK for Python is now an open source project. You can follow the project and submit issues and pull requests on GitHub: https://github.com/aws/aws-xray-sdk-python
0.92.2
======
* bugfix: Fixed an issue that caused the X-Ray recorder to omit the origin when recording segments with a service plugin. This caused the service's type to not appear on the service map in the X-Ray console.
0.92.1
======
* bugfix: Fixed an issue that caused all calls to Amazon DynamoDB tables to be grouped under a single node in the service map. With this update, each table gets a separate node.
0.92
====
* feature: Add Flask support
* feature: Add dynamic naming on segment name
0.91.1
======
* bugfix: The SDK has been released as a universal wheel
================================================
FILE: CODE_OF_CONDUCT.md
================================================
## Code of Conduct
This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
opensource-codeofconduct@amazon.com with any additional questions or comments.
================================================
FILE: CONTRIBUTING.md
================================================
# Contributing Guidelines
Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
documentation, we greatly value feedback and contributions from our community.
Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
information to effectively respond to your bug report or contribution.
## Reporting Bugs/Feature Requests
We welcome you to use the GitHub issue tracker to report bugs or suggest features.
When filing an issue, please check [existing open](https://github.com/aws/aws-xray-sdk-python/issues), or [recently closed](https://github.com/aws/aws-xray-sdk-python/issues?utf8=%E2%9C%93&q=is%3Aissue%20is%3Aclosed%20), issues to make sure somebody else hasn't already
reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:
* A reproducible test case or series of steps
* The version of our code being used
* Any modifications you've made relevant to the bug
* Anything unusual about your environment or deployment
## Contributing via Pull Requests
Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
1. You are working against the latest source on the *master* branch.
2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
To send us a pull request, please:
1. Fork the repository.
2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.
3. Ensure local tests pass.
4. Commit to your fork using clear commit messages.
5. Send us a pull request, answering any default questions in the pull request interface.
6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.
GitHub provides additional document on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
[creating a pull request](https://help.github.com/articles/creating-a-pull-request/).
## Finding contributions to work on
Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](https://github.com/aws/aws-xray-sdk-python/labels/help%20wanted) issues is a great place to start.
## Code of Conduct
This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
opensource-codeofconduct@amazon.com with any additional questions or comments.
## Security issue notifications
If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue.
## Licensing
See the [LICENSE](https://github.com/aws/aws-xray-sdk-python/blob/master/LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
We may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes.
================================================
FILE: LICENSE
================================================
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
================================================
FILE: MANIFEST.in
================================================
include aws_xray_sdk/ext/resources/*.json
include aws_xray_sdk/core/sampling/local/*.json
include README.md
include LICENSE
include NOTICE
================================================
FILE: NOTICE
================================================
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
================================================
FILE: README.md
================================================

[](https://codecov.io/gh/aws/aws-xray-sdk-python)
# AWS X-Ray SDK for Python
## :mega: Upcoming Maintenance Mode on February 25, 2026
[The AWS X-Ray SDKs will enter maintenance mode on **`February 25, 2026`**][xray-sdk-daemon-timeline]. During maintenance mode, the X-Ray SDKs and Daemon will only receive critical bug fixes and security updates, and will not be updated to support new features.
We recommend that you migrate to [AWS Distro for OpenTelemetry (ADOT) or OpenTelemetry Instrumentation][xray-otel-migration-docs] to generate traces (through manual or zero-code instrumentation) from your application and send them to AWS X-Ray. OpenTelemetry is the industry-wide standard for tracing instrumentation and observability. It has a large open-source community for support and provides more instrumentations and updates. By adopting an OpenTelemetry solution, developers can leverage the latest services and innovations from AWS CloudWatch.
[xray-otel-migration-docs]: https://docs.aws.amazon.com/xray/latest/devguide/xray-sdk-migration.html
[xray-sdk-daemon-timeline]: https://docs.aws.amazon.com/xray/latest/devguide/xray-daemon-eos.html
-------------------------------------
### OpenTelemetry Python with AWS X-Ray
AWS X-Ray supports using OpenTelemetry Python and the AWS Distro for OpenTelemetry (ADOT) Collector to instrument your application and send trace data to X-Ray. The OpenTelemetry SDKs are an industry-wide standard for tracing instrumentation. They provide more instrumentations and have a larger community for support, but may not have complete feature parity with the X-Ray SDKs. See [choosing between the ADOT and X-Ray SDKs](https://docs.aws.amazon.com/xray/latest/devguide/xray-instrumenting-your-app.html#xray-instrumenting-choosing) for more help with choosing between the two.
If you want additional features when tracing your Python applications, please [open an issue on the OpenTelemetry Python Instrumentation repository](https://github.com/open-telemetry/opentelemetry-python-contrib/issues/new?labels=feature-request&template=feature_request.md&title=X-Ray%20Compatible%20Feature%20Request).
### Python Versions End-of-Support Notice
AWS X-Ray SDK for Python versions `>2.11.0` has dropped support for Python 2.7, 3.4, 3.5, and 3.6.
-------------------------------------

## Installing
The AWS X-Ray SDK for Python is compatible with Python 3.7, 3.8, 3.9, 3.10, and 3.11.
Install the SDK using the following command (the SDK's non-testing dependencies will be installed).
```
pip install aws-xray-sdk
```
To install the SDK's testing dependencies, use the following command.
```
pip install tox
```
## Getting Help
Use the following community resources for getting help with the SDK. We use the GitHub
issues for tracking bugs and feature requests.
* Ask a question in the [AWS X-Ray Forum](https://forums.aws.amazon.com/forum.jspa?forumID=241&start=0).
* Open a support ticket with [AWS Support](http://docs.aws.amazon.com/awssupport/latest/user/getting-started.html).
* If you think you may have found a bug, open an [issue](https://github.com/aws/aws-xray-sdk-python/issues/new).
## Opening Issues
If you encounter a bug with the AWS X-Ray SDK for Python, we want to hear about
it. Before opening a new issue, search the [existing issues](https://github.com/aws/aws-xray-sdk-python/issues)
to see if others are also experiencing the issue. Include the version of the AWS X-Ray
SDK for Python, Python language, and botocore/boto3 if applicable. In addition,
include the repro case when appropriate.
The GitHub issues are intended for bug reports and feature requests. For help and
questions about using the AWS SDK for Python, use the resources listed
in the [Getting Help](https://github.com/aws/aws-xray-sdk-python#getting-help) section. Keeping the list of open issues lean helps us respond in a timely manner.
## Documentation
The [developer guide](https://docs.aws.amazon.com/xray/latest/devguide) provides in-depth
guidance about using the AWS X-Ray service.
The [API Reference](http://docs.aws.amazon.com/xray-sdk-for-python/latest/reference/)
provides guidance for using the SDK and module-level documentation.
## Quick Start
### Configuration
```python
from aws_xray_sdk.core import xray_recorder
xray_recorder.configure(
sampling=False,
context_missing='LOG_ERROR',
plugins=('EC2Plugin', 'ECSPlugin', 'ElasticBeanstalkPlugin'),
daemon_address='127.0.0.1:3000',
dynamic_naming='*mysite.com*'
)
```
### Start a custom segment/subsegment
Using context managers for implicit exceptions recording:
```python
from aws_xray_sdk.core import xray_recorder
with xray_recorder.in_segment('segment_name') as segment:
# Add metadata or annotation here if necessary
segment.put_metadata('key', dict, 'namespace')
with xray_recorder.in_subsegment('subsegment_name') as subsegment:
subsegment.put_annotation('key', 'value')
# Do something here
with xray_recorder.in_subsegment('subsegment2') as subsegment:
subsegment.put_annotation('key2', 'value2')
# Do something else
```
async versions of context managers:
```python
from aws_xray_sdk.core import xray_recorder
async with xray_recorder.in_segment_async('segment_name') as segment:
# Add metadata or annotation here if necessary
segment.put_metadata('key', dict, 'namespace')
async with xray_recorder.in_subsegment_async('subsegment_name') as subsegment:
subsegment.put_annotation('key', 'value')
# Do something here
async with xray_recorder.in_subsegment_async('subsegment2') as subsegment:
subsegment.put_annotation('key2', 'value2')
# Do something else
```
Default begin/end functions:
```python
from aws_xray_sdk.core import xray_recorder
# Start a segment
segment = xray_recorder.begin_segment('segment_name')
# Start a subsegment
subsegment = xray_recorder.begin_subsegment('subsegment_name')
# Add metadata or annotation here if necessary
segment.put_metadata('key', dict, 'namespace')
subsegment.put_annotation('key', 'value')
xray_recorder.end_subsegment()
# Close the segment
xray_recorder.end_segment()
```
### Oversampling Mitigation
To modify the sampling decision at the subsegment level, subsegments that inherit the decision of their direct parent (segment or subsegment) can be created using `xray_recorder.begin_subsegment()` and unsampled subsegments can be created using
`xray_recorder.begin_subsegment_without_sampling()`.
The code snippet below demonstrates creating a sampled or unsampled subsegment based on the sampling decision of each SQS message processed by Lambda.
```python
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.models.subsegment import Subsegment
from aws_xray_sdk.core.utils.sqs_message_helper import SqsMessageHelper
def lambda_handler(event, context):
for message in event['Records']:
if SqsMessageHelper.isSampled(message):
subsegment = xray_recorder.begin_subsegment('sampled_subsegment')
print('sampled - processing SQS message')
else:
subsegment = xray_recorder.begin_subsegment_without_sampling('unsampled_subsegment')
print('unsampled - processing SQS message')
xray_recorder.end_subsegment()
```
The code snippet below demonstrates wrapping a downstream AWS SDK request with an unsampled subsegment.
```python
from aws_xray_sdk.core import xray_recorder, patch_all
import boto3
patch_all()
def lambda_handler(event, context):
subsegment = xray_recorder.begin_subsegment_without_sampling('unsampled_subsegment')
client = boto3.client('sqs')
print(client.list_queues())
xray_recorder.end_subsegment()
```
### Capture
As a decorator:
```python
from aws_xray_sdk.core import xray_recorder
@xray_recorder.capture('subsegment_name')
def myfunc():
# Do something here
myfunc()
```
or as a context manager:
```python
from aws_xray_sdk.core import xray_recorder
with xray_recorder.capture('subsegment_name') as subsegment:
# Do something here
subsegment.put_annotation('mykey', val)
# Do something more
```
Async capture as decorator:
```python
from aws_xray_sdk.core import xray_recorder
@xray_recorder.capture_async('subsegment_name')
async def myfunc():
# Do something here
async def main():
await myfunc()
```
or as context manager:
```python
from aws_xray_sdk.core import xray_recorder
async with xray_recorder.capture_async('subsegment_name') as subsegment:
# Do something here
subsegment.put_annotation('mykey', val)
# Do something more
```
### Adding annotations/metadata using recorder
```python
from aws_xray_sdk.core import xray_recorder
# Start a segment if no segment exist
segment1 = xray_recorder.begin_segment('segment_name')
# This will add the key value pair to segment1 as it is active
xray_recorder.put_annotation('key', 'value')
# Start a subsegment so it becomes the active trace entity
subsegment1 = xray_recorder.begin_subsegment('subsegment_name')
# This will add the key value pair to subsegment1 as it is active
xray_recorder.put_metadata('key', 'value')
if xray_recorder.is_sampled():
# some expensive annotations/metadata generation code here
val = compute_annotation_val()
metadata = compute_metadata_body()
xray_recorder.put_annotation('mykey', val)
xray_recorder.put_metadata('mykey', metadata)
```
### Generate NoOp Trace and Entity Id
X-Ray Python SDK will by default generate no-op trace and entity ids for unsampled requests and secure random trace and entity ids for sampled requests. If customers want secure random trace and entity ids to be generated for all (sampled/unsampled) requests — applicable for the trace id injection into logs use case — they should set the `AWS_XRAY_NOOP_ID` environment variable to False.
### Disabling X-Ray
Often times, it may be useful to be able to disable X-Ray for specific use cases, whether to stop X-Ray from sending traces at any moment, or to test code functionality that originally depended on X-Ray instrumented packages to begin segments prior to the code call. For example, if your application relied on an XRayMiddleware to instrument incoming web requests, and you have a method which begins subsegments based on the segment generated by that middleware, it would be useful to be able to disable X-Ray for your unit tests so that `SegmentNotFound` exceptions are not thrown when you need to test your method.
There are two ways to disable X-Ray, one is through environment variables, and the other is through the SDKConfig module.
**Disabling through the environment variable:**
Prior to running your application, make sure to have the environment variable `AWS_XRAY_SDK_ENABLED` set to `false`.
**Disabling through the SDKConfig module:**
```
from aws_xray_sdk import global_sdk_config
global_sdk_config.set_sdk_enabled(False)
```
**Important Notes:**
* Environment Variables always take precedence over the SDKConfig module when disabling/enabling. If your environment variable is set to `false` while your code calls `global_sdk_config.set_sdk_enabled(True)`, X-Ray will still be disabled.
* If you need to re-enable X-Ray again during runtime and acknowledge disabling/enabling through the SDKConfig module, you may run the following in your application:
```
import os
from aws_xray_sdk import global_sdk_config
del os.environ['AWS_XRAY_SDK_ENABLED']
global_sdk_config.set_sdk_enabled(True)
```
### Trace AWS Lambda functions
```python
from aws_xray_sdk.core import xray_recorder
def lambda_handler(event, context):
# ... some code
subsegment = xray_recorder.begin_subsegment('subsegment_name')
# Code to record
# Add metadata or annotation here, if necessary
subsegment.put_metadata('key', dict, 'namespace')
subsegment.put_annotation('key', 'value')
xray_recorder.end_subsegment()
# ... some other code
```
### Trace ThreadPoolExecutor
```python
import concurrent.futures
import requests
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core import patch
patch(('requests',))
URLS = ['http://www.amazon.com/',
'http://aws.amazon.com/',
'http://example.com/',
'http://www.bilibili.com/',
'http://invalid-domain.com/']
def load_url(url, trace_entity):
# Set the parent X-Ray entity for the worker thread.
xray_recorder.set_trace_entity(trace_entity)
# Subsegment captured from the following HTTP GET will be
# a child of parent entity passed from the main thread.
resp = requests.get(url)
# prevent thread pollution
xray_recorder.clear_trace_entities()
return resp
# Get the current active segment or subsegment from the main thread.
current_entity = xray_recorder.get_trace_entity()
with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
# Pass the active entity from main thread to worker threads.
future_to_url = {executor.submit(load_url, url, current_entity): url for url in URLS}
for future in concurrent.futures.as_completed(future_to_url):
url = future_to_url[future]
try:
data = future.result()
except Exception:
pass
```
### Trace SQL queries
By default, if no other value is provided to `.configure()`, SQL trace streaming is enabled
for all the supported DB engines. Those currently are:
- Any engine attached to the Django ORM.
- Any engine attached to SQLAlchemy.
The behaviour can be toggled by sending the appropriate `stream_sql` value, for example:
```python
from aws_xray_sdk.core import xray_recorder
xray_recorder.configure(service='fallback_name', stream_sql=True)
```
### Patch third-party libraries
```python
from aws_xray_sdk.core import patch
libs_to_patch = ('boto3', 'mysql', 'requests')
patch(libs_to_patch)
```
#### Automatic module patching
Full modules in the local codebase can be recursively patched by providing the module references
to the patch function.
```python
from aws_xray_sdk.core import patch
libs_to_patch = ('boto3', 'requests', 'local.module.ref', 'other_module')
patch(libs_to_patch)
```
An `xray_recorder.capture()` decorator will be applied to all functions and class methods in the
given module and all the modules inside them recursively. Some files/modules can be excluded by
providing to the `patch` function a regex that matches them.
```python
from aws_xray_sdk.core import patch
libs_to_patch = ('boto3', 'requests', 'local.module.ref', 'other_module')
ignore = ('local.module.ref.some_file', 'other_module.some_module\.*')
patch(libs_to_patch, ignore_module_patterns=ignore)
```
### Django
#### Add Django middleware
In django settings.py, use the following.
```python
INSTALLED_APPS = [
# ... other apps
'aws_xray_sdk.ext.django',
]
MIDDLEWARE = [
'aws_xray_sdk.ext.django.middleware.XRayMiddleware',
# ... other middlewares
]
```
You can configure the X-Ray recorder in a Django app under the `XRAY_RECORDER` namespace. For a minimal configuration, the `AWS_XRAY_TRACING_NAME` setting is required unless it is specified in an environment variable.
```
XRAY_RECORDER = {
'AWS_XRAY_TRACING_NAME': 'My application', # Required - the segment name for segments generated from incoming requests
}
```
For more information about configuring Django with X-Ray read more about it in the [API reference](https://docs.aws.amazon.com/xray-sdk-for-python/latest/reference/frameworks.html)
#### SQL tracing
If Django's ORM is patched - either using the `AUTO_INSTRUMENT = True` in your settings file
or explicitly calling `patch_db()` - the SQL query trace streaming can then be enabled or
disabled updating the `STREAM_SQL` variable in your settings file. It is enabled by default.
#### Automatic patching
The automatic module patching can also be configured through Django settings.
```python
XRAY_RECORDER = {
'PATCH_MODULES': [
'boto3',
'requests',
'local.module.ref',
'other_module',
],
'IGNORE_MODULE_PATTERNS': [
'local.module.ref.some_file',
'other_module.some_module\.*',
],
...
}
```
If `AUTO_PATCH_PARENT_SEGMENT_NAME` is also specified, then a segment parent will be created
with the supplied name, wrapping the automatic patching so that it captures any dangling
subsegments created on the import patching.
### Django in Lambda
X-Ray can't search on http values in subsegments. To enable searching, the middleware adds the http values as annotations.
This allows searching in the X-Ray console like so:
This is configurable in settings with `URLS_AS_ANNOTATION` that has 3 valid values
`LAMBDA` - the default, which uses URLs as annotations by default if running in a lambda context
`ALL` - do this for every request (useful if running in a mixed lambda/other deployment)
`NONE` - don't do this for any (avoiding hitting the 50 annotation limit)
```
annotation.url BEGINSWITH "https://your.url.com/here"
```
### Add Flask middleware
```python
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.ext.flask.middleware import XRayMiddleware
app = Flask(__name__)
xray_recorder.configure(service='fallback_name', dynamic_naming='*mysite.com*')
XRayMiddleware(app, xray_recorder)
```
### Add Bottle middleware(plugin)
```python
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.ext.bottle.middleware import XRayMiddleware
app = Bottle()
xray_recorder.configure(service='fallback_name', dynamic_naming='*mysite.com*')
app.install(XRayMiddleware(xray_recorder))
```
### Serverless Support for Flask & Django & Bottle Using X-Ray
Serverless is an application model that enables you to shift more of your operational responsibilities to AWS. As a result, you can focus only on your applications and services, instead of the infrastructure management tasks such as server provisioning, patching, operating system maintenance, and capacity provisioning. With serverless, you can deploy your web application to [AWS Lambda](https://aws.amazon.com/lambda/) and have customers interact with it through a Lambda-invoking endpoint, such as [Amazon API Gateway](https://aws.amazon.com/api-gateway/).
X-Ray supports the Serverless model out of the box and requires no extra configuration. The middlewares in Lambda generate `Subsegments` instead of `Segments` when an endpoint is reached. This is because `Segments` cannot be generated inside the Lambda function, but it is generated automatically by the Lambda container. Therefore, when using the middlewares with this model, it is important to make sure that your methods only generate `Subsegments`.
The following guide shows an example of setting up a Serverless application that utilizes API Gateway and Lambda:
[Instrumenting Web Frameworks in a Serverless Environment](https://docs.aws.amazon.com/xray/latest/devguide/xray-sdk-python-serverless.html)
### Working with aiohttp
Adding aiohttp middleware. Support aiohttp >= 2.3.
```python
from aiohttp import web
from aws_xray_sdk.ext.aiohttp.middleware import middleware
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.async_context import AsyncContext
xray_recorder.configure(service='fallback_name', context=AsyncContext())
app = web.Application(middlewares=[middleware])
app.router.add_get("/", handler)
web.run_app(app)
```
Tracing aiohttp client. Support aiohttp >=3.
```python
from aws_xray_sdk.ext.aiohttp.client import aws_xray_trace_config
async def foo():
trace_config = aws_xray_trace_config()
async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:
        async with session.get(url) as resp:
await resp.read()
```
### Use SQLAlchemy ORM
The SQLAlchemy integration requires you to override the Session and Query Classes for SQL Alchemy
SQLAlchemy integration uses subsegments so you need to have a segment started before you make a query.
```python
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.ext.sqlalchemy.query import XRaySessionMaker
xray_recorder.begin_segment('SQLAlchemyTest')
Session = XRaySessionMaker(bind=engine)
session = Session()
xray_recorder.end_segment()
app = Flask(__name__)
xray_recorder.configure(service='fallback_name', dynamic_naming='*mysite.com*')
XRayMiddleware(app, xray_recorder)
```
### Add Flask-SQLAlchemy
```python
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.ext.flask.middleware import XRayMiddleware
from aws_xray_sdk.ext.flask_sqlalchemy.query import XRayFlaskSqlAlchemy
app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"
XRayMiddleware(app, xray_recorder)
db = XRayFlaskSqlAlchemy(app)
```
### Ignoring httplib requests
If you want to ignore certain httplib requests you can do so based on the hostname or URL that is being requested. The hostname is matched using the Python [fnmatch library](https://docs.python.org/3/library/fnmatch.html) which does Unix glob style matching.
```python
from aws_xray_sdk.ext.httplib import add_ignored as xray_add_ignored
# ignore requests to test.myapp.com
xray_add_ignored(hostname='test.myapp.com')
# ignore requests to a subdomain of myapp.com with a glob pattern
xray_add_ignored(hostname='*.myapp.com')
# ignore requests to /test-url and /other-test-url
xray_add_ignored(urls=['/test-path', '/other-test-path'])
# ignore requests to myapp.com for /test-url
xray_add_ignored(hostname='myapp.com', urls=['/test-url'])
```
If you use a subclass of httplib to make your requests, you can also filter on the class name that initiates the request. This must use the complete package name to do the match.
```python
from aws_xray_sdk.ext.httplib import add_ignored as xray_add_ignored
# ignore all requests made by botocore
xray_add_ignored(subclass='botocore.awsrequest.AWSHTTPConnection')
```
## License
The AWS X-Ray SDK for Python is licensed under the Apache 2.0 License. See LICENSE and NOTICE.txt for more information.
================================================
FILE: __init__.py
================================================
================================================
FILE: aws_xray_sdk/__init__.py
================================================
from .sdk_config import SDKConfig

# Process-wide SDK configuration singleton. Other modules import this to
# check whether X-Ray tracing is enabled (global_sdk_config.sdk_enabled()).
global_sdk_config = SDKConfig()
================================================
FILE: aws_xray_sdk/core/__init__.py
================================================
from .async_recorder import AsyncAWSXRayRecorder
from .patcher import patch, patch_all
from .recorder import AWSXRayRecorder

# Global recorder instance used throughout the SDK. The async-capable
# recorder subclasses AWSXRayRecorder, so it also serves synchronous code.
xray_recorder = AsyncAWSXRayRecorder()

__all__ = [
    'patch',
    'patch_all',
    'xray_recorder',
    'AWSXRayRecorder',
]
================================================
FILE: aws_xray_sdk/core/async_context.py
================================================
import asyncio
import copy
from .context import Context as _Context
class AsyncContext(_Context):
    """
    Context storage for asyncio-based applications.

    Behaves like the threading-based ``Context`` except that trace
    entities live in task-local storage instead of a ``threading.local``,
    and ``clear_trace_entities`` clears that task-local storage.
    Optionally installs a task factory that propagates the stored
    context from a parent task to newly spawned tasks.
    """

    def __init__(self, *args, loop=None, use_task_factory=True, **kwargs):
        super().__init__(*args, **kwargs)
        # Fall back to the current event loop when none is supplied.
        self._loop = asyncio.get_event_loop() if loop is None else loop
        if use_task_factory:
            # Share the stored context with child tasks on creation.
            self._loop.set_task_factory(task_factory)
        # NOTE: the raw ``loop`` argument (possibly None) is forwarded on
        # purpose; TaskLocalStorage performs its own fallback.
        self._local = TaskLocalStorage(loop=loop)

    def clear_trace_entities(self):
        """
        Remove every trace entity stored for the current task.
        """
        if self._local is not None:
            self._local.clear()
class TaskLocalStorage:
    """
    Simple task local storage.

    Analogous to ``threading.local`` but keyed on the currently running
    asyncio Task: attribute reads/writes are routed to a ``context`` dict
    attached to the current task, so each task sees its own values.
    """
    def __init__(self, loop=None):
        if loop is None:
            loop = asyncio.get_event_loop()
        self._loop = loop

    def __setattr__(self, name, value):
        if name in ('_loop',):
            # '_loop' is a real instance attribute, not task-local state.
            # Set normal attributes
            object.__setattr__(self, name, value)
        else:
            # Set task local attributes. Outside of a running task there is
            # nowhere to store the value, so the write is silently dropped.
            task = asyncio.current_task(loop=self._loop)
            if task is None:
                return None
            if not hasattr(task, 'context'):
                task.context = {}
            task.context[name] = value

    def __getattribute__(self, item):
        if item in ('_loop', 'clear'):
            # Return references to local objects
            return object.__getattribute__(self, item)
        task = asyncio.current_task(loop=self._loop)
        if task is None:
            # Outside of a running task there is no task-local state at all.
            return None
        if hasattr(task, 'context') and item in task.context:
            return task.context[item]
        raise AttributeError('Task context does not have attribute {0}'.format(item))

    def clear(self):
        # If we're in a task, clear the context dictionary
        task = asyncio.current_task(loop=self._loop)
        if task is not None and hasattr(task, 'context'):
            task.context.clear()
def task_factory(loop, coro):
    """
    Task factory function.

    Function closely mirrors the logic inside of
    asyncio.BaseEventLoop.create_task. Then if there is a current
    task and the current task has a context then share that context
    with the new task.
    """
    task = asyncio.Task(coro, loop=loop)
    if task._source_traceback:  # flake8: noqa
        # Drop the frame pointing at this factory, mirroring what
        # BaseEventLoop.create_task does for its own frame.
        del task._source_traceback[-1]  # flake8: noqa

    # Share context with new task if possible
    current_task = asyncio.current_task(loop=loop)
    if current_task is not None and hasattr(current_task, 'context'):
        if current_task.context.get('entities'):
            # NOTE: (enowell) Because the `AWSXRayRecorder`'s `Context` decides
            # the parent by looking at its `_local.entities`, we must copy the entities
            # for concurrent subsegments. Otherwise, the subsegments would be
            # modifying the same `entities` list and subsegments would take other
            # subsegments as parents instead of the original `segment`.
            #
            # See more: https://github.com/aws/aws-xray-sdk-python/blob/0f13101e4dba7b5c735371cb922f727b1d9f46d8/aws_xray_sdk/core/context.py#L90-L101
            new_context = copy.copy(current_task.context)
            new_context['entities'] = [item for item in current_task.context['entities']]
        else:
            new_context = current_task.context
        setattr(task, 'context', new_context)

    return task
================================================
FILE: aws_xray_sdk/core/async_recorder.py
================================================
import time
from aws_xray_sdk.core.recorder import AWSXRayRecorder
from aws_xray_sdk.core.utils import stacktrace
from aws_xray_sdk.core.models.subsegment import SubsegmentContextManager, is_already_recording, subsegment_decorator
from aws_xray_sdk.core.models.segment import SegmentContextManager
class AsyncSegmentContextManager(SegmentContextManager):
    """
    Async (``async with``) counterpart of SegmentContextManager.

    Delegates directly to the synchronous enter/exit logic; only the
    async context-manager protocol methods are added.
    """

    async def __aenter__(self):
        return self.__enter__()

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        return self.__exit__(exc_type, exc_val, exc_tb)
class AsyncSubsegmentContextManager(SubsegmentContextManager):
    """
    Async counterpart of SubsegmentContextManager.

    Usable both as an ``async with`` context manager and, through
    ``capture_async``, as a decorator for coroutine functions.
    """

    @subsegment_decorator
    async def __call__(self, wrapped, instance, args, kwargs):
        if is_already_recording(wrapped):
            # The wrapped function is already decorated, the subsegment will be created later,
            # just return the result
            return await wrapped(*args, **kwargs)

        # Fall back to the wrapped function's name when no explicit
        # subsegment name was given to the decorator.
        func_name = self.name
        if not func_name:
            func_name = wrapped.__name__

        return await self.recorder.record_subsegment_async(
            wrapped, instance, args, kwargs,
            name=func_name,
            namespace='local',
            meta_processor=None,
        )

    async def __aenter__(self):
        return self.__enter__()

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        return self.__exit__(exc_type, exc_val, exc_tb)
class AsyncAWSXRayRecorder(AWSXRayRecorder):
    """Recorder that adds asyncio support on top of AWSXRayRecorder."""

    def capture_async(self, name=None):
        """
        A decorator that records enclosed function in a subsegment.
        It only works with asynchronous functions.

        params str name: The name of the subsegment. If not specified
        the function name will be used.
        """
        return self.in_subsegment_async(name=name)

    def in_segment_async(self, name=None, **segment_kwargs):
        """
        Return a segment async context manager.

        :param str name: the name of the segment
        :param dict segment_kwargs: remaining arguments passed directly to `begin_segment`
        """
        return AsyncSegmentContextManager(self, name=name, **segment_kwargs)

    def in_subsegment_async(self, name=None, **subsegment_kwargs):
        """
        Return a subsegment async context manager.

        :param str name: the name of the subsegment
        :param dict subsegment_kwargs: remaining arguments passed directly to `begin_subsegment`
        """
        return AsyncSubsegmentContextManager(self, name=name, **subsegment_kwargs)

    async def record_subsegment_async(self, wrapped, instance, args, kwargs, name,
                                      namespace, meta_processor):
        """
        Await ``wrapped`` inside a new subsegment, recording any raised
        exception on the subsegment and always closing it afterwards.

        :param wrapped: the coroutine function being traced
        :param instance: bound instance when wrapping a method, else None
        :param name: subsegment name
        :param namespace: 'local' or 'aws'/'remote' namespace string
        :param meta_processor: optional callable to post-process metadata;
            when provided it takes precedence over default exception recording
        """
        subsegment = self.begin_subsegment(name, namespace)

        exception = None
        stack = None
        return_value = None

        try:
            return_value = await wrapped(*args, **kwargs)
            return return_value
        except Exception as e:
            exception = e
            stack = stacktrace.get_stacktrace(limit=self._max_trace_back)
            raise
        finally:
            # No-op if subsegment is `None` due to `LOG_ERROR`.
            if subsegment is not None:
                end_time = time.time()
                if callable(meta_processor):
                    meta_processor(
                        wrapped=wrapped,
                        instance=instance,
                        args=args,
                        kwargs=kwargs,
                        return_value=return_value,
                        exception=exception,
                        subsegment=subsegment,
                        stack=stack,
                    )
                elif exception:
                    # `subsegment` is known non-None here; this inner check
                    # is redundant but kept as-is.
                    if subsegment:
                        subsegment.add_exception(exception, stack)

                self.end_subsegment(end_time)
================================================
FILE: aws_xray_sdk/core/context.py
================================================
import threading
import logging
import os
from .exceptions.exceptions import SegmentNotFoundException
from .models.dummy_entities import DummySegment
from aws_xray_sdk import global_sdk_config
log = logging.getLogger(__name__)

MISSING_SEGMENT_MSG = 'cannot find the current segment/subsegment, please make sure you have a segment open'
SUPPORTED_CONTEXT_MISSING = ('RUNTIME_ERROR', 'LOG_ERROR', 'IGNORE_ERROR')
CXT_MISSING_STRATEGY_KEY = 'AWS_XRAY_CONTEXT_MISSING'


class Context:
    """
    The context storage class to store trace entities(segments/subsegments).
    The default implementation uses threadlocal to store these entities.
    It also provides interfaces to manually inject trace entities which will
    replace the current stored entities and to clean up the storage.

    For any data access or data mutation, if there is no active segment present
    it will use user-defined behavior to handle such case. By default it throws
    a runtime error.

    This data structure is thread-safe.
    """
    def __init__(self, context_missing='LOG_ERROR'):
        self._local = threading.local()
        # The environment variable takes precedence over the constructor arg.
        strategy = os.getenv(CXT_MISSING_STRATEGY_KEY, context_missing)
        self._context_missing = strategy

    def put_segment(self, segment):
        """
        Store the segment created by ``xray_recorder`` to the context.
        It overrides the current segment if there is already one.
        """
        setattr(self._local, 'entities', [segment])

    def end_segment(self, end_time=None):
        """
        End the current active segment.

        :param float end_time: epoch in seconds. If not specified the current
            system time will be used.
        """
        entity = self.get_trace_entity()
        if not entity:
            log.warning("No segment to end")
            return
        if self._is_subsegment(entity):
            # An open subsegment implies the segment to close is its parent.
            entity.parent_segment.close(end_time)
        else:
            entity.close(end_time)

    def put_subsegment(self, subsegment):
        """
        Store the subsegment created by ``xray_recorder`` to the context.
        If you put a new subsegment while there is already an open subsegment,
        the new subsegment becomes the child of the existing subsegment.
        """
        entity = self.get_trace_entity()
        if not entity:
            log.warning("Active segment or subsegment not found. Discarded %s." % subsegment.name)
            return
        entity.add_subsegment(subsegment)
        # The last element of the stack is always the active entity.
        self._local.entities.append(subsegment)

    def end_subsegment(self, end_time=None):
        """
        End the current active subsegment. Return False if there is no
        subsegment to end.

        :param float end_time: epoch in seconds. If not specified the current
            system time will be used.
        """
        entity = self.get_trace_entity()
        if self._is_subsegment(entity):
            entity.close(end_time)
            self._local.entities.pop()
            return True
        elif isinstance(entity, DummySegment):
            # SDK disabled / unsampled dummy entity: nothing real to close.
            return False
        else:
            log.warning("No subsegment to end.")
            return False

    def get_trace_entity(self):
        """
        Return the current trace entity(segment/subsegment). If there is none,
        it behaves based on pre-defined ``context_missing`` strategy.

        If the SDK is disabled, returns a DummySegment
        """
        if not getattr(self._local, 'entities', None):
            if not global_sdk_config.sdk_enabled():
                return DummySegment()
            return self.handle_context_missing()

        return self._local.entities[-1]

    def set_trace_entity(self, trace_entity):
        """
        Store the input trace_entity to local context. It will overwrite all
        existing ones if there is any.
        """
        setattr(self._local, 'entities', [trace_entity])

    def clear_trace_entities(self):
        """
        Clear all trace_entities stored in the local context.
        In case of using threadlocal to store trace entities, it will
        clean up all trace entities created by the current thread.
        """
        self._local.__dict__.clear()

    def handle_context_missing(self):
        """
        Called whenever there is no trace entity to access or mutate.
        """
        if self.context_missing == 'RUNTIME_ERROR':
            raise SegmentNotFoundException(MISSING_SEGMENT_MSG)
        elif self.context_missing == 'LOG_ERROR':
            log.error(MISSING_SEGMENT_MSG)
        # 'IGNORE_ERROR' falls through and implicitly returns None.

    def _is_subsegment(self, entity):
        # Duck-typed check: subsegment entities carry type == 'subsegment'.
        return hasattr(entity, 'type') and entity.type == 'subsegment'

    @property
    def context_missing(self):
        return self._context_missing

    @context_missing.setter
    def context_missing(self, value):
        # Unsupported values are ignored (with a warning) rather than raised.
        if value not in SUPPORTED_CONTEXT_MISSING:
            log.warning('specified context_missing not supported, using default.')
            return

        self._context_missing = value
================================================
FILE: aws_xray_sdk/core/daemon_config.py
================================================
import os
from .exceptions.exceptions import InvalidDaemonAddressException
DAEMON_ADDRESS_KEY = "AWS_XRAY_DAEMON_ADDRESS"
DEFAULT_ADDRESS = '127.0.0.1:2000'


class DaemonConfig:
    """Holds the X-Ray daemon's UDP and TCP endpoint configuration.

    The address string is taken from the ``AWS_XRAY_DAEMON_ADDRESS``
    environment variable when set, otherwise from the recorder's
    ``daemon_address`` configuration.

    Two notations are accepted: a plain ``'127.0.0.1:2000'`` (UDP and
    TCP share one endpoint) or the explicit
    ``'tcp:127.0.0.1:2000 udp:127.0.0.2:2001'`` form. By default a
    daemon at 127.0.0.1:2000 serving both protocols is assumed.
    """

    def __init__(self, daemon_address=DEFAULT_ADDRESS):
        if daemon_address is None:
            daemon_address = DEFAULT_ADDRESS
        raw = os.getenv(DAEMON_ADDRESS_KEY, daemon_address)
        chunks = raw.split(' ')
        if len(chunks) == 1:
            self._parse_single_form(chunks[0])
        elif len(chunks) == 2:
            self._parse_double_form(chunks[0], chunks[1], raw)
        else:
            raise InvalidDaemonAddressException('Invalid daemon address %s specified.' % raw)

    def _parse_single_form(self, val):
        # 'ip:port' — the same endpoint serves both UDP and TCP.
        try:
            fields = val.split(':')
            host = fields[0]
            port = int(fields[1])
        except Exception:
            raise InvalidDaemonAddressException('Invalid daemon address %s specified.' % val)
        self._udp_ip = host
        self._udp_port = port
        self._tcp_ip = host
        self._tcp_port = port

    def _parse_double_form(self, first, second, origin):
        # 'tcp:ip:port udp:ip:port', accepted in either order.
        try:
            by_protocol = {}
            for chunk in (first, second):
                fields = chunk.split(':')
                by_protocol[fields[0]] = fields
            tcp_fields = by_protocol.get('tcp')
            udp_fields = by_protocol.get('udp')
            self._tcp_ip = tcp_fields[1]
            self._tcp_port = int(tcp_fields[2])
            self._udp_ip = udp_fields[1]
            self._udp_port = int(udp_fields[2])
        except Exception:
            raise InvalidDaemonAddressException('Invalid daemon address %s specified.' % origin)

    @property
    def udp_ip(self):
        return self._udp_ip

    @property
    def udp_port(self):
        return self._udp_port

    @property
    def tcp_ip(self):
        return self._tcp_ip

    @property
    def tcp_port(self):
        return self._tcp_port
================================================
FILE: aws_xray_sdk/core/emitters/__init__.py
================================================
================================================
FILE: aws_xray_sdk/core/emitters/udp_emitter.py
================================================
import logging
import socket
from aws_xray_sdk.core.daemon_config import DaemonConfig
from ..exceptions.exceptions import InvalidDaemonAddressException
log = logging.getLogger(__name__)

PROTOCOL_HEADER = "{\"format\":\"json\",\"version\":1}"
PROTOCOL_DELIMITER = '\n'
DEFAULT_DAEMON_ADDRESS = '127.0.0.1:2000'


class UDPEmitter:
    """
    Default emitter that ships serialized segments/subsegments to the
    X-Ray daemon over a non-blocking UDP socket. Any failure during the
    actual transfer is logged and swallowed so that tracing never breaks
    the instrumented application.
    """

    def __init__(self, daemon_address=DEFAULT_DAEMON_ADDRESS):
        self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self._socket.setblocking(0)
        self.set_daemon_address(daemon_address)

    def send_entity(self, entity):
        """
        Serializes a segment/subsegment and sends it to the X-Ray daemon
        over UDP. By default it doesn't retry on failures.

        :param entity: a trace entity to send to the X-Ray daemon
        """
        try:
            payload = "%s%s%s" % (PROTOCOL_HEADER,
                                  PROTOCOL_DELIMITER,
                                  entity.serialize())
            log.debug("sending: %s to %s:%s." % (payload, self._ip, self._port))
            self._send_data(payload)
        except Exception:
            log.exception("Failed to send entity to Daemon.")

    def set_daemon_address(self, address):
        """
        Configure the UDP ip/port from a raw daemon address string,
        delegating the parsing to ``DaemonConfig``.
        """
        if not address:
            return
        parsed = DaemonConfig(address)
        self._ip = parsed.udp_ip
        self._port = parsed.udp_port

    @property
    def ip(self):
        return self._ip

    @property
    def port(self):
        return self._port

    def _send_data(self, data):
        # Fire-and-forget datagram; the socket is non-blocking.
        self._socket.sendto(data.encode('utf-8'), (self._ip, self._port))

    def _parse_address(self, daemon_address):
        # Parse an 'ip:port' string into an (ip, int(port)) tuple.
        try:
            pieces = daemon_address.split(':')
            return pieces[0], int(pieces[1])
        except Exception:
            raise InvalidDaemonAddressException('Invalid daemon address %s specified.' % daemon_address)
================================================
FILE: aws_xray_sdk/core/exceptions/__init__.py
================================================
================================================
FILE: aws_xray_sdk/core/exceptions/exceptions.py
================================================
class InvalidSamplingManifestError(Exception):
    """Raised when a sampling rule manifest is invalid."""
    pass


class SegmentNotFoundException(Exception):
    """Raised when no active segment/subsegment can be found in the
    context (see ``Context.handle_context_missing`` with the
    RUNTIME_ERROR strategy)."""
    pass


class InvalidDaemonAddressException(Exception):
    """Raised when a daemon address string cannot be parsed
    (see ``DaemonConfig`` and ``UDPEmitter``)."""
    pass


class SegmentNameMissingException(Exception):
    """Raised when a segment name is missing."""
    pass


class SubsegmentNameMissingException(Exception):
    """Raised when a subsegment name is missing."""
    pass


class FacadeSegmentMutationException(Exception):
    """Raised on attempts to mutate a facade segment."""
    pass


class MissingPluginNames(Exception):
    """Raised when plugin names are missing."""
    pass


class AlreadyEndedException(Exception):
    """Raised when an operation is attempted on an entity that has
    already ended."""
    pass
================================================
FILE: aws_xray_sdk/core/lambda_launcher.py
================================================
import os
import logging
import threading
from aws_xray_sdk import global_sdk_config
from .models.dummy_entities import DummySegment
from .models.facade_segment import FacadeSegment
from .models.trace_header import TraceHeader
from .context import Context
log = logging.getLogger(__name__)

LAMBDA_TRACE_HEADER_KEY = '_X_AMZN_TRACE_ID'
LAMBDA_TASK_ROOT_KEY = 'LAMBDA_TASK_ROOT'
TOUCH_FILE_DIR = '/tmp/.aws-xray/'
TOUCH_FILE_PATH = '/tmp/.aws-xray/initialized'


def check_in_lambda():
    """
    Detect whether the SDK is loaded inside an AWS Lambda worker.

    Returns None when not running in Lambda. Otherwise drops a touch
    file (signaling SDK initialization) and returns a ``LambdaContext``.
    """
    if not os.getenv(LAMBDA_TASK_ROOT_KEY):
        return None

    try:
        os.mkdir(TOUCH_FILE_DIR)
    except OSError:
        log.debug('directory %s already exists', TOUCH_FILE_DIR)

    try:
        # Create (or truncate) the touch file, then refresh its timestamps.
        with open(TOUCH_FILE_PATH, 'w+'):
            pass
        # utime force second parameter in python2.7
        os.utime(TOUCH_FILE_PATH, None)
    except (IOError, OSError):
        log.warning("Unable to write to %s. Failed to signal SDK initialization." % TOUCH_FILE_PATH)

    return LambdaContext()
class LambdaContext(Context):
    """
    Lambda service will generate a segment for each function invocation which
    cannot be mutated. The context doesn't keep any manually created segment
    but instead every time ``get_trace_entity()`` gets called it refreshes the
    segment based on environment variables set by the Lambda worker.
    """
    def __init__(self):
        # Thread-local storage keeps per-invocation segment/subsegment
        # state isolated between concurrently running threads.
        self._local = threading.local()

    def put_segment(self, segment):
        """
        No-op. The Lambda service owns the segment; user segments are discarded.
        """
        log.warning('Cannot create segments inside Lambda function. Discarded.')

    def end_segment(self, end_time=None):
        """
        No-op. The Lambda service owns the segment lifecycle.
        """
        log.warning('Cannot end segment inside Lambda function. Ignored.')

    def put_subsegment(self, subsegment):
        """
        Refresh the segment every time this function is invoked to prevent
        a new subsegment from being attached to a leaked segment/subsegment.

        :param Subsegment subsegment: the subsegment to attach to the
            current active entity.
        """
        current_entity = self.get_trace_entity()

        # Discard subsegments created while the Lambda worker is still
        # initializing (facade segment flagged 'initializing', or a dummy
        # parent) — they would be orphaned otherwise.
        if not self._is_subsegment(current_entity) and (getattr(current_entity, 'initializing', None) or isinstance(current_entity, DummySegment)):
            if global_sdk_config.sdk_enabled() and not os.getenv(LAMBDA_TRACE_HEADER_KEY):
                log.warning("Subsegment %s discarded due to Lambda worker still initializing" % subsegment.name)
            return

        current_entity.add_subsegment(subsegment)
        self._local.entities.append(subsegment)

    def set_trace_entity(self, trace_entity):
        """
        For Lambda context, we additionally store the segment in the thread local.
        """
        # Resolve the owning segment whether we are handed a segment or a
        # subsegment, so _refresh_context can compare trace ids later.
        if self._is_subsegment(trace_entity):
            segment = trace_entity.parent_segment
        else:
            segment = trace_entity

        setattr(self._local, 'segment', segment)
        setattr(self._local, 'entities', [trace_entity])

    def get_trace_entity(self):
        """
        Return the current active entity: the most recently opened
        subsegment if any, otherwise the (refreshed) facade/dummy segment.
        """
        self._refresh_context()
        if getattr(self._local, 'entities', None):
            return self._local.entities[-1]
        else:
            return self._local.segment

    def _refresh_context(self):
        """
        Get current segment. To prevent resource leaking in Lambda worker,
        every time there is segment present, we compare its trace id to current
        environment variables. If it is different we create a new segment
        and clean up subsegments stored.
        """
        header_str = os.getenv(LAMBDA_TRACE_HEADER_KEY)
        trace_header = TraceHeader.from_header_str(header_str)
        if not global_sdk_config.sdk_enabled():
            # Force the not-sampled path so downstream entities become dummies.
            trace_header._sampled = False

        segment = getattr(self._local, 'segment', None)
        if segment:
            # Ensure customers don't have leaked subsegments across invocations
            if not trace_header.root or trace_header.root == segment.trace_id:
                return
            else:
                self._initialize_context(trace_header)
        else:
            self._initialize_context(trace_header)

    @property
    def context_missing(self):
        # Context-missing strategies do not apply inside Lambda.
        return None

    @context_missing.setter
    def context_missing(self, value):
        pass

    def handle_context_missing(self):
        """
        No-op. There is always a facade/dummy segment available in Lambda.
        """
        pass

    def _initialize_context(self, trace_header):
        """
        Create a segment based on environment variables set by
        AWS Lambda and initialize storage for subsegments.
        """
        sampled = None
        if not global_sdk_config.sdk_enabled():
            # Force subsequent subsegments to be disabled and turned into DummySegments.
            sampled = False
        elif trace_header.sampled == 0:
            sampled = False
        elif trace_header.sampled == 1:
            sampled = True

        segment = None
        # An incomplete trace header means the worker has not finished
        # initializing; use a dummy parent so children are discarded.
        if not trace_header.root or not trace_header.parent or trace_header.sampled is None:
            segment = DummySegment()
            log.debug("Creating NoOp/Dummy parent segment")
        else:
            segment = FacadeSegment(
                name='facade',
                traceid=trace_header.root,
                entityid=trace_header.parent,
                sampled=sampled,
            )

        segment.save_origin_trace_header(trace_header)
        setattr(self._local, 'segment', segment)
        setattr(self._local, 'entities', [])
================================================
FILE: aws_xray_sdk/core/models/__init__.py
================================================
================================================
FILE: aws_xray_sdk/core/models/default_dynamic_naming.py
================================================
from ..utils.search_pattern import wildcard_match
class DefaultDynamicNaming:
    """
    Decides what name to use on a segment generated from an incoming request.

    The candidate host name is compared to a pre-defined wildcard pattern;
    a match keeps the host name as the segment name, otherwise a fixed
    fallback name is used. The host name usually comes from the incoming
    request's headers.
    """
    def __init__(self, pattern, fallback):
        """
        :param str pattern: the regex-like pattern to be compared against.
            Only ``?`` and ``*`` are supported: an asterisk matches any
            combination of characters and a question mark matches any
            single character.
        :param str fallback: the name used when the candidate host name
            does not match ``pattern``.
        """
        self._pattern = pattern
        self._fallback = fallback

    def get_name(self, host_name):
        """
        Return ``host_name`` when it matches the configured pattern,
        otherwise return the fallback name.
        """
        matched = wildcard_match(self._pattern, host_name)
        return host_name if matched else self._fallback
================================================
FILE: aws_xray_sdk/core/models/dummy_entities.py
================================================
import os
from .noop_traceid import NoOpTraceId
from .traceid import TraceId
from .segment import Segment
from .subsegment import Subsegment
class DummySegment(Segment):
    """
    A dummy segment is created when ``xray_recorder`` decides not to sample
    the segment based on sampling rules.

    Adding data to a dummy segment is a no-op except for subsegments, which
    keeps the memory footprint of the SDK small. A dummy segment is never
    sent to the X-Ray daemon; creating one manually is not recommended.
    """
    def __init__(self, name='dummy'):
        # AWS_XRAY_NOOP_ID=false opts out of the fixed all-zero ids and
        # generates a real random trace id / entity id instead.
        noop_id_flag = os.getenv('AWS_XRAY_NOOP_ID')
        if noop_id_flag and noop_id_flag.lower() == 'false':
            super().__init__(name=name, traceid=TraceId().to_id())
        else:
            super().__init__(name=name, traceid=NoOpTraceId().to_id(), entityid='0000000000000000')
        self.sampled = False

    def set_aws(self, aws_meta):
        """No-op."""
        pass

    def put_http_meta(self, key, value):
        """No-op."""
        pass

    def put_annotation(self, key, value):
        """No-op."""
        pass

    def put_metadata(self, key, value, namespace='default'):
        """No-op."""
        pass

    def set_user(self, user):
        """No-op."""
        pass

    def set_service(self, service_info):
        """No-op."""
        pass

    def apply_status_code(self, status_code):
        """No-op."""
        pass

    def add_exception(self, exception, stack, remote=False):
        """No-op."""
        pass

    def serialize(self):
        """No-op. Dummy segments are never sent to the daemon."""
        pass
class DummySubsegment(Subsegment):
    """
    A dummy subsegment is created when ``xray_recorder`` tries to create
    a subsegment under a segment that is not sampled. Adding data to it is
    a no-op and it is never sent to the X-Ray daemon.
    """
    def __init__(self, segment, name='dummy'):
        super().__init__(name, 'dummy', segment)
        # AWS_XRAY_NOOP_ID=false opts out of the fixed all-zero entity id;
        # re-running Entity.__init__ regenerates a random id.
        noop_id_flag = os.getenv('AWS_XRAY_NOOP_ID')
        if noop_id_flag and noop_id_flag.lower() == 'false':
            super(Subsegment, self).__init__(name)
        else:
            super(Subsegment, self).__init__(name, entity_id='0000000000000000')
        self.sampled = False

    def set_aws(self, aws_meta):
        """No-op."""
        pass

    def put_http_meta(self, key, value):
        """No-op."""
        pass

    def put_annotation(self, key, value):
        """No-op."""
        pass

    def put_metadata(self, key, value, namespace='default'):
        """No-op."""
        pass

    def set_sql(self, sql):
        """No-op."""
        pass

    def apply_status_code(self, status_code):
        """No-op."""
        pass

    def add_exception(self, exception, stack, remote=False):
        """No-op."""
        pass

    def serialize(self):
        """No-op. Dummy subsegments are never sent to the daemon."""
        pass
================================================
FILE: aws_xray_sdk/core/models/entity.py
================================================
import logging
import os
import binascii
import time
import string
import json
from ..utils.compat import annotation_value_types
from ..utils.conversion import metadata_to_dict
from .throwable import Throwable
from . import http
from ..exceptions.exceptions import AlreadyEndedException
log = logging.getLogger(__name__)

# Valid characters can be found at http://docs.aws.amazon.com/xray/latest/devguide/xray-api-segmentdocuments.html
_common_invalid_name_characters = '?;*()!$~^<>'
_valid_annotation_key_characters = string.ascii_letters + string.digits + '_'

ORIGIN_TRACE_HEADER_ATTR_KEY = '_origin_trace_header'


class Entity:
    """
    The parent class for segment/subsegment. It holds common properties
    and methods on segment and subsegment.
    """

    def __init__(self, name, entity_id=None):
        """
        :param str name: entity name. Characters invalid for the X-Ray
            backend are stripped out (with a warning).
        :param str entity_id: hexdigits entity id. A random id is
            generated when not provided.
        """
        if not entity_id:
            self.id = self._generate_random_id()
        else:
            self.id = entity_id

        # required attributes
        # Strip characters the X-Ray segment document schema rejects.
        self.name = ''.join([c for c in name if c not in _common_invalid_name_characters])
        self.start_time = time.time()
        self.parent_id = None

        if self.name != name:
            log.warning("Removing Segment/Subsegment Name invalid characters from %s.", name)

        # sampling
        self.sampled = True

        # state
        self.in_progress = True

        # meta fields
        self.http = {}
        self.annotations = {}
        self.metadata = {}
        self.aws = {}
        self.cause = {}

        # child subsegments
        # list is thread-safe
        self.subsegments = []

    def close(self, end_time=None):
        """
        Close the trace entity by setting `end_time`
        and flip the in progress flag to False.

        :param float end_time: Epoch in seconds. If not specified
            current time will be used.
        """
        self._check_ended()

        if end_time:
            self.end_time = end_time
        else:
            self.end_time = time.time()
        self.in_progress = False

    def add_subsegment(self, subsegment):
        """
        Add input subsegment as a child subsegment.
        """
        self._check_ended()
        subsegment.parent_id = self.id

        if not self.sampled and subsegment.sampled:
            log.warning("This sampled subsegment is being added to an unsampled parent segment/subsegment and will be orphaned.")

        self.subsegments.append(subsegment)

    def remove_subsegment(self, subsegment):
        """
        Remove input subsegment from child subsegments.
        """
        self.subsegments.remove(subsegment)

    def put_http_meta(self, key, value):
        """
        Add http related metadata.

        :param str key: Currently supported keys are:
            * url
            * method
            * user_agent
            * client_ip
            * status
            * content_length
        :param value: status and content_length are int and for other
            supported keys string should be used.
        """
        self._check_ended()

        if value is None:
            return

        if key == http.STATUS:
            if isinstance(value, str):
                value = int(value)
            # Status code also drives fault/error/throttle flags.
            self.apply_status_code(value)

        if key in http.request_keys:
            if 'request' not in self.http:
                self.http['request'] = {}
            self.http['request'][key] = value
        elif key in http.response_keys:
            if 'response' not in self.http:
                self.http['response'] = {}
            self.http['response'][key] = value
        else:
            log.warning("ignoring unsupported key %s in http meta.", key)

    def put_annotation(self, key, value):
        """
        Annotate segment or subsegment with a key-value pair.
        Annotations will be indexed for later search query.

        :param str key: annotation key
        :param object value: annotation value. Any type other than
            string/number/bool will be dropped
        """
        self._check_ended()

        if not isinstance(key, str):
            log.warning("ignoring non string type annotation key with type %s.", type(key))
            return

        if not isinstance(value, annotation_value_types):
            log.warning("ignoring unsupported annotation value type %s.", type(value))
            return

        if any(character not in _valid_annotation_key_characters for character in key):
            log.warning("ignoring annotation with unsupported characters in key: '%s'.", key)
            return

        self.annotations[key] = value

    def put_metadata(self, key, value, namespace='default'):
        """
        Add metadata to segment or subsegment. Metadata is not indexed
        but can be later retrieved by BatchGetTraces API.

        :param str namespace: optional. Default namespace is `default`.
            It must be a string and prefix `AWS.` is reserved.
        :param str key: metadata key under specified namespace
        :param object value: any object that can be serialized into JSON string
        """
        self._check_ended()

        if not isinstance(namespace, str):
            log.warning("ignoring non string type metadata namespace")
            return

        if namespace.startswith('AWS.'):
            log.warning("Prefix 'AWS.' is reserved, drop metadata with namespace %s", namespace)
            return

        if self.metadata.get(namespace, None):
            self.metadata[namespace][key] = value
        else:
            self.metadata[namespace] = {key: value}

    def set_aws(self, aws_meta):
        """
        set aws section of the entity.
        This method is called by global recorder and botocore patcher
        to provide additional information about AWS runtime.
        It is not recommended to manually set aws section.
        """
        self._check_ended()
        self.aws = aws_meta

    def add_throttle_flag(self):
        self.throttle = True

    def add_fault_flag(self):
        self.fault = True

    def add_error_flag(self):
        self.error = True

    def apply_status_code(self, status_code):
        """
        When a trace entity is generated under the http context,
        the status code will affect this entity's fault/error/throttle flags.
        Flip these flags based on status code.
        """
        self._check_ended()

        if not status_code:
            return

        if status_code >= 500:
            self.add_fault_flag()
        elif status_code == 429:
            # Throttling responses are both throttles and errors.
            self.add_throttle_flag()
            self.add_error_flag()
        elif status_code >= 400:
            self.add_error_flag()

    def add_exception(self, exception, stack, remote=False):
        """
        Add an exception to trace entities.

        :param Exception exception: the caught exception.
        :param list stack: the output from python built-in
            `traceback.extract_stack()`.
        :param bool remote: If False it means it's a client error
            instead of a downstream service.
        """
        self._check_ended()
        self.add_fault_flag()

        # An exception already recorded elsewhere is referenced by its
        # cause id instead of being serialized again.
        if hasattr(exception, '_recorded'):
            setattr(self, 'cause', getattr(exception, '_cause_id'))
            return

        if not isinstance(self.cause, dict):
            log.warning("The current cause object is not a dict but an id: %s. Resetting the cause and recording the current exception", self.cause)
            self.cause = {}

        if 'exceptions' in self.cause:
            exceptions = self.cause['exceptions']
        else:
            exceptions = []

        exceptions.append(Throwable(exception, stack, remote))

        self.cause['exceptions'] = exceptions
        self.cause['working_directory'] = os.getcwd()

    def save_origin_trace_header(self, trace_header):
        """
        Temporarily store additional data fields in trace header
        to the entity for later propagation. The data will be
        cleaned up upon serialization.
        """
        setattr(self, ORIGIN_TRACE_HEADER_ATTR_KEY, trace_header)

    def get_origin_trace_header(self):
        """
        Retrieve saved trace header data.
        """
        return getattr(self, ORIGIN_TRACE_HEADER_ATTR_KEY, None)

    def serialize(self):
        """
        Serialize to JSON document that can be accepted by the
        X-Ray backend service. It uses json to perform serialization.
        """
        # default=str stringifies any value json cannot encode natively.
        return json.dumps(self.to_dict(), default=str)

    def to_dict(self):
        """
        Convert Entity(Segment/Subsegment) object to dict
        with required properties that have non-empty values.
        """
        entity_dict = {}

        for key, value in vars(self).items():
            # Keep booleans even when False; drop other falsy values.
            if isinstance(value, bool) or value:
                if key == 'subsegments':
                    # child subsegments are stored as List
                    subsegments = []
                    for subsegment in value:
                        subsegments.append(subsegment.to_dict())
                    entity_dict[key] = subsegments
                elif key == 'cause':
                    if isinstance(self.cause, dict):
                        entity_dict[key] = {}
                        entity_dict[key]['working_directory'] = self.cause['working_directory']
                        # exceptions are stored as List
                        throwables = []
                        for throwable in value['exceptions']:
                            throwables.append(throwable.to_dict())
                        entity_dict[key]['exceptions'] = throwables
                    else:
                        # cause may hold just a referenced exception id.
                        entity_dict[key] = self.cause
                elif key == 'metadata':
                    entity_dict[key] = metadata_to_dict(value)
                elif key != 'sampled' and key != ORIGIN_TRACE_HEADER_ATTR_KEY:
                    entity_dict[key] = value

        return entity_dict

    def _check_ended(self):
        # Mutating a closed entity is a programming error; fail loudly.
        if not self.in_progress:
            raise AlreadyEndedException("Already ended segment and subsegment cannot be modified.")

    def _generate_random_id(self):
        """
        Generate a random 16-digit hex str.
        This is used for generating segment/subsegment id.
        """
        return binascii.b2a_hex(os.urandom(8)).decode('utf-8')
================================================
FILE: aws_xray_sdk/core/models/facade_segment.py
================================================
from .segment import Segment
from ..exceptions.exceptions import FacadeSegmentMutationException
MUTATION_UNSUPPORTED_MESSAGE = 'FacadeSegments cannot be mutated.'
class FacadeSegment(Segment):
    """
    This type of segment should only be used in an AWS Lambda environment.

    It carries the same id, trace id and sampling decision as the segment
    generated by the Lambda service, but none of its own properties may be
    mutated — only subsegments can be attached. If it is created before the
    Lambda worker finishes initialization, all child subsegments are
    discarded.
    """
    def __init__(self, name, entityid, traceid, sampled):
        # Record whether the worker was still initializing (incomplete
        # trace header) before the parent constructor populates fields.
        self.initializing = self._is_initializing(
            entityid=entityid,
            traceid=traceid,
            sampled=sampled,
        )
        super().__init__(
            name=name,
            entityid=entityid,
            traceid=traceid,
            sampled=sampled,
        )

    @staticmethod
    def _reject_mutation():
        # All mutating operations funnel through this single raise site.
        raise FacadeSegmentMutationException(MUTATION_UNSUPPORTED_MESSAGE)

    def close(self, end_time=None):
        """Unsupported operation. Raises an exception."""
        self._reject_mutation()

    def put_http_meta(self, key, value):
        """Unsupported operation. Raises an exception."""
        self._reject_mutation()

    def put_annotation(self, key, value):
        """Unsupported operation. Raises an exception."""
        self._reject_mutation()

    def put_metadata(self, key, value, namespace='default'):
        """Unsupported operation. Raises an exception."""
        self._reject_mutation()

    def set_aws(self, aws_meta):
        """Unsupported operation. Raises an exception."""
        self._reject_mutation()

    def set_user(self, user):
        """Unsupported operation. Raises an exception."""
        self._reject_mutation()

    def add_throttle_flag(self):
        """Unsupported operation. Raises an exception."""
        self._reject_mutation()

    def add_fault_flag(self):
        """Unsupported operation. Raises an exception."""
        self._reject_mutation()

    def add_error_flag(self):
        """Unsupported operation. Raises an exception."""
        self._reject_mutation()

    def add_exception(self, exception, stack, remote=False):
        """Unsupported operation. Raises an exception."""
        self._reject_mutation()

    def apply_status_code(self, status_code):
        """Unsupported operation. Raises an exception."""
        self._reject_mutation()

    def serialize(self):
        """Unsupported operation. Raises an exception."""
        self._reject_mutation()

    def ready_to_send(self):
        """
        Always False: a facade segment is owned by the Lambda service and
        must never be emitted by the SDK.
        """
        return False

    def increment(self):
        """
        Increment total subsegments counter by 1.
        """
        self._subsegments_counter.increment()

    def decrement_ref_counter(self):
        """No-op."""
        pass

    def _is_initializing(self, entityid, traceid, sampled):
        # Any missing piece of the trace context means the Lambda worker
        # has not finished initializing yet.
        return not entityid or not traceid or sampled is None
================================================
FILE: aws_xray_sdk/core/models/http.py
================================================
# Canonical key names used in the ``http`` section of segment documents.
URL = "url"
METHOD = "method"
USER_AGENT = "user_agent"
CLIENT_IP = "client_ip"
X_FORWARDED_FOR = "x_forwarded_for"
STATUS = "status"
CONTENT_LENGTH = "content_length"

# Name of the X-Ray tracing header on HTTP requests.
XRAY_HEADER = "X-Amzn-Trace-Id"
# for proxy header re-write
ALT_XRAY_HEADER = "HTTP_X_AMZN_TRACE_ID"

# Keys belonging to the request vs. response subsection of ``http``.
request_keys = (URL, METHOD, USER_AGENT, CLIENT_IP, X_FORWARDED_FOR)
response_keys = (STATUS, CONTENT_LENGTH)
================================================
FILE: aws_xray_sdk/core/models/noop_traceid.py
================================================
class NoOpTraceId:
    """
    A fixed, all-zero trace ID used when tracing is effectively disabled.

    It mirrors the ``TraceId`` interface but always renders the same
    constant identifier instead of a random one.
    """

    VERSION = '1'
    DELIMITER = '-'

    def __init__(self):
        """
        Generate a no-op trace id.
        """
        # Both components are constant zeros rather than epoch/random hex.
        self.start_time = '00000000'
        self.__number = '000000000000000000000000'

    def to_id(self):
        """
        Render the trace id as ``{version}-{epoch}-{number}``.
        """
        parts = (NoOpTraceId.VERSION, self.start_time, self.__number)
        return NoOpTraceId.DELIMITER.join(parts)
================================================
FILE: aws_xray_sdk/core/models/segment.py
================================================
import copy
import traceback
from .entity import Entity
from .traceid import TraceId
from ..utils.atomic_counter import AtomicCounter
from ..exceptions.exceptions import SegmentNameMissingException
# Attribute name under which the original incoming trace header is stashed
# on an entity; mirrors the constant of the same name in entity.py.
ORIGIN_TRACE_HEADER_ATTR_KEY = '_origin_trace_header'
class SegmentContextManager:
    """
    Context manager pairing a recorder with a segment: begins the segment
    on entry, records any exception raised in the ``with`` body, and ends
    the segment on exit.
    """
    def __init__(self, recorder, name=None, **segment_kwargs):
        self.name = name
        self.segment_kwargs = segment_kwargs
        self.recorder = recorder
        self.segment = None

    def __enter__(self):
        self.segment = self.recorder.begin_segment(
            name=self.name, **self.segment_kwargs)
        return self.segment

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Nothing to clean up if no segment was ever started.
        if self.segment is None:
            return
        if exc_type is not None:
            # Capture the traceback (bounded by the recorder's limit)
            # on the segment before closing it.
            stack = traceback.extract_tb(exc_tb, limit=self.recorder.max_trace_back)
            self.segment.add_exception(exc_val, stack)
        self.recorder.end_segment()
class Segment(Entity):
    """
    The compute resources running your application logic send data
    about their work as segments. A segment provides the resource's name,
    details about the request, and details about the work done.
    """
    def __init__(self, name, entityid=None, traceid=None,
                 parent_id=None, sampled=True):
        """
        Create a segment object.

        :param str name: segment name. Raises
            SegmentNameMissingException when empty.
        :param str entityid: hexdigits segment id; generated when omitted.
        :param str traceid: trace id of the segment; generated when omitted.
        :param str parent_id: id of an upstream segment or subsegment.
        :param bool sampled: when False this segment is not sent to the
            X-Ray daemon.
        """
        if not name:
            raise SegmentNameMissingException("Segment name is required.")

        super().__init__(name)

        self.trace_id = traceid or TraceId().to_id()
        if entityid:
            self.id = entityid

        self.in_progress = True
        self.sampled = sampled
        self.user = None

        # ref_counter tracks open subsegments; _subsegments_counter tracks
        # the total number of subsegments currently held.
        self.ref_counter = AtomicCounter()
        self._subsegments_counter = AtomicCounter()

        if parent_id:
            self.parent_id = parent_id

    def add_subsegment(self, subsegment):
        """
        Attach ``subsegment`` as a child and bump both the open and total
        subsegment counters.
        """
        super().add_subsegment(subsegment)
        self.increment()

    def increment(self):
        """
        Record one more subsegment: bump the open-subsegment reference
        counter and the total subsegment counter.
        """
        self.ref_counter.increment()
        self._subsegments_counter.increment()

    def decrement_ref_counter(self):
        """
        Decrement reference counter by 1 when a subsegment is closed.
        """
        self.ref_counter.decrement()

    def ready_to_send(self):
        """
        Return True if the segment doesn't have any open subsegments
        and itself is not in progress.
        """
        return self.ref_counter.get_current() <= 0 and not self.in_progress

    def get_total_subsegments_size(self):
        """
        Return the number of total subsegments regardless of open or closed.
        """
        return self._subsegments_counter.get_current()

    def decrement_subsegments_size(self):
        """
        Decrement total subsegments by 1. This usually happens when
        a subsegment is streamed out.
        """
        return self._subsegments_counter.decrement()

    def remove_subsegment(self, subsegment):
        """
        Detach ``subsegment`` and decrement the total subsegment count.
        """
        super().remove_subsegment(subsegment)
        self.decrement_subsegments_size()

    def set_user(self, user):
        """
        Set the user of a segment. One segment can only have one user.
        User is indexed and can be later queried.
        """
        super()._check_ended()
        self.user = user

    def set_service(self, service_info):
        """
        Add python runtime and version info.
        This method should be only used by the recorder.
        """
        self.service = service_info

    def set_rule_name(self, rule_name):
        """
        Record the matched centralized sampling rule name when the segment
        is sampled because of that rule. Recorder use only.
        """
        xray_meta = self.aws.get('xray', None)
        if not xray_meta:
            xray_meta = {}
            self.aws['xray'] = xray_meta
        xray_meta['sampling_rule_name'] = rule_name

    def to_dict(self):
        """
        Serialize to a dict of non-empty properties, dropping the
        internal counters which must not appear in the document.
        """
        segment_dict = super().to_dict()
        for internal_key in ('ref_counter', '_subsegments_counter'):
            del segment_dict[internal_key]
        return segment_dict
================================================
FILE: aws_xray_sdk/core/models/subsegment.py
================================================
import copy
import traceback
import wrapt
from .entity import Entity
from ..exceptions.exceptions import SegmentNotFoundException
# Attribute starts with _self_ to prevent wrapt proxying to underlying function
# Attribute starts with _self_ to prevent wrapt proxying to underlying function
SUBSEGMENT_RECORDING_ATTRIBUTE = '_self___SUBSEGMENT_RECORDING_ATTRIBUTE__'


def set_as_recording(decorated_func, wrapped):
    """
    Flag ``decorated_func`` with whether the underlying ``wrapped``
    function had already been patched for subsegment recording.
    """
    already_patched = hasattr(wrapped, SUBSEGMENT_RECORDING_ATTRIBUTE)
    setattr(decorated_func, SUBSEGMENT_RECORDING_ATTRIBUTE, already_patched)


def is_already_recording(func):
    """
    Return True only when ``func`` carries a truthy recording flag; the
    attribute may exist but be False on the first decorator in a chain.
    """
    return getattr(func, SUBSEGMENT_RECORDING_ATTRIBUTE, False)
@wrapt.decorator
def subsegment_decorator(wrapped, instance, args, kwargs):
    """
    Decorator applied to ``SubsegmentContextManager.__call__`` so the
    manager can itself be used as a function decorator.

    ``wrapped`` is the ``__call__`` being decorated; wrapping it with
    ``wrapt.decorator`` and invoking the result with the call arguments
    yields the final decorated function, which is then flagged via
    ``set_as_recording`` so stacked decorators do not record twice.
    """
    decorated_func = wrapt.decorator(wrapped)(*args, **kwargs)
    set_as_recording(decorated_func, wrapped)
    return decorated_func
class SubsegmentContextManager:
    """
    Wrapper around a recorder usable both as a ``with`` context manager
    and as a function decorator that records a subsegment.
    """
    def __init__(self, recorder, name=None, **subsegment_kwargs):
        self.name = name
        self.subsegment_kwargs = subsegment_kwargs
        self.recorder = recorder
        self.subsegment = None

    @subsegment_decorator
    def __call__(self, wrapped, instance, args, kwargs):
        if is_already_recording(wrapped):
            # An outer decorator already records this function; defer the
            # subsegment creation to it and just invoke the function.
            return wrapped(*args, **kwargs)

        return self.recorder.record_subsegment(
            wrapped, instance, args, kwargs,
            name=self.name or wrapped.__name__,
            namespace='local',
            meta_processor=None,
        )

    def __enter__(self):
        self.subsegment = self.recorder.begin_subsegment(
            name=self.name, **self.subsegment_kwargs)
        return self.subsegment

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Nothing to clean up if no subsegment was ever started.
        if self.subsegment is None:
            return
        if exc_type is not None:
            # Record the bounded traceback on the subsegment before closing.
            stack = traceback.extract_tb(exc_tb, limit=self.recorder.max_trace_back)
            self.subsegment.add_exception(exc_val, stack)
        self.recorder.end_subsegment()
class Subsegment(Entity):
    """
    The work done in a single segment can be broken down into subsegments.
    Subsegments provide more granular timing information and details about
    downstream calls that your application made to fulfill the original
    request. A subsegment can contain additional details about a call to
    an AWS service, an external HTTP API, or an SQL database.
    """
    def __init__(self, name, namespace, segment):
        """
        Create a new subsegment.

        :param str name: Subsegment name is required.
        :param str namespace: The namespace of the subsegment. Currently
            support `aws`, `remote` and `local`.
        :param Segment segment: The parent segment
        """
        super().__init__(name)

        if not segment:
            raise SegmentNotFoundException("A parent segment is required for creating subsegments.")

        self.parent_segment = segment
        self.trace_id = segment.trace_id

        self.type = 'subsegment'
        self.namespace = namespace
        self.sql = {}

    def add_subsegment(self, subsegment):
        """
        Attach ``subsegment`` as a child and bump the parent segment's
        open/total subsegment counters.
        """
        super().add_subsegment(subsegment)
        self.parent_segment.increment()

    def remove_subsegment(self, subsegment):
        """
        Detach ``subsegment`` and decrement the parent segment's total
        subsegment count.

        :param Subsegment subsegment: subsegment to remove.
        """
        super().remove_subsegment(subsegment)
        self.parent_segment.decrement_subsegments_size()

    def close(self, end_time=None):
        """
        Set ``end_time``, flip ``in_progress`` to False, and release one
        reference on the parent segment's open-subsegment counter.

        :param float end_time: Epoch in seconds. If not specified
            current time will be used.
        """
        super().close(end_time)
        self.parent_segment.decrement_ref_counter()

    def set_sql(self, sql):
        """
        Set sql related metadata. This function is used by patchers
        for database connectors and is not recommended to
        invoke manually.

        :param dict sql: sql related metadata
        """
        self.sql = sql

    def to_dict(self):
        """
        Serialize to a dict of non-empty properties, dropping the parent
        segment back-reference which must not be serialized.
        """
        subsegment_dict = super().to_dict()
        del subsegment_dict['parent_segment']
        return subsegment_dict
================================================
FILE: aws_xray_sdk/core/models/throwable.py
================================================
import copy
import os
import binascii
import logging
log = logging.getLogger(__name__)


class Throwable:
    """
    An object recording exception information under a trace entity's
    ``cause`` section: the exception type, message, remote flag and a
    normalized stack trace.
    """
    def __init__(self, exception, stack, remote=False):
        """
        :param Exception exception: the caught exception.
        :param list stack: the formatted stack trace gathered
            through the `traceback` module.
        :param bool remote: False means a client error rather than a
            downstream service fault.
        """
        self.id = binascii.b2a_hex(os.urandom(8)).decode('utf-8')

        try:
            message = str(exception)
        except Exception:
            # Some exceptions cannot be converted to str.
            message = None

        # Only record string messages.
        if isinstance(message, str):
            self.message = message

        self.type = type(exception).__name__
        self.remote = remote

        try:
            self._normalize_stack_trace(stack)
        except Exception:
            self.stack = None
            log.warning("can not parse stack trace string, ignore stack field.")

        if exception:
            # Mark the exception so it is not serialized again if recorded
            # by another entity; it is then referenced by its cause id.
            setattr(exception, '_recorded', True)
            setattr(exception, '_cause_id', self.id)

    def to_dict(self):
        """
        Serialize to a dict keeping only properties with non-empty values
        (booleans are always kept).
        """
        return {
            key: value
            for key, value in vars(self).items()
            if isinstance(value, bool) or value
        }

    def _normalize_stack_trace(self, stack):
        """Reduce each frame to path/line/label, skipping SDK frames."""
        if stack is None:
            return None

        self.stack = []
        for frame in stack:
            path, line, label = frame[0], frame[1], frame[2]
            if 'aws_xray_sdk/' in path:
                # Drop frames originating from the SDK itself.
                continue
            self.stack.append({
                'path': os.path.basename(path).replace('\"', ' ').strip(),
                'line': line,
                'label': label.strip(),
            })
================================================
FILE: aws_xray_sdk/core/models/trace_header.py
================================================
import logging
log = logging.getLogger(__name__)

ROOT = 'Root'
PARENT = 'Parent'
SAMPLE = 'Sampled'
SELF = 'Self'

HEADER_DELIMITER = ";"


class TraceHeader:
    """
    Represents the ``X-Amzn-Trace-Id`` tracing header added to HTTP
    requests: the trace id (``Root``), parent segment id (``Parent``),
    sampling decision (``Sampled``), plus arbitrary extra fields. The
    first X-Ray-integrated service a request hits adds this header; the
    SDK reads it and includes it in the response.
    """
    def __init__(self, root=None, parent=None, sampled=None, data=None):
        """
        :param str root: trace id
        :param str parent: parent id
        :param int sampled: 0 means not sampled, 1 means sampled
        :param dict data: arbitrary data fields
        """
        self._root = root
        self._parent = parent
        self._data = data

        # Normalize the sampling decision: '?' is preserved as-is,
        # truthy/falsy 1-and-0 equivalents become int 1/0, anything
        # else stays None.
        self._sampled = None
        if sampled is not None:
            if sampled == '?':
                self._sampled = sampled
            elif sampled is True or sampled in ('1', 1):
                self._sampled = 1
            elif sampled is False or sampled in ('0', 0):
                self._sampled = 0

    @classmethod
    def from_header_str(cls, header):
        """
        Parse a tracing header string extracted from incoming http request
        headers into a TraceHeader object. Malformed headers yield an
        empty TraceHeader.
        """
        if not header:
            return cls()

        try:
            known = {}
            extra = {}
            for param in header.strip().split(HEADER_DELIMITER):
                entry = param.split('=')
                key = entry[0]
                if key in (ROOT, PARENT, SAMPLE):
                    known[key] = entry[1]
                elif key != SELF:
                    # Ignore any "Self=" trace ids injected from ALB.
                    extra[key] = entry[1]

            return cls(
                root=known.get(ROOT, None),
                parent=known.get(PARENT, None),
                sampled=known.get(SAMPLE, None),
                data=extra,
            )
        except Exception:
            log.warning("malformed tracing header %s, ignore.", header)
            return cls()

    def to_header_str(self):
        """
        Render as a tracing header string that can be injected into
        outgoing http request headers.
        """
        h_parts = []
        if self.root:
            h_parts.append(ROOT + '=' + self.root)
        if self.parent:
            h_parts.append(PARENT + '=' + self.parent)
        if self.sampled is not None:
            h_parts.append(SAMPLE + '=' + str(self.sampled))
        if self.data:
            h_parts.extend(key + '=' + self.data[key] for key in self.data)
        return HEADER_DELIMITER.join(h_parts)

    @property
    def root(self):
        """Trace id carried in the header."""
        return self._root

    @property
    def parent(self):
        """Parent segment id carried in the header."""
        return self._parent

    @property
    def sampled(self):
        """Sampling decision in the header: 0, 1 or '?'."""
        return self._sampled

    @property
    def data(self):
        """Arbitrary extra fields in the trace header."""
        return self._data
================================================
FILE: aws_xray_sdk/core/models/traceid.py
================================================
import os
import time
import binascii
class TraceId:
    """
    A trace ID tracks the path of a request through your application.
    A trace collects all the segments generated by a single request.
    A trace ID is required for a segment.
    """
    VERSION = '1'
    DELIMITER = '-'

    def __init__(self):
        """
        Generate a random trace id anchored at the current epoch second.
        """
        self.start_time = int(time.time())
        # 12 random bytes -> 24 lowercase hex characters.
        self.__number = binascii.b2a_hex(os.urandom(12)).decode('utf-8')

    def to_id(self):
        """
        Render this TraceId in the canonical
        '<version>-<hex epoch seconds>-<24 hex chars>' string form.
        """
        fields = (TraceId.VERSION, format(self.start_time, 'x'), self.__number)
        return TraceId.DELIMITER.join(fields)
================================================
FILE: aws_xray_sdk/core/patcher.py
================================================
import importlib
import inspect
import logging
import os
import pkgutil
import re
import sys
import wrapt
from aws_xray_sdk import global_sdk_config
from .utils.compat import is_classmethod, is_instance_method
log = logging.getLogger(__name__)
# Libraries the SDK can instrument natively via aws_xray_sdk.ext.<name>.patch().
SUPPORTED_MODULES = (
    'aiobotocore',
    'botocore',
    'pynamodb',
    'requests',
    'sqlite3',
    'mysql',
    'httplib',
    'pymongo',
    'pymysql',
    'psycopg2',
    'psycopg',
    'pg8000',
    'sqlalchemy_core',
    'httpx',
)
# Same list minus 'httplib': used by default so libraries that sit on top
# of httplib (e.g. requests/botocore) are not instrumented twice.
NO_DOUBLE_PATCH = (
    'aiobotocore',
    'botocore',
    'pynamodb',
    'requests',
    'sqlite3',
    'mysql',
    'pymongo',
    'pymysql',
    'psycopg2',
    'psycopg',
    'pg8000',
    'sqlalchemy_core',
    'httpx',
)
# Names already patched in this process; guards against double patching.
_PATCHED_MODULES = set()
def patch_all(double_patch=False):
    """
    The X-Ray Python SDK supports patching aioboto3, aiobotocore, boto3, botocore, pynamodb, requests,
    sqlite3, mysql, httplib, pymongo, pymysql, psycopg2, pg8000, sqlalchemy_core, httpx, and mysql-connector.
    To patch all supported libraries::
        from aws_xray_sdk.core import patch_all
        patch_all()
    :param bool double_patch: enable or disable patching of indirect dependencies.
    """
    # With double_patch the full list (including httplib) is patched,
    # which can instrument libraries layered on httplib twice.
    targets = SUPPORTED_MODULES if double_patch else NO_DOUBLE_PATCH
    patch(targets, raise_errors=False)
def _is_valid_import(module):
    """
    Return True when `module` (dotted name or path) maps to an importable
    package directory or a .py/.pyc module file on disk.
    """
    rel_path = module.replace('.', '/')
    realpath = os.path.realpath(rel_path)
    # A package: an existing directory carrying an __init__ marker.
    is_package = os.path.isdir(realpath) and (
        os.path.isfile('{}/__init__.py'.format(rel_path))
        or os.path.isfile('{}/__init__.pyc'.format(rel_path))
    )
    if is_package:
        return True
    # A plain module: a source or byte-compiled file of that name.
    return os.path.isfile('{}.py'.format(rel_path)) or os.path.isfile('{}.pyc'.format(rel_path))
def patch(modules_to_patch, raise_errors=True, ignore_module_patterns=None):
    """
    To patch specific modules::
        from aws_xray_sdk.core import patch
        i_want_to_patch = ('botocore') # a tuple that contains the libs you want to patch
        patch(i_want_to_patch)
    :param tuple modules_to_patch: a tuple containing the list of libraries to be patched
    :param bool raise_errors: whether a failure to patch a natively supported
        module raises or is only logged.
    :param list ignore_module_patterns: regex patterns; external (sub)modules
        matching any pattern are skipped.
    :raises Exception: when a requested module is neither natively supported
        nor importable from the working directory.
    """
    enabled = global_sdk_config.sdk_enabled()
    if not enabled:
        # Bug fix: pass the joined list as a lazy logging argument instead of
        # eager %-interpolation, so formatting only happens when debug is on.
        log.debug("Skipped patching modules %s because the SDK is currently disabled.",
                  ', '.join(modules_to_patch))
        return  # Disable module patching if the SDK is disabled.
    modules = set()
    for module_to_patch in modules_to_patch:
        # boto3 depends on botocore and patching botocore is sufficient
        if module_to_patch == 'boto3':
            modules.add('botocore')
        # aioboto3 depends on aiobotocore and patching aiobotocore is sufficient
        elif module_to_patch == 'aioboto3':
            modules.add('aiobotocore')
        # pynamodb requires botocore to be patched as well
        elif module_to_patch == 'pynamodb':
            modules.add('botocore')
            modules.add(module_to_patch)
        else:
            modules.add(module_to_patch)
    unsupported_modules = {module for module in modules if module not in SUPPORTED_MODULES}
    native_modules = modules - unsupported_modules
    # Anything not natively supported may still be a user module on disk.
    external_modules = {module for module in unsupported_modules if _is_valid_import(module)}
    unsupported_modules = unsupported_modules - external_modules
    if unsupported_modules:
        raise Exception('modules %s are currently not supported for patching'
                        % ', '.join(unsupported_modules))
    for m in native_modules:
        _patch_module(m, raise_errors)
    ignore_module_patterns = [re.compile(pattern) for pattern in ignore_module_patterns or []]
    for m in external_modules:
        _external_module_patch(m, ignore_module_patterns)
def _patch_module(module_to_patch, raise_errors=True):
    """
    Patch one natively supported module, optionally swallowing failures.
    """
    try:
        _patch(module_to_patch)
    except Exception:
        if raise_errors:
            raise
        # Best-effort mode: record the failure and move on.
        log.debug('failed to patch module %s', module_to_patch)
def _patch(module_to_patch):
    # Skip modules that were already instrumented in this process.
    if module_to_patch in _PATCHED_MODULES:
        log.debug('%s already patched', module_to_patch)
        return
    # Each supported library has a matching aws_xray_sdk.ext.<name> module
    # exposing a patch() entry point.
    ext_path = 'aws_xray_sdk.ext.%s' % module_to_patch
    importlib.import_module(ext_path).patch()
    _PATCHED_MODULES.add(module_to_patch)
    log.info('successfully patched module %s', module_to_patch)
def _patch_func(parent, func_name, func, modifier=lambda x: x):
    """
    Replace parent.<func_name> with a subsegment-capturing wrapper.
    `modifier` post-processes the wrapper (e.g. staticmethod).
    """
    # Ignore functions not directly defined in parent, i.e. exclude inherited ones
    if func_name not in parent.__dict__:
        return
    # Imported lazily to avoid a circular import at module load time.
    from aws_xray_sdk.core import xray_recorder

    capture_name = func_name
    if capture_name.startswith('__') and capture_name.endswith('__'):
        # Qualify dunder names with the owner so subsegments stay readable.
        capture_name = '{}.{}'.format(parent.__name__, capture_name)
    wrapped = xray_recorder.capture(name=capture_name)(func)
    setattr(parent, func_name, modifier(wrapped))
def _patch_class(module, cls):
    # Recursively instrument a class defined in `module`: nested classes,
    # bound/class methods, then plain functions (instance/static methods).
    for member_name, member in inspect.getmembers(cls, inspect.isclass):
        if member.__module__ == module.__name__:
            # Only patch classes of the module, ignore imports
            _patch_class(module, member)
    for member_name, member in inspect.getmembers(cls, inspect.ismethod):
        if member.__module__ == module.__name__:
            # Only patch methods of the class defined in the module, ignore other modules
            if is_classmethod(member):
                # classmethods are internally generated through descriptors. The classmethod
                # decorator must be the last applied, so we cannot apply another one on top
                log.warning('Cannot automatically patch classmethod %s.%s, '
                            'please apply decorator manually', cls.__name__, member_name)
            else:
                _patch_func(cls, member_name, member)
    for member_name, member in inspect.getmembers(cls, inspect.isfunction):
        if member.__module__ == module.__name__:
            # Only patch static methods of the class defined in the module, ignore other modules
            if is_instance_method(cls, member_name, member):
                _patch_func(cls, member_name, member)
            else:
                # Re-apply staticmethod so the wrapper is not bound as an
                # instance method when set back on the class.
                _patch_func(cls, member_name, member, modifier=staticmethod)
def _on_import(module):
    """
    Instrument everything defined directly in `module`: top-level
    functions first, then classes. Imported names are left untouched.
    """
    for name, member in inspect.getmembers(module, inspect.isfunction):
        # Only patch functions of the module, ignore imports
        if member.__module__ == module.__name__:
            _patch_func(module, name, member)
    for name, member in inspect.getmembers(module, inspect.isclass):
        # Only patch classes of the module, ignore imports
        if member.__module__ == module.__name__:
            _patch_class(module, member)
def _external_module_patch(module, ignore_module_patterns):
    # Instrument a user (non-natively-supported) package and, recursively,
    # all of its submodules. Already-imported modules are patched in place;
    # not-yet-imported ones get a wrapt post-import hook.
    if module.startswith('.'):
        raise Exception('relative packages not supported for patching: {}'.format(module))
    if module in _PATCHED_MODULES:
        log.debug('%s already patched', module)
    elif any(pattern.match(module) for pattern in ignore_module_patterns):
        log.debug('%s ignored due to rules: %s', module, ignore_module_patterns)
    else:
        if module in sys.modules:
            _on_import(sys.modules[module])
        else:
            # Defer patching until the module is actually imported.
            wrapt.importer.when_imported(module)(_on_import)
        # Walk the package directory (dotted name mapped to a path) for children.
        for loader, submodule_name, is_module in pkgutil.iter_modules([module.replace('.', '/')]):
            submodule = '.'.join([module, submodule_name])
            if is_module:
                # Sub-package: recurse so its own children are covered too.
                _external_module_patch(submodule, ignore_module_patterns)
            else:
                if submodule in _PATCHED_MODULES:
                    log.debug('%s already patched', submodule)
                    continue
                elif any(pattern.match(submodule) for pattern in ignore_module_patterns):
                    log.debug('%s ignored due to rules: %s', submodule, ignore_module_patterns)
                    continue
                if submodule in sys.modules:
                    _on_import(sys.modules[submodule])
                else:
                    wrapt.importer.when_imported(submodule)(_on_import)
                _PATCHED_MODULES.add(submodule)
                log.info('successfully patched module %s', submodule)
    # Mark the root as patched even when it was handled via the hook path.
    if module not in _PATCHED_MODULES:
        _PATCHED_MODULES.add(module)
        log.info('successfully patched module %s', module)
================================================
FILE: aws_xray_sdk/core/plugins/__init__.py
================================================
================================================
FILE: aws_xray_sdk/core/plugins/ec2_plugin.py
================================================
import json
import logging
from urllib.request import Request, urlopen
log = logging.getLogger(__name__)
SERVICE_NAME = 'ec2'  # key under the segment's `aws` section for this plugin
ORIGIN = 'AWS::EC2::Instance'  # segment origin reported for EC2 hosts
# Link-local Instance Metadata Service endpoint (only reachable on EC2).
IMDS_URL = 'http://169.254.169.254/latest/'
def initialize():
    """
    Try to get EC2 instance-id and AZ if running on EC2
    by querying http://169.254.169.254/latest/meta-data/.
    If not continue.
    """
    global runtime_context
    # Short-lived (60s) IMDSv2 session token so it is not kept around long.
    session_token = get_token()
    # Resolve instance metadata and cache it at module level.
    runtime_context = get_metadata(session_token)
def get_token():
    """
    Get the session token for the IMDSv2 endpoint, valid for 60 seconds
    via the X-aws-ec2-metadata-token-ttl-seconds header.
    Returns None when the token endpoint cannot be reached.
    """
    try:
        return do_request(
            url=IMDS_URL + "api/token",
            headers={"X-aws-ec2-metadata-token-ttl-seconds": "60"},
            method="PUT",
        )
    except Exception:
        log.warning("Failed to get token for IMDSv2")
        return None
def get_metadata(token=None):
    """
    Fetch and parse the EC2 instance identity document. Passing a token
    makes the request IMDSv2; without one it is a plain IMDSv1 call.
    Returns an empty dict on any failure.
    """
    try:
        request_headers = {"X-aws-ec2-metadata-token": token} if token else None
        identity_doc = do_request(
            url=IMDS_URL + "dynamic/instance-identity/document",
            headers=request_headers,
            method="GET",
        )
        return parse_metadata_json(identity_doc)
    except Exception:
        log.warning("Failed to get EC2 metadata")
        return {}
def parse_metadata_json(json_str):
    """
    Extract the fields X-Ray records from an EC2 instance identity document.
    :param str json_str: JSON text of the identity document
    :return dict: instance_id / availability_zone / instance_type / ami_id
    :raises ValueError, KeyError: propagated to the caller (get_metadata
        catches them) when the document is malformed or incomplete.
    """
    data = json.loads(json_str)
    # Renamed from `dict` -- the original local variable shadowed the builtin.
    metadata = {
        'instance_id': data['instanceId'],
        'availability_zone': data['availabilityZone'],
        'instance_type': data['instanceType'],
        'ami_id': data['imageId']
    }
    return metadata
def do_request(url, headers=None, method="GET"):
    """
    Perform a small metadata HTTP request and return the decoded body.
    :param str url: target URL; None short-circuits to a None return
    :param dict headers: header mapping assigned verbatim to the request
    :param str method: HTTP verb, defaults to GET
    :return str: UTF-8 decoded response body, or None when url is None
    """
    if headers is None:
        headers = {}
    if url is None:
        return None
    req = Request(url=url)
    # Direct assignment keeps the caller's exact header casing
    # (Request.add_header would normalize the key).
    req.headers = headers
    req.method = method
    res = urlopen(req, timeout=1)
    try:
        return res.read().decode('utf-8')
    finally:
        # Bug fix: the response was never closed before, leaking the
        # underlying socket on every metadata call.
        res.close()
================================================
FILE: aws_xray_sdk/core/plugins/ecs_plugin.py
================================================
import socket
import logging
log = logging.getLogger(__name__)
SERVICE_NAME = 'ecs'  # key under the segment's `aws` section for this plugin
ORIGIN = 'AWS::ECS::Container'  # segment origin reported for ECS tasks
def initialize():
    """
    Record the container hostname in the module-level runtime_context.
    On any failure runtime_context is set to None instead.
    """
    global runtime_context
    try:
        runtime_context = {}
        container_name = socket.gethostname()
        if container_name:
            runtime_context['container'] = container_name
    except Exception:
        runtime_context = None
        log.warning("failed to get ecs container metadata")
================================================
FILE: aws_xray_sdk/core/plugins/elasticbeanstalk_plugin.py
================================================
import logging
import json
log = logging.getLogger(__name__)
# Environment description file written by the Elastic Beanstalk X-Ray integration.
CONF_PATH = '/var/elasticbeanstalk/xray/environment.conf'
SERVICE_NAME = 'elastic_beanstalk'  # key under the segment's `aws` section
ORIGIN = 'AWS::ElasticBeanstalk::Environment'  # segment origin for EB apps
def initialize():
    """
    Load the Elastic Beanstalk environment description into the
    module-level runtime_context; set it to None when unavailable.
    """
    global runtime_context
    try:
        with open(CONF_PATH) as conf_file:
            runtime_context = json.load(conf_file)
    except Exception:
        runtime_context = None
        log.warning("failed to load Elastic Beanstalk environment config file")
================================================
FILE: aws_xray_sdk/core/plugins/utils.py
================================================
import importlib
from ..exceptions.exceptions import MissingPluginNames
# Import-path prefix shared by all built-in plugin modules.
module_prefix = 'aws_xray_sdk.core.plugins.'
# Maps lowercase public plugin names to their module file names.
PLUGIN_MAPPING = {
    'elasticbeanstalkplugin': 'elasticbeanstalk_plugin',
    'ec2plugin': 'ec2_plugin',
    'ecsplugin': 'ecs_plugin'
}
def get_plugin_modules(plugins):
    """
    Resolve plugin name strings into imported plugin modules.
    :param tuple plugins: plugin names (case-insensitive)
    :raises MissingPluginNames: when no names are supplied
    """
    if not plugins:
        raise MissingPluginNames("input plugin names are required")
    resolved = []
    for plugin in plugins:
        alias = plugin.lower()
        # Unknown aliases fall through unchanged so custom names still import.
        short_name = PLUGIN_MAPPING.get(alias, alias)
        resolved.append(importlib.import_module(module_prefix + short_name))
    return tuple(resolved)
================================================
FILE: aws_xray_sdk/core/recorder.py
================================================
import copy
import json
import logging
import os
import platform
import time
from aws_xray_sdk import global_sdk_config
from aws_xray_sdk.version import VERSION
from .models.segment import Segment, SegmentContextManager
from .models.subsegment import Subsegment, SubsegmentContextManager
from .models.default_dynamic_naming import DefaultDynamicNaming
from .models.dummy_entities import DummySegment, DummySubsegment
from .emitters.udp_emitter import UDPEmitter
from .streaming.default_streaming import DefaultStreaming
from .context import Context
from .daemon_config import DaemonConfig
from .plugins.utils import get_plugin_modules
from .lambda_launcher import check_in_lambda
from .exceptions.exceptions import SegmentNameMissingException, SegmentNotFoundException
from .utils import stacktrace
log = logging.getLogger(__name__)

# Environment variables that override the matching configure() arguments.
TRACING_NAME_KEY = 'AWS_XRAY_TRACING_NAME'
DAEMON_ADDR_KEY = 'AWS_XRAY_DAEMON_ADDRESS'
CONTEXT_MISSING_KEY = 'AWS_XRAY_CONTEXT_MISSING'

# Base `aws` section attached to every sampled segment (SDK identification).
XRAY_META = {
    'xray': {
        'sdk': 'X-Ray for Python',
        'sdk_version': VERSION
    }
}

# Python runtime info attached to every sampled segment's service section.
SERVICE_INFO = {
    'runtime': platform.python_implementation(),
    'runtime_version': platform.python_version()
}
class AWSXRayRecorder:
    """
    A global AWS X-Ray recorder that will begin/end segments/subsegments
    and send them to the X-Ray daemon. This recorder is initialized during
    loading time so you can use::
        from aws_xray_sdk.core import xray_recorder
    in your module to access it
    """
    def __init__(self):
        self._streaming = DefaultStreaming()
        context = check_in_lambda()
        if context:
            # Special handling when running on AWS Lambda: Lambda controls
            # the parent segment, sampling is local, and subsegments are
            # streamed immediately (threshold 0).
            from .sampling.local.sampler import LocalSampler
            self._context = context
            self.streaming_threshold = 0
            self._sampler = LocalSampler()
        else:
            # Normal hosts use the centralized (service-backed) sampler.
            from .sampling.sampler import DefaultSampler
            self._context = Context()
            self._sampler = DefaultSampler()
        self._emitter = UDPEmitter()
        self._sampling = True
        self._max_trace_back = 10
        self._plugins = None
        # Default segment name; may be overridden via configure()/env var.
        self._service = os.getenv(TRACING_NAME_KEY)
        self._dynamic_naming = None
        self._aws_metadata = copy.deepcopy(XRAY_META)
        self._origin = None
        self._stream_sql = True
        # Only the centralized sampler needs daemon connection settings.
        if type(self.sampler).__name__ == 'DefaultSampler':
            self.sampler.load_settings(DaemonConfig(), self.context)
def configure(self, sampling=None, plugins=None,
context_missing=None, sampling_rules=None,
daemon_address=None, service=None,
context=None, emitter=None, streaming=None,
dynamic_naming=None, streaming_threshold=None,
max_trace_back=None, sampler=None,
stream_sql=True):
"""Configure global X-Ray recorder.
Configure needs to run before patching thrid party libraries
to avoid creating dangling subsegment.
:param bool sampling: If sampling is enabled, every time the recorder
creates a segment it decides whether to send this segment to
the X-Ray daemon. This setting is not used if the recorder
is running in AWS Lambda. The recorder always respect the incoming
sampling decisions regardless of this setting.
:param sampling_rules: Pass a set of local custom sampling rules.
Can be an absolute path of the sampling rule config json file
or a dictionary that defines those rules. This will also be the
fallback rules in case of centralized sampling opted-in while
the cetralized sampling rules are not available.
:param sampler: The sampler used to make sampling decisions. The SDK
provides two built-in samplers. One is centralized rules based and
the other is local rules based. The former is the default.
:param tuple plugins: plugins that add extra metadata to each segment.
Currently available plugins are EC2Plugin, ECS plugin and
ElasticBeanstalkPlugin.
If you want to disable all previously enabled plugins,
pass an empty tuple ``()``.
:param str context_missing: recorder behavior when it tries to mutate
a segment or add a subsegment but there is no active segment.
RUNTIME_ERROR means the recorder will raise an exception.
LOG_ERROR means the recorder will only log the error and
do nothing.
IGNORE_ERROR means the recorder will do nothing
:param str daemon_address: The X-Ray daemon address where the recorder
sends data to.
:param str service: default segment name if creating a segment without
providing a name.
:param context: You can pass your own implementation of context storage
for active segment/subsegment by overriding the default
``Context`` class.
:param emitter: The emitter that sends a segment/subsegment to
the X-Ray daemon. You can override ``UDPEmitter`` class.
:param dynamic_naming: a string that defines a pattern that host names
should match. Alternatively you can pass a module which
overrides ``DefaultDynamicNaming`` module.
:param streaming: The streaming module to stream out trace documents
when they grow too large. You can override ``DefaultStreaming``
class to have your own implementation of the streaming process.
:param streaming_threshold: If breaks within a single segment it will
start streaming out children subsegments. By default it is the
maximum number of subsegments within a segment.
:param int max_trace_back: The maxinum number of stack traces recorded
by auto-capture. Lower this if a single document becomes too large.
:param bool stream_sql: Whether SQL query texts should be streamed.
Environment variables AWS_XRAY_DAEMON_ADDRESS, AWS_XRAY_CONTEXT_MISSING
and AWS_XRAY_TRACING_NAME respectively overrides arguments
daemon_address, context_missing and service.
"""
if sampling is not None:
self.sampling = sampling
if sampler:
self.sampler = sampler
if service:
self.service = os.getenv(TRACING_NAME_KEY, service)
if sampling_rules:
self._load_sampling_rules(sampling_rules)
if emitter:
self.emitter = emitter
if daemon_address:
self.emitter.set_daemon_address(os.getenv(DAEMON_ADDR_KEY, daemon_address))
if context:
self.context = context
if context_missing:
self.context.context_missing = os.getenv(CONTEXT_MISSING_KEY, context_missing)
if dynamic_naming:
self.dynamic_naming = dynamic_naming
if streaming:
self.streaming = streaming
if streaming_threshold is not None:
self.streaming_threshold = streaming_threshold
if type(max_trace_back) == int and max_trace_back >= 0:
self.max_trace_back = max_trace_back
if stream_sql is not None:
self.stream_sql = stream_sql
if plugins:
plugin_modules = get_plugin_modules(plugins)
for plugin in plugin_modules:
plugin.initialize()
if plugin.runtime_context:
self._aws_metadata[plugin.SERVICE_NAME] = plugin.runtime_context
self._origin = plugin.ORIGIN
# handling explicitly using empty list to clean up plugins.
elif plugins is not None:
self._aws_metadata = copy.deepcopy(XRAY_META)
self._origin = None
if type(self.sampler).__name__ == 'DefaultSampler':
self.sampler.load_settings(DaemonConfig(daemon_address),
self.context, self._origin)
def in_segment(self, name=None, **segment_kwargs):
"""
Return a segment context manager.
:param str name: the name of the segment
:param dict segment_kwargs: remaining arguments passed directly to `begin_segment`
"""
return SegmentContextManager(self, name=name, **segment_kwargs)
def in_subsegment(self, name=None, **subsegment_kwargs):
"""
Return a subsegment context manager.
:param str name: the name of the subsegment
:param dict subsegment_kwargs: remaining arguments passed directly to `begin_subsegment`
"""
return SubsegmentContextManager(self, name=name, **subsegment_kwargs)
    def begin_segment(self, name=None, traceid=None,
                      parent_id=None, sampling=None):
        """
        Begin a segment on the current thread and return it. The recorder
        only keeps one segment at a time. Create the second one without
        closing existing one will overwrite it.
        :param str name: the name of the segment
        :param str traceid: trace id of the segment
        :param str parent_id: id of the upstream segment/subsegment, if any
        :param int sampling: 0 means not sampled, 1 means sampled
        """
        # Disable the recorder; return a generated dummy segment.
        if not global_sdk_config.sdk_enabled():
            return DummySegment(global_sdk_config.DISABLED_ENTITY_NAME)
        seg_name = name or self.service
        if not seg_name:
            raise SegmentNameMissingException("Segment name is required.")
        # Sampling decision is None if not sampled.
        # In a sampled case it could be either a string or 1
        # depending on if centralized or local sampling rule takes effect.
        decision = True
        # we respect the input sampling decision
        # regardless of recorder configuration.
        if sampling == 0:
            decision = False
        elif sampling:
            decision = sampling
        elif self.sampling:
            decision = self._sampler.should_trace({'service': seg_name})
        if not decision:
            # Not sampled: a no-op placeholder keeps user code working.
            segment = DummySegment(seg_name)
        else:
            segment = Segment(name=seg_name, traceid=traceid,
                              parent_id=parent_id)
            # Attach plugin origin/metadata and runtime info; a string
            # decision is recorded as the matched centralized rule name.
            self._populate_runtime_context(segment, decision)
        self.context.put_segment(segment)
        return segment
def end_segment(self, end_time=None):
"""
End the current segment and send it to X-Ray daemon
if it is ready to send. Ready means segment and
all its subsegments are closed.
:param float end_time: segment completion in unix epoch in seconds.
"""
# When the SDK is disabled we return
if not global_sdk_config.sdk_enabled():
return
self.context.end_segment(end_time)
segment = self.current_segment()
if segment and segment.ready_to_send():
self._send_segment()
def current_segment(self):
"""
Return the currently active segment. In a multithreading environment,
this will make sure the segment returned is the one created by the
same thread.
"""
entity = self.get_trace_entity()
if self._is_subsegment(entity):
return entity.parent_segment
else:
return entity
def _begin_subsegment_helper(self, name, namespace='local', beginWithoutSampling=False):
'''
Helper method to begin_subsegment and begin_subsegment_without_sampling
'''
# Generating the parent dummy segment is necessary.
# We don't need to store anything in context. Assumption here
# is that we only work with recorder-level APIs.
if not global_sdk_config.sdk_enabled():
return DummySubsegment(DummySegment(global_sdk_config.DISABLED_ENTITY_NAME))
segment = self.current_segment()
if not segment:
log.warning("No segment found, cannot begin subsegment %s." % name)
return None
current_entity = self.get_trace_entity()
if not current_entity.sampled or beginWithoutSampling:
subsegment = DummySubsegment(segment, name)
else:
subsegment = Subsegment(name, namespace, segment)
self.context.put_subsegment(subsegment)
return subsegment
def begin_subsegment(self, name, namespace='local'):
"""
Begin a new subsegment.
If there is open subsegment, the newly created subsegment will be the
child of latest opened subsegment.
If not, it will be the child of the current open segment.
:param str name: the name of the subsegment.
:param str namespace: currently can only be 'local', 'remote', 'aws'.
"""
return self._begin_subsegment_helper(name, namespace)
def begin_subsegment_without_sampling(self, name):
"""
Begin a new unsampled subsegment.
If there is open subsegment, the newly created subsegment will be the
child of latest opened subsegment.
If not, it will be the child of the current open segment.
:param str name: the name of the subsegment.
"""
return self._begin_subsegment_helper(name, beginWithoutSampling=True)
def current_subsegment(self):
"""
Return the latest opened subsegment. In a multithreading environment,
this will make sure the subsegment returned is one created
by the same thread.
"""
if not global_sdk_config.sdk_enabled():
return DummySubsegment(DummySegment(global_sdk_config.DISABLED_ENTITY_NAME))
entity = self.get_trace_entity()
if self._is_subsegment(entity):
return entity
else:
return None
def end_subsegment(self, end_time=None):
"""
End the current active subsegment. If this is the last one open
under its parent segment, the entire segment will be sent.
:param float end_time: subsegment compeletion in unix epoch in seconds.
"""
if not global_sdk_config.sdk_enabled():
return
if not self.context.end_subsegment(end_time):
return
# if segment is already close, we check if we can send entire segment
# otherwise we check if we need to stream some subsegments
if self.current_segment().ready_to_send():
self._send_segment()
else:
self.stream_subsegments()
def put_annotation(self, key, value):
"""
Annotate current active trace entity with a key-value pair.
Annotations will be indexed for later search query.
:param str key: annotation key
:param object value: annotation value. Any type other than
string/number/bool will be dropped
"""
if not global_sdk_config.sdk_enabled():
return
entity = self.get_trace_entity()
if entity and entity.sampled:
entity.put_annotation(key, value)
def put_metadata(self, key, value, namespace='default'):
"""
Add metadata to the current active trace entity.
Metadata is not indexed but can be later retrieved
by BatchGetTraces API.
:param str namespace: optional. Default namespace is `default`.
It must be a string and prefix `AWS.` is reserved.
:param str key: metadata key under specified namespace
:param object value: any object that can be serialized into JSON string
"""
if not global_sdk_config.sdk_enabled():
return
entity = self.get_trace_entity()
if entity and entity.sampled:
entity.put_metadata(key, value, namespace)
def is_sampled(self):
"""
Check if the current trace entity is sampled or not.
Return `False` if no active entity found.
"""
if not global_sdk_config.sdk_enabled():
# Disabled SDK is never sampled
return False
entity = self.get_trace_entity()
if entity:
return entity.sampled
return False
def get_trace_entity(self):
"""
A pass through method to ``context.get_trace_entity()``.
"""
return self.context.get_trace_entity()
def set_trace_entity(self, trace_entity):
"""
A pass through method to ``context.set_trace_entity()``.
"""
self.context.set_trace_entity(trace_entity)
def clear_trace_entities(self):
"""
A pass through method to ``context.clear_trace_entities()``.
"""
self.context.clear_trace_entities()
def stream_subsegments(self):
"""
Stream all closed subsegments to the daemon
and remove reference to the parent segment.
No-op for a not sampled segment.
"""
segment = self.current_segment()
if self.streaming.is_eligible(segment):
self.streaming.stream(segment, self._stream_subsegment_out)
def capture(self, name=None):
"""
A decorator that records enclosed function in a subsegment.
It only works with synchronous functions.
params str name: The name of the subsegment. If not specified
the function name will be used.
"""
return self.in_subsegment(name=name)
    def record_subsegment(self, wrapped, instance, args, kwargs, name,
                          namespace, meta_processor):
        """
        Invoke ``wrapped(*args, **kwargs)`` inside a new subsegment,
        recording any raised exception, and always closing the subsegment.
        Returns/raises whatever ``wrapped`` returns/raises.
        """
        subsegment = self.begin_subsegment(name, namespace)
        exception = None
        stack = None
        return_value = None
        try:
            return_value = wrapped(*args, **kwargs)
            return return_value
        except Exception as e:
            # Capture now so the meta processor / fallback below can record it.
            exception = e
            stack = stacktrace.get_stacktrace(limit=self.max_trace_back)
            raise
        finally:
            # No-op if subsegment is `None` due to `LOG_ERROR`.
            if subsegment is not None:
                end_time = time.time()
                if callable(meta_processor):
                    # A custom processor takes full responsibility for
                    # recording metadata/exceptions on the subsegment.
                    meta_processor(
                        wrapped=wrapped,
                        instance=instance,
                        args=args,
                        kwargs=kwargs,
                        return_value=return_value,
                        exception=exception,
                        subsegment=subsegment,
                        stack=stack,
                    )
                elif exception:
                    # Only record the exception ourselves when no custom
                    # processor was supplied.
                    subsegment.add_exception(exception, stack)
                self.end_subsegment(end_time)
def _populate_runtime_context(self, segment, sampling_decision):
if self._origin:
setattr(segment, 'origin', self._origin)
segment.set_aws(copy.deepcopy(self._aws_metadata))
segment.set_service(SERVICE_INFO)
if isinstance(sampling_decision, str):
segment.set_rule_name(sampling_decision)
def _send_segment(self):
"""
Send the current segment to X-Ray daemon if it is present and
sampled, then clean up context storage.
The emitter will handle failures.
"""
segment = self.current_segment()
if not segment:
return
if segment.sampled:
self.emitter.send_entity(segment)
self.clear_trace_entities()
def _stream_subsegment_out(self, subsegment):
log.debug("streaming subsegments...")
if subsegment.sampled:
self.emitter.send_entity(subsegment)
def _load_sampling_rules(self, sampling_rules):
if not sampling_rules:
return
if isinstance(sampling_rules, dict):
self.sampler.load_local_rules(sampling_rules)
else:
with open(sampling_rules) as f:
self.sampler.load_local_rules(json.load(f))
def _is_subsegment(self, entity):
return (hasattr(entity, 'type') and entity.type == 'subsegment')
    @property
    def enabled(self):
        # NOTE(review): `_enabled` is never assigned in __init__, so reading
        # this before setting `enabled` raises AttributeError — confirm
        # whether that is intended.
        return self._enabled
    @enabled.setter
    def enabled(self, value):
        self._enabled = value
    @property
    def sampling(self):
        # Whether this recorder makes sampling decisions for new segments.
        return self._sampling
    @sampling.setter
    def sampling(self, value):
        self._sampling = value
    @property
    def sampler(self):
        # Sampler used for should_trace decisions (centralized or local).
        return self._sampler
    @sampler.setter
    def sampler(self, value):
        self._sampler = value
    @property
    def service(self):
        # Default segment name when begin_segment is called without one.
        return self._service
    @service.setter
    def service(self, value):
        self._service = value
    @property
    def dynamic_naming(self):
        return self._dynamic_naming
    @dynamic_naming.setter
    def dynamic_naming(self, value):
        # A plain string pattern is wrapped in the default implementation;
        # anything else is assumed to be a compatible naming object.
        if isinstance(value, str):
            self._dynamic_naming = DefaultDynamicNaming(value, self.service)
        else:
            self._dynamic_naming = value
    @property
    def context(self):
        # Storage for the active segment/subsegment (thread-local by default).
        return self._context
    @context.setter
    def context(self, cxt):
        self._context = cxt
    @property
    def emitter(self):
        # Transport that sends entities to the X-Ray daemon.
        return self._emitter
    @emitter.setter
    def emitter(self, value):
        self._emitter = value
    @property
    def streaming(self):
        # Module that streams out subsegments of oversized segments.
        return self._streaming
    @streaming.setter
    def streaming(self, value):
        self._streaming = value
    @property
    def streaming_threshold(self):
        """
        Proxy method to Streaming module's `streaming_threshold` property.
        """
        return self.streaming.streaming_threshold
    @streaming_threshold.setter
    def streaming_threshold(self, value):
        """
        Proxy method to Streaming module's `streaming_threshold` property.
        """
        self.streaming.streaming_threshold = value
    @property
    def max_trace_back(self):
        # Maximum number of stack frames recorded by auto-capture.
        return self._max_trace_back
    @max_trace_back.setter
    def max_trace_back(self, value):
        self._max_trace_back = value
    @property
    def stream_sql(self):
        # Whether SQL query text is recorded on database subsegments.
        return self._stream_sql
    @stream_sql.setter
    def stream_sql(self, value):
        self._stream_sql = value
================================================
FILE: aws_xray_sdk/core/sampling/__init__.py
================================================
================================================
FILE: aws_xray_sdk/core/sampling/connector.py
================================================
import binascii
import os
import time
from datetime import datetime
import botocore.session
from botocore import UNSIGNED
from botocore.client import Config
from .sampling_rule import SamplingRule
from aws_xray_sdk.core.models.dummy_entities import DummySegment
from aws_xray_sdk.core.context import Context
class ServiceConnector:
"""
Connector class that translates Centralized Sampling poller functions to
actual X-Ray back-end APIs and communicates with X-Ray daemon as the
signing proxy.
"""
    def __init__(self):
        # Botocore client pointed at the X-Ray daemon signing proxy.
        self._xray_client = self._create_xray_client()
        # Random per-process client id reported with sampling statistics.
        self._client_id = binascii.b2a_hex(os.urandom(12)).decode('utf-8')
        # Default synchronous context; replaceable via the `context` property.
        self._context = Context()
    # Defined in the class body and used as a decorator, so `func` is the
    # undecorated method — there is no `self` at this level.
    def _context_wrapped(func):
        """
        Wrapping boto calls with dummy segment. This is because botocore
        has two dependencies (requests and httplib) that might be
        monkey-patched in user code to capture subsegments. The wrapper
        makes sure there is always a non-sampled segment present when
        the connector makes an AWS API call using botocore.
        This context wrapper doesn't work with asyncio based context
        as event loop is not thread-safe.
        """
        def wrapper(self, *args, **kargs):
            # Async contexts are skipped entirely; see docstring.
            if type(self.context).__name__ == 'AsyncContext':
                return func(self, *args, **kargs)
            # Plant a dummy (unsampled) segment so patched HTTP libraries
            # have a parent entity, then clean it up afterwards.
            segment = DummySegment()
            self.context.set_trace_entity(segment)
            result = func(self, *args, **kargs)
            self.context.clear_trace_entities()
            return result
        return wrapper
@_context_wrapped
def fetch_sampling_rules(self):
"""
Use X-Ray botocore client to get the centralized sampling rules
from X-Ray service. The call is proxied and signed by X-Ray Daemon.
"""
new_rules = []
resp = self._xray_client.get_sampling_rules()
records = resp['SamplingRuleRecords']
for record in records:
rule_def = record['SamplingRule']
if self._is_rule_valid(rule_def):
rule = SamplingRule(name=rule_def['RuleName'],
priority=rule_def['Priority'],
rate=rule_def['FixedRate'],
reservoir_size=rule_def['ReservoirSize'],
host=rule_def['Host'],
service=rule_def['ServiceName'],
method=rule_def['HTTPMethod'],
path=rule_def['URLPath'],
service_type=rule_def['ServiceType'])
new_rules.append(rule)
return new_rules
@_context_wrapped
def fetch_sampling_target(self, rules):
"""
Report the current statistics of sampling rules and
get back the new assgiend quota/TTL froom the X-Ray service.
The call is proxied and signed via X-Ray Daemon.
"""
now = int(time.time())
report_docs = self._generate_reporting_docs(rules, now)
resp = self._xray_client.get_sampling_targets(
SamplingStatisticsDocuments=report_docs
)
new_docs = resp['SamplingTargetDocuments']
targets_mapping = {}
for doc in new_docs:
TTL = self._dt_to_epoch(doc['ReservoirQuotaTTL']) if doc.get('ReservoirQuotaTTL', None) else None
target = {
'rate': doc['FixedRate'],
'quota': doc.get('ReservoirQuota', None),
'TTL': TTL,
'interval': doc.get('Interval', None),
}
targets_mapping[doc['RuleName']] = target
return targets_mapping, self._dt_to_epoch(resp['LastRuleModification'])
def setup_xray_client(self, ip, port, client):
"""
Setup the xray client based on ip and port.
If a preset client is specified, ip and port
will be ignored.
"""
if not client:
client = self._create_xray_client(ip, port)
self._xray_client = client
@property
def context(self):
return self._context
@context.setter
def context(self, v):
self._context = v
def _generate_reporting_docs(self, rules, now):
report_docs = []
for rule in rules:
statistics = rule.snapshot_statistics()
doc = {
'RuleName': rule.name,
'ClientID': self._client_id,
'RequestCount': statistics['request_count'],
'BorrowCount': statistics['borrow_count'],
'SampledCount': statistics['sampled_count'],
'Timestamp': now,
}
report_docs.append(doc)
return report_docs
def _dt_to_epoch(self, dt):
"""
Convert a offset-aware datetime to POSIX time.
"""
# Added in python 3.3+ and directly returns POSIX time.
return int(dt.timestamp())
def _is_rule_valid(self, record):
# We currently only handle v1 sampling rules.
return record.get('Version', None) == 1 and \
record.get('ResourceARN', None) == '*' and \
record.get('ServiceType', None) and \
not record.get('Attributes', None)
def _create_xray_client(self, ip='127.0.0.1', port='2000'):
session = botocore.session.get_session()
url = 'http://%s:%s' % (ip, port)
return session.create_client('xray', endpoint_url=url,
region_name='us-west-2',
config=Config(signature_version=UNSIGNED),
aws_access_key_id='', aws_secret_access_key=''
)
================================================
FILE: aws_xray_sdk/core/sampling/local/__init__.py
================================================
================================================
FILE: aws_xray_sdk/core/sampling/local/reservoir.py
================================================
import time
import threading
class Reservoir:
    """
    Tracks how many segments have been sampled during the current
    one-second window so a fixed per-second quota can be enforced.
    All access happens under a lock to keep the count accurate when
    multiple threads sample concurrently.
    """

    def __init__(self, traces_per_sec=0):
        """
        :param int traces_per_sec: number of guaranteed sampled
            segments per second.
        """
        self._lock = threading.Lock()
        self.traces_per_sec = traces_per_sec
        self.used_this_sec = 0
        self.this_sec = int(time.time())

    def take(self):
        """
        Consume one trace from the current second's quota.
        Returns True when quota remains, otherwise False.
        """
        with self._lock:
            current = int(time.time())
            # A new second has started; reset the usage counter.
            if current != self.this_sec:
                self.used_this_sec = 0
                self.this_sec = current
            if self.used_this_sec >= self.traces_per_sec:
                return False
            self.used_this_sec += 1
            return True
================================================
FILE: aws_xray_sdk/core/sampling/local/sampler.py
================================================
import json
import pkgutil
from random import Random
from .sampling_rule import SamplingRule
from ...exceptions.exceptions import InvalidSamplingManifestError
# Default local sampling manifest bundled with the SDK: no custom rules,
# and a default rule of 1 request/second fixed target plus a 5% rate.
# `.decode('utf-8')` kept for older Python versions where ``json.loads``
# did not accept bytes.
local_sampling_rule = json.loads(pkgutil.get_data(__name__, 'sampling_rule.json').decode('utf-8'))
# Manifest versions this sampler can parse; validated in ``load_local_rules``.
SUPPORTED_RULE_VERSION = (1, 2)
class LocalSampler:
    """
    The local sampler that holds either custom sampling rules
    or default sampling rules defined locally. The X-Ray recorder
    uses it to calculate if this segment should be sampled or not
    when local rules are necessary.
    """
    def __init__(self, rules=local_sampling_rule):
        """
        :param dict rules: a dict that defines custom sampling rules.
        An example configuration:
        {
            "version": 2,
            "rules": [
                {
                    "description": "Player moves.",
                    "host": "*",
                    "http_method": "*",
                    "url_path": "/api/move/*",
                    "fixed_target": 0,
                    "rate": 0.05
                }
            ],
            "default": {
                "fixed_target": 1,
                "rate": 0.1
            }
        }
        This example defines one custom rule and a default rule.
        The custom rule applies a five-percent sampling rate with no minimum
        number of requests to trace for paths under /api/move/. The default
        rule traces the first request each second and 10 percent of additional requests.
        The SDK applies custom rules in the order in which they are defined.
        If a request matches multiple custom rules, the SDK applies only the first rule.
        """
        self.load_local_rules(rules)
        self._random = Random()

    def should_trace(self, sampling_req=None):
        """
        Return True if the sampler decides to sample based on input
        information and sampling rules. It will first check if any
        custom rule should be applied, if not it falls back to the
        default sampling rule.
        All optional arguments are extracted from incoming requests by
        X-Ray middleware to perform path based sampling.
        """
        if sampling_req is None:
            return self._should_trace(self._default_rule)
        host = sampling_req.get('host', None)
        method = sampling_req.get('method', None)
        path = sampling_req.get('path', None)
        # Custom rules are evaluated in their defined order; the
        # first matching rule wins.
        for rule in self._rules:
            if rule.applies(host, method, path):
                return self._should_trace(rule)
        return self._should_trace(self._default_rule)

    def load_local_rules(self, rules):
        """
        Validate and load the given rule manifest.
        :param dict rules: manifest dict with ``version``, ``default``
            and optional ``rules`` entries.
        :raises InvalidSamplingManifestError: if the version is not
            supported or no default rule is provided.
        """
        version = rules.get('version', None)
        if version not in SUPPORTED_RULE_VERSION:
            # Format eagerly: unlike logging calls, exception constructors
            # do not perform lazy %-substitution on extra arguments.
            raise InvalidSamplingManifestError(
                'Manifest version: %s is not supported.' % version)
        if 'default' not in rules:
            raise InvalidSamplingManifestError('A default rule must be provided.')
        self._default_rule = SamplingRule(rule_dict=rules['default'],
                                          version=version,
                                          default=True)
        self._rules = []
        if 'rules' in rules:
            for rule in rules['rules']:
                self._rules.append(SamplingRule(rule, version))

    def _should_trace(self, sampling_rule):
        # Reservoir (guaranteed per-second target) takes precedence,
        # then fall back to the rule's bernoulli rate.
        if sampling_rule.reservoir.take():
            return True
        else:
            return self._random.random() < sampling_rule.rate
================================================
FILE: aws_xray_sdk/core/sampling/local/sampling_rule.json
================================================
{
"version": 2,
"default": {
"fixed_target": 1,
"rate": 0.05
},
"rules": [
]
}
================================================
FILE: aws_xray_sdk/core/sampling/local/sampling_rule.py
================================================
from .reservoir import Reservoir
from ...exceptions.exceptions import InvalidSamplingManifestError
from aws_xray_sdk.core.utils.search_pattern import wildcard_match
class SamplingRule:
    """
    One SamplingRule represents one rule defined from local rule json file
    or from a dictionary. It can be either a custom rule or default rule.
    """
    FIXED_TARGET = 'fixed_target'
    RATE = 'rate'
    HOST = 'host'
    METHOD = 'http_method'
    PATH = 'url_path'
    SERVICE_NAME = 'service_name'

    def __init__(self, rule_dict, version=2, default=False):
        """
        :param dict rule_dict: The dictionary that defines a single rule.
        :param int version: Manifest version the rule comes from. Version 2
            uses the ``host`` key while version 1 uses ``service_name``.
        :param bool default: Indicates if this is the default rule. A default
            rule cannot have `host`, `http_method` or `url_path`.
        :raises InvalidSamplingManifestError: on an unsupported version
            or invalid rule values.
        """
        if version == 2:
            self._host_key = self.HOST
        elif version == 1:
            self._host_key = self.SERVICE_NAME
        else:
            # Fail fast with a clear error instead of surfacing an
            # AttributeError later when _host_key is read.
            raise InvalidSamplingManifestError(
                'Manifest version: %s is not supported.' % version)
        self._version = version
        self._fixed_target = rule_dict.get(self.FIXED_TARGET, None)
        self._rate = rule_dict.get(self.RATE, None)
        self._host = rule_dict.get(self._host_key, None)
        self._method = rule_dict.get(self.METHOD, None)
        self._path = rule_dict.get(self.PATH, None)
        self._default = default
        self._validate()
        self._reservoir = Reservoir(self.fixed_target)

    def applies(self, host, method, path):
        """
        Determines whether or not this sampling rule applies to
        the incoming request based on some of the request's parameters.
        Any None parameters provided will be considered an implicit match.
        """
        return (not host or wildcard_match(self.host, host)) \
            and (not method or wildcard_match(self.method, method)) \
            and (not path or wildcard_match(self.path, path))

    @property
    def fixed_target(self):
        """
        Defines fixed number of sampled segments per second.
        This doesn't count for sampling rate.
        """
        return self._fixed_target

    @property
    def rate(self):
        """
        A float number less than 1.0 defines the sampling rate.
        """
        return self._rate

    @property
    def host(self):
        """
        The host name of the request to sample.
        """
        return self._host

    @property
    def method(self):
        """
        HTTP method of the request to sample.
        """
        return self._method

    @property
    def path(self):
        """
        The url path of the request to sample.
        """
        return self._path

    @property
    def reservoir(self):
        """
        Keeps track of used sampled targets within the second.
        """
        return self._reservoir

    @property
    def version(self):
        """
        The version of the manifest this rule was loaded from.
        """
        return self._version

    def _validate(self):
        if self.fixed_target < 0 or self.rate < 0:
            raise InvalidSamplingManifestError('All rules must have non-negative values for '
                                               'fixed_target and rate')
        if self._default:
            # The default rule must be a catch-all with no matchers.
            if self.host or self.method or self.path:
                raise InvalidSamplingManifestError(
                    'The default rule must not specify values for '
                    'url_path, %s, or http_method' % self._host_key)
        else:
            if not self.host or not self.method or not self.path:
                raise InvalidSamplingManifestError(
                    'All non-default rules must have values for '
                    'url_path, %s, and http_method' % self._host_key)
================================================
FILE: aws_xray_sdk/core/sampling/reservoir.py
================================================
import threading
from enum import Enum
class Reservoir:
    """
    Centralized thread-safe reservoir which holds fixed sampling
    quota, borrowed count and TTL.
    """
    def __init__(self):
        self._lock = threading.Lock()
        # Quota assigned by the X-Ray service and its expiry (epoch seconds).
        self._quota = None
        self._TTL = None
        # Usage counters for the current one-second window.
        self._this_sec = 0
        self._taken_this_sec = 0
        self._borrowed_this_sec = 0
        # Reporting cadence, counted in calls to ``_time_to_report``.
        self._report_interval = 1
        self._report_elapsed = 0

    def borrow_or_take(self, now, can_borrow):
        """
        Decide whether to borrow or take one quota from
        the reservoir. Return ``ReservoirDecision.NO`` if it can
        neither borrow nor take. This method is thread-safe.
        """
        with self._lock:
            return self._borrow_or_take(now, can_borrow)

    def load_quota(self, quota, TTL, interval):
        """
        Load new quota with a TTL. If the input is None,
        the reservoir will continue using old quota until it
        expires or has a non-None quota/TTL in a future load.
        """
        if quota is not None:
            self._quota = quota
        if TTL is not None:
            self._TTL = TTL
        if interval is not None:
            # Convert the service interval into reporting ticks.
            # NOTE(review): assumes the target poller ticks roughly every
            # 10 seconds — confirm against ``TargetPoller``.
            self._report_interval = interval / 10

    @property
    def quota(self):
        return self._quota

    @property
    def TTL(self):
        return self._TTL

    def _time_to_report(self):
        # Returns True on every ``_report_interval``-th call, resetting
        # the elapsed counter; otherwise counts up and returns False.
        if self._report_elapsed + 1 >= self._report_interval:
            self._report_elapsed = 0
            return True
        else:
            self._report_elapsed += 1
            # Fix: previously fell through and implicitly returned None.
            return False

    def _borrow_or_take(self, now, can_borrow):
        self._adjust_this_sec(now)
        # Don't borrow if the quota is available and fresh.
        if (self._quota is not None and self._quota >= 0 and
                self._TTL is not None and self._TTL >= now):
            if self._taken_this_sec >= self._quota:
                return ReservoirDecision.NO
            self._taken_this_sec = self._taken_this_sec + 1
            return ReservoirDecision.TAKE
        # Otherwise try to borrow if the quota is not present or expired.
        # At most one trace per second may be borrowed.
        if can_borrow:
            if self._borrowed_this_sec >= 1:
                return ReservoirDecision.NO
            self._borrowed_this_sec = self._borrowed_this_sec + 1
            return ReservoirDecision.BORROW
        # Fix: explicitly signal NO instead of implicitly returning None,
        # matching the decision type promised by ``borrow_or_take``.
        return ReservoirDecision.NO

    def _adjust_this_sec(self, now):
        # Reset per-second counters when the clock moves to a new second.
        if now != self._this_sec:
            self._taken_this_sec = 0
            self._borrowed_this_sec = 0
            self._this_sec = now


class ReservoirDecision(Enum):
    """
    An Enum of decisions the reservoir could make based on
    assigned quota with TTL and the current timestamp/usage.
    """
    TAKE = 'take'
    BORROW = 'borrow'
    NO = 'no'
================================================
FILE: aws_xray_sdk/core/sampling/rule_cache.py
================================================
import threading
from operator import attrgetter
TTL = 60 * 60  # The cache expires 1 hour after the last refresh time.


class RuleCache:
    """
    Cache sampling rules and quota retrieved by ``TargetPoller``
    and ``RulePoller``. It will not return anything if it expires.
    """

    def __init__(self):
        self._last_updated = None
        self._rules = []
        self._lock = threading.Lock()

    def get_matched_rule(self, sampling_req, now):
        """
        Return the first rule that matches the request (or is the
        default rule), or ``None`` when the cache has expired.
        """
        if self._is_expired(now):
            return None
        for rule in self.rules:
            if rule.match(sampling_req) or rule.is_default():
                return rule
        return None

    def load_rules(self, rules):
        with self._lock:
            self._load_rules(rules)

    def load_targets(self, targets_dict):
        with self._lock:
            self._load_targets(targets_dict)

    def _load_rules(self, rules):
        # Keep the old rules around so their statistics and reservoirs
        # can be carried over into the refreshed definitions.
        previous = {rule.name: rule for rule in self.rules}
        self.rules = rules
        for rule in self.rules:
            stale = previous.get(rule.name, None)
            if stale:
                rule.merge(stale)
        # The cache should maintain the order of the rules based on
        # priority. If priority is the same we sort name by alphabet
        # as rule name is unique.
        self.rules.sort(key=attrgetter('priority', 'name'))

    def _load_targets(self, targets_dict):
        for rule in self.rules:
            target = targets_dict.get(rule.name, None)
            if not target:
                continue
            rule.reservoir.load_quota(
                target['quota'], target['TTL'], target['interval'])
            rule.rate = target['rate']

    def _is_expired(self, now):
        # A cache that has never been loaded is treated as expired.
        if not self._last_updated:
            return True
        return now > self.last_updated + TTL

    @property
    def rules(self):
        return self._rules

    @rules.setter
    def rules(self, v):
        self._rules = v

    @property
    def last_updated(self):
        return self._last_updated

    @last_updated.setter
    def last_updated(self, v):
        self._last_updated = v
================================================
FILE: aws_xray_sdk/core/sampling/rule_poller.py
================================================
import logging
from random import Random
import time
import threading
log = logging.getLogger(__name__)
DEFAULT_INTERVAL = 5 * 60  # 5 minutes on sampling rules fetch


class RulePoller:
    """
    Background poller that periodically refreshes the centralized
    sampling rules in the cache through the service connector.
    """

    def __init__(self, cache, connector):
        self._cache = cache
        self._random = Random()
        self._time_to_wait = 0
        self._time_elapsed = 0
        self._connector = connector

    def start(self):
        """Run the polling loop on a daemon thread."""
        worker = threading.Thread(target=self._worker)
        worker.daemon = True
        worker.start()

    def _worker(self):
        tick = 1  # seconds between elapsed-time checks
        while True:
            if self._time_elapsed < self._time_to_wait:
                time.sleep(tick)
                self._time_elapsed += tick
            else:
                self._refresh_cache()
                self._time_elapsed = 0
                self._reset_time_to_wait()

    def wake_up(self):
        """
        Force the rule poller to pull the sampling rules from the service
        regardless of the polling interval.
        This method is intended to be used by ``TargetPoller`` only.
        """
        self._time_elapsed = self._time_to_wait + 1000

    def _refresh_cache(self):
        try:
            fetched_at = int(time.time())
            fresh_rules = self._connector.fetch_sampling_rules()
            if fresh_rules:
                self._cache.load_rules(fresh_rules)
                self._cache.last_updated = fetched_at
        except Exception:
            log.error("Encountered an issue while polling sampling rules.", exc_info=True)

    def _reset_time_to_wait(self):
        """
        A random jitter of up to 5 seconds is injected after each run
        to ensure the calls eventually get evenly distributed over
        the 5 minute window.
        """
        self._time_to_wait = DEFAULT_INTERVAL + self._random.random() * 5
================================================
FILE: aws_xray_sdk/core/sampling/sampler.py
================================================
import logging
from random import Random
import time
import threading
from .local.sampler import LocalSampler
from .rule_cache import RuleCache
from .rule_poller import RulePoller
from .target_poller import TargetPoller
from .connector import ServiceConnector
from .reservoir import ReservoirDecision
from aws_xray_sdk import global_sdk_config
log = logging.getLogger(__name__)
class DefaultSampler:
    """Making sampling decisions based on centralized sampling rules defined
    by X-Ray control plane APIs. It will fall back to local sampler if
    centralized sampling rules are not available.
    """
    def __init__(self):
        self._local_sampler = LocalSampler()
        self._cache = RuleCache()
        self._connector = ServiceConnector()
        self._rule_poller = RulePoller(self._cache, self._connector)
        self._target_poller = TargetPoller(self._cache,
                                           self._rule_poller, self._connector)
        self._xray_client = None
        self._random = Random()
        self._started = False
        self._origin = None
        self._lock = threading.Lock()

    def start(self):
        """
        Start rule poller and target poller once X-Ray daemon address
        and context manager is in place.
        """
        if not global_sdk_config.sdk_enabled():
            return
        with self._lock:
            # Guarded by the lock so the pollers are spawned at most once.
            if not self._started:
                self._rule_poller.start()
                self._target_poller.start()
                self._started = True

    def should_trace(self, sampling_req=None):
        """
        Return the matched sampling rule name if the sampler finds one
        and decide to sample. If no sampling rule matched, it falls back
        to the local sampler's ``should_trace`` implementation.
        All optional arguments are extracted from incoming requests by
        X-Ray middleware to perform path based sampling.
        """
        if not global_sdk_config.sdk_enabled():
            return False
        if not self._started:
            self.start()  # only front-end that actually uses the sampler spawns poller threads
        now = int(time.time())
        # Fill in the service type from the recorder's origin if the
        # middleware didn't provide one.
        if sampling_req and not sampling_req.get('service_type', None):
            sampling_req['service_type'] = self._origin
        elif sampling_req is None:
            sampling_req = {'service_type': self._origin}
        matched_rule = self._cache.get_matched_rule(sampling_req, now)
        if matched_rule:
            log.debug('Rule %s is selected to make a sampling decision.', matched_rule.name)
            return self._process_matched_rule(matched_rule, now)
        else:
            log.info('No effective centralized sampling rule match. Fallback to local rules.')
            return self._local_sampler.should_trace(sampling_req)

    def load_local_rules(self, rules):
        """
        Load specified local rules to local fallback sampler.
        """
        self._local_sampler.load_local_rules(rules)

    def load_settings(self, daemon_config, context, origin=None):
        """
        The pollers have dependency on the context manager
        of the X-Ray recorder. They will respect the customer
        specified xray client to poll sampling rules/targets.
        Otherwise they falls back to use the same X-Ray daemon
        as the emitter.
        """
        self._connector.setup_xray_client(ip=daemon_config.tcp_ip,
                                          port=daemon_config.tcp_port,
                                          client=self.xray_client)
        self._connector.context = context
        self._origin = origin

    def _process_matched_rule(self, rule, now):
        """
        Make the final decision with the matched rule: take/borrow from
        its reservoir first, then fall back to its fixed rate.
        Returns the rule name when sampled, ``False`` otherwise.
        """
        # As long as a rule is matched we increment request counter.
        rule.increment_request_count()
        reservoir = rule.reservoir
        sample = True
        # We check if we can borrow or take from reservoir first.
        decision = reservoir.borrow_or_take(now, rule.can_borrow)
        if decision == ReservoirDecision.BORROW:
            rule.increment_borrow_count()
        elif decision == ReservoirDecision.TAKE:
            rule.increment_sampled_count()
        # Otherwise we compute based on fixed rate of this sampling rule.
        # Strict comparison so a rule with rate 0 never samples, matching
        # the local sampler's behavior.
        elif self._random.random() < rule.rate:
            rule.increment_sampled_count()
        else:
            sample = False
        if sample:
            return rule.name
        else:
            return False

    @property
    def xray_client(self):
        return self._xray_client

    @xray_client.setter
    def xray_client(self, v):
        self._xray_client = v
================================================
FILE: aws_xray_sdk/core/sampling/sampling_rule.py
================================================
import threading
from .reservoir import Reservoir
from aws_xray_sdk.core.utils.search_pattern import wildcard_match
class SamplingRule:
    """
    Data model for a single centralized sampling rule definition.
    """
    def __init__(self, name, priority, rate, reservoir_size,
                 host=None, method=None, path=None, service=None,
                 service_type=None):
        self._name = name
        self._priority = priority
        self._rate = rate
        # A zero reservoir size means this rule may never borrow.
        self._can_borrow = bool(reservoir_size)
        self._host = host
        self._method = method
        self._path = path
        self._service = service
        self._service_type = service_type
        self._reservoir = Reservoir()
        self._reset_statistics()
        self._lock = threading.Lock()

    def match(self, sampling_req):
        """
        Determines whether or not this sampling rule applies to the incoming
        request based on some of the request's parameters.
        Any ``None`` parameter provided will be considered an implicit match.
        """
        if sampling_req is None:
            return False
        criteria = (
            (self._host, sampling_req.get('host', None)),
            (self._method, sampling_req.get('method', None)),
            (self._path, sampling_req.get('path', None)),
            (self._service, sampling_req.get('service', None)),
            (self._service_type, sampling_req.get('service_type', None)),
        )
        return all(not actual or wildcard_match(expected, actual)
                   for expected, actual in criteria)

    def is_default(self):
        # ``Default`` is a reserved keyword on X-Ray back-end.
        return self.name == 'Default'

    def snapshot_statistics(self):
        """
        Take a snapshot of request/borrow/sampled count for reporting
        back to X-Ray back-end by ``TargetPoller`` and reset those counters.
        """
        with self._lock:
            snapshot = {
                'request_count': self.request_count,
                'borrow_count': self.borrow_count,
                'sampled_count': self.sampled_count,
            }
            self._reset_statistics()
            return snapshot

    def merge(self, rule):
        """
        Migrate all stateful attributes from the old rule
        """
        with self._lock:
            self._request_count = rule.request_count
            self._borrow_count = rule.borrow_count
            self._sampled_count = rule.sampled_count
            self._reservoir = rule.reservoir
            rule.reservoir = None

    def ever_matched(self):
        """
        Returns ``True`` if this sample rule has ever been matched
        with an incoming request within the reporting interval.
        """
        return self._request_count > 0

    def time_to_report(self):
        """
        Returns ``True`` if it is time to report sampling statistics
        of this rule to refresh quota information for its reservoir.
        """
        return self.reservoir._time_to_report()

    def increment_request_count(self):
        with self._lock:
            self._request_count += 1

    def increment_borrow_count(self):
        with self._lock:
            self._borrow_count += 1

    def increment_sampled_count(self):
        with self._lock:
            self._sampled_count += 1

    def _reset_statistics(self):
        # Counters are zeroed after each report to the service.
        self._request_count = 0
        self._borrow_count = 0
        self._sampled_count = 0

    @property
    def rate(self):
        return self._rate

    @rate.setter
    def rate(self, v):
        self._rate = v

    @property
    def name(self):
        return self._name

    @property
    def priority(self):
        return self._priority

    @property
    def reservoir(self):
        return self._reservoir

    @reservoir.setter
    def reservoir(self, v):
        self._reservoir = v

    @property
    def can_borrow(self):
        return self._can_borrow

    @property
    def request_count(self):
        return self._request_count

    @property
    def borrow_count(self):
        return self._borrow_count

    @property
    def sampled_count(self):
        return self._sampled_count
================================================
FILE: aws_xray_sdk/core/sampling/target_poller.py
================================================
import logging
from random import Random
import time
import threading
log = logging.getLogger(__name__)


class TargetPoller:
    """
    The poller to report the current statistics of all
    centralized sampling rules and retrieve the new allocated
    sampling quota and TTL from X-Ray service.
    """

    def __init__(self, cache, rule_poller, connector):
        self._cache = cache
        self._rule_poller = rule_poller
        self._connector = connector
        self._random = Random()
        self._interval = 10  # default 10 seconds interval on sampling targets fetch

    def start(self):
        """Run the polling loop on a daemon thread."""
        worker = threading.Thread(target=self._worker)
        worker.daemon = True
        worker.start()

    def _worker(self):
        while True:
            try:
                time.sleep(self._interval + self._get_jitter())
                self._do_work()
            except Exception:
                log.error("Encountered an issue while polling targets.", exc_info=True)

    def _do_work(self):
        candidates = self._get_candidates(self._cache.rules)
        if not candidates:
            log.debug('There is no sampling rule statistics to report. Skipping')
            return None
        targets, rule_freshness = self._connector.fetch_sampling_target(candidates)
        self._cache.load_targets(targets)
        # The service has newer rules than the cache; refresh out-of-band.
        if rule_freshness > self._cache.last_updated:
            log.info('Performing out-of-band sampling rule polling to fetch updated rules.')
            self._rule_poller.wake_up()

    def _get_candidates(self, all_rules):
        """
        Don't report a rule statistics if any of the conditions is met:
        1. The report time hasn't come(some rules might have larger report intervals).
        2. The rule is never matched.
        """
        return [rule for rule in all_rules
                if rule.ever_matched() and rule.time_to_report()]

    def _get_jitter(self):
        """
        A random jitter of up to 0.1 seconds is injected after every run
        to ensure all poller calls eventually get evenly distributed
        over the polling interval window.
        """
        return self._random.random() / self._interval
================================================
FILE: aws_xray_sdk/core/streaming/__init__.py
================================================
================================================
FILE: aws_xray_sdk/core/streaming/default_streaming.py
================================================
import threading
class DefaultStreaming:
    """
    The default streaming strategy. It uses the total count of a
    segment's children subsegments as a threshold. If the threshold is
    breached, it uses subtree streaming to stream out.
    """

    def __init__(self, streaming_threshold=30):
        self._threshold = streaming_threshold
        self._lock = threading.Lock()

    def is_eligible(self, segment):
        """
        A segment is eligible to have its children subsegments streamed
        if it is sampled and it breaches streaming threshold.
        """
        if not segment or not segment.sampled:
            return False
        return segment.get_total_subsegments_size() > self.streaming_threshold

    def stream(self, entity, callback):
        """
        Stream out all eligible children of the input entity.
        :param entity: The target entity to be streamed.
        :param callback: The function that takes the node and
            actually send it out.
        """
        with self._lock:
            self._stream(entity, callback)

    def _stream(self, entity, callback):
        # Post-order walk: decide on children subtrees before their root.
        completed = [child for child in entity.subsegments
                     if self._stream(child, callback)]
        # If this root is closed and all of its children subtrees are
        # ready, report the whole subtree as ready instead of streaming.
        if len(completed) == len(entity.subsegments) and not entity.in_progress:
            return True
        # Otherwise flush every ready child subtree and stay un-ready.
        for child in completed:
            callback(child)
            entity.remove_subsegment(child)
        return False

    @property
    def streaming_threshold(self):
        return self._threshold

    @streaming_threshold.setter
    def streaming_threshold(self, value):
        self._threshold = value
================================================
FILE: aws_xray_sdk/core/utils/__init__.py
================================================
================================================
FILE: aws_xray_sdk/core/utils/atomic_counter.py
================================================
import threading
class AtomicCounter:
    """
    A thread-safe integer counter: every mutation happens under a lock
    so concurrent callers always observe a consistent value.
    """

    def __init__(self, initial=0):
        self.value = initial
        self._lock = threading.Lock()
        self._initial = initial

    def increment(self, num=1):
        """Add ``num`` to the counter and return the new value."""
        with self._lock:
            self.value += num
            return self.value

    def decrement(self, num=1):
        """Subtract ``num`` from the counter and return the new value."""
        with self._lock:
            self.value -= num
            return self.value

    def get_current(self):
        """Return the current value."""
        with self._lock:
            return self.value

    def reset(self):
        """Restore the counter to its initial value and return it."""
        with self._lock:
            self.value = self._initial
            return self.value
================================================
FILE: aws_xray_sdk/core/utils/compat.py
================================================
import inspect
annotation_value_types = (int, float, bool, str)


def is_classmethod(func):
    """
    Return True when *func* is bound (has a non-None ``__self__``).
    NOTE: bound instance methods also carry ``__self__``; callers are
    expected to pass the raw (unbound) function for instance methods.
    """
    return getattr(func, '__self__', None) is not None


def is_instance_method(parent_class, func_name, func):
    """
    Best-effort check that ``func_name`` on ``parent_class`` is a plain
    instance method (i.e. neither a classmethod nor a staticmethod).
    Falls back to True when the name cannot be found on the MRO.
    """
    raw_attr = None
    for klass in inspect.getmro(parent_class):
        if func_name in vars(klass):
            raw_attr = vars(klass)[func_name]
            break
    else:
        # Name not present anywhere on the class hierarchy; assume it
        # behaves like an instance method.
        return True
    return not is_classmethod(func) and not isinstance(raw_attr, staticmethod)
================================================
FILE: aws_xray_sdk/core/utils/conversion.py
================================================
import logging
log = logging.getLogger(__name__)


def metadata_to_dict(obj):
    """
    Convert object to dict with all serializable properties like:
    dict, list, set, tuple, str, bool, int, float, type, object, etc.
    Conversion failures are logged and produce an empty dict.
    """
    # Branch order matters: dict before generic iterables, and the
    # ``_ast``/``__iter__`` checks before the generic ``__dict__`` case.
    try:
        if isinstance(obj, dict):
            return {key: metadata_to_dict(value) for key, value in obj.items()}
        if isinstance(obj, type):
            return str(obj)
        if hasattr(obj, "_ast"):
            return metadata_to_dict(obj._ast())
        if hasattr(obj, "__iter__") and not isinstance(obj, str):
            return [metadata_to_dict(item) for item in obj]
        if hasattr(obj, "__dict__"):
            return {
                key: metadata_to_dict(value)
                for key, value in vars(obj).items()
                if not callable(value) and not key.startswith('_')
            }
        return obj
    except Exception as e:
        import pprint
        log.warning("Failed to convert metadata to dict:\n%s", pprint.pformat(getattr(e, "args", None)))
        return {}
================================================
FILE: aws_xray_sdk/core/utils/search_pattern.py
================================================
def wildcard_match(pattern, text, case_insensitive=True):
    """
    Performs a wildcard match against two strings (case-insensitive
    by default).
    This method works with pseudo-regex chars; specifically ? and * are supported.
    An asterisk (*) represents any combination of characters.
    A question mark (?) represents any single character.
    :param str pattern: the regex-like pattern to be compared against
    :param str text: the string to compare against the pattern
    :param boolean case_insensitive: default is True
    return whether the text matches the pattern
    """
    if pattern is None or text is None:
        return False
    if len(pattern) == 0:
        return len(text) == 0
    # Check the special case of a single * pattern, as it's common
    if pattern == '*':
        return True
    # Greedy two-pointer scan with backtracking: i/p walk text/pattern.
    # When a '*' is seen, its position is recorded in pStar and the
    # current text position in iStar so that a later mismatch can back
    # up and let the '*' absorb one more character.
    # iStar == len(text) doubles as the "no star seen yet" sentinel.
    i = 0
    p = 0
    iStar = len(text)
    pStar = 0
    while i < len(text):
        # Exact character match.
        if p < len(pattern) and text[i] == pattern[p]:
            i = i + 1
            p = p + 1
        # Case-insensitive character match (when enabled).
        elif p < len(pattern) and case_insensitive and text[i].lower() == pattern[p].lower():
            i = i + 1
            p = p + 1
        # '?' consumes exactly one character of the text.
        elif p < len(pattern) and pattern[p] == '?':
            i = i + 1
            p = p + 1
        # '*': remember where it is and tentatively match zero characters.
        elif p < len(pattern) and pattern[p] == '*':
            iStar = i
            pStar = p
            p += 1
        # Mismatch after a '*': backtrack and let the star absorb one more.
        elif iStar != len(text):
            iStar += 1
            i = iStar
            p = pStar + 1
        # Mismatch with no '*' to fall back on: no match possible.
        else:
            return False
    # Any trailing '*'s in the pattern match the empty remainder.
    while p < len(pattern) and pattern[p] == '*':
        p = p + 1
    return p == len(pattern) and i == len(text)
================================================
FILE: aws_xray_sdk/core/utils/sqs_message_helper.py
================================================
SQS_XRAY_HEADER = "AWSTraceHeader"


class SqsMessageHelper:
    """Helpers for inspecting X-Ray trace information on SQS messages."""

    @staticmethod
    def isSampled(sqs_message):
        """
        Return True when the message's trace header attribute is present
        and carries a positive sampling decision.
        """
        header = sqs_message['attributes'].get(SQS_XRAY_HEADER)
        return header is not None and 'Sampled=1' in header
================================================
FILE: aws_xray_sdk/core/utils/stacktrace.py
================================================
import sys
import traceback
def get_stacktrace(limit=None):
    """
    Get a full stacktrace for the current state of execution.

    Include the current state of the stack, minus this function.
    If there is an active exception, include the stacktrace information from
    the exception as well.

    :param int limit:
        Optionally limit stack trace size results. This parameter has the same
        meaning as the `limit` parameter in `traceback.print_stack`.
    :returns:
        List of stack trace objects, in the same form as
        `traceback.extract_stack`.
    """
    # `None == 0` is False, so an unset limit falls through to the full trace.
    # (The previous `limit is not None and limit == 0` check was redundant.)
    if limit == 0:
        # Nothing to return. This is consistent with the behavior of the
        # functions in the `traceback` module.
        return []

    stack = traceback.extract_stack()
    # Remove this `get_stacktrace()` function call from the stack info.
    # For what we want to report, this is superfluous information and arguably
    # adds garbage to the report.
    # Also drop the `traceback.extract_stack()` call above from the returned
    # stack info, since this is also superfluous.
    stack = stack[:-2]

    _exc_type, _exc, exc_traceback = sys.exc_info()
    if exc_traceback is not None:
        # If and only if there is a currently triggered exception, combine the
        # exception traceback information with the current stack state to get a
        # complete trace.
        exc_stack = traceback.extract_tb(exc_traceback)
        stack += exc_stack

    # Limit the stack trace size, if a limit was specified:
    if limit is not None:
        # Copy the behavior of `traceback` functions with a `limit` argument.
        # See https://docs.python.org/3/library/traceback.html.
        if limit > 0:
            # limit > 0: include the last `limit` items
            stack = stack[-limit:]
        else:
            # limit < 0: include the first `abs(limit)` items
            stack = stack[:abs(limit)]
    return stack
================================================
FILE: aws_xray_sdk/ext/__init__.py
================================================
================================================
FILE: aws_xray_sdk/ext/aiobotocore/__init__.py
================================================
from .patch import patch
__all__ = ['patch']
================================================
FILE: aws_xray_sdk/ext/aiobotocore/patch.py
================================================
import aiobotocore.client
import wrapt
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.ext.boto_utils import inject_header, aws_meta_processor
def patch():
    """
    Patch aiobotocore client so it generates subsegments
    when calling AWS services.
    """
    # Idempotency guard: wrapping twice would double-count subsegments.
    if hasattr(aiobotocore.client, '_xray_enabled'):
        return
    aiobotocore.client._xray_enabled = True

    # Trace every API call made through the async client.
    wrapt.wrap_function_wrapper('aiobotocore.client',
                                'AioBaseClient._make_api_call',
                                _xray_traced_aiobotocore)
    # Propagate the trace header on outgoing requests.
    wrapt.wrap_function_wrapper('aiobotocore.endpoint',
                                'AioEndpoint.prepare_request',
                                inject_header)
async def _xray_traced_aiobotocore(wrapped, instance, args, kwargs):
    """Record an 'aws' namespace subsegment around an async API call."""
    # The AWS service short name (e.g. "s3") becomes the subsegment name.
    service_name = instance._service_model.metadata["endpointPrefix"]
    return await xray_recorder.record_subsegment_async(
        wrapped, instance, args, kwargs,
        name=service_name,
        namespace='aws',
        meta_processor=aws_meta_processor,
    )
================================================
FILE: aws_xray_sdk/ext/aiohttp/__init__.py
================================================
================================================
FILE: aws_xray_sdk/ext/aiohttp/client.py
================================================
"""
AioHttp Client tracing, only compatible with Aiohttp 3.X versions
"""
import aiohttp
from types import SimpleNamespace
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.models import http
from aws_xray_sdk.core.utils import stacktrace
from aws_xray_sdk.ext.util import inject_trace_header, strip_url, get_hostname
# All aiohttp calls entail outgoing HTTP requests; only for some ad-hoc
# exceptions (see LOCAL_EXCEPTIONS) is the namespace flipped back to local.
REMOTE_NAMESPACE = 'remote'
LOCAL_NAMESPACE = 'local'
LOCAL_EXCEPTIONS = (
aiohttp.client_exceptions.ClientConnectionError,
# DNS issues
OSError
)
async def begin_subsegment(session, trace_config_ctx, params):
    """on_request_start hook: open a remote subsegment for the outgoing call."""
    if trace_config_ctx.name:
        segment_name = trace_config_ctx.name
    else:
        segment_name = get_hostname(str(params.url))

    subsegment = xray_recorder.begin_subsegment(segment_name, REMOTE_NAMESPACE)
    # `begin_subsegment` may return `None` under the `LOG_ERROR`
    # context-missing strategy; remember that so the end hooks no-op.
    if not subsegment:
        trace_config_ctx.give_up = True
        return

    trace_config_ctx.give_up = False
    subsegment.put_http_meta(http.METHOD, params.method)
    subsegment.put_http_meta(http.URL, strip_url(params.url.human_repr()))
    inject_trace_header(params.headers, subsegment)
async def end_subsegment(session, trace_config_ctx, params):
    """on_request_end hook: record the response status and close."""
    if trace_config_ctx.give_up:
        # No subsegment was opened for this request.
        return
    current = xray_recorder.current_subsegment()
    current.put_http_meta(http.STATUS, params.response.status)
    xray_recorder.end_subsegment()
async def end_subsegment_with_exception(session, trace_config_ctx, params):
    """on_request_exception hook: attach the exception and close."""
    if trace_config_ctx.give_up:
        # No subsegment was opened for this request.
        return
    current = xray_recorder.current_subsegment()
    stack = stacktrace.get_stacktrace(limit=xray_recorder._max_trace_back)
    current.add_exception(params.exception, stack)

    # Connection/DNS failures happen before the request leaves the host, so
    # attribute them to the local namespace instead of the remote service.
    if isinstance(params.exception, LOCAL_EXCEPTIONS):
        current.namespace = LOCAL_NAMESPACE

    xray_recorder.end_subsegment()
def aws_xray_trace_config(name=None):
    """
    Build an aiohttp TraceConfig wired to the X-Ray client hooks.

    :param name: name used to identify the subsegment, with None internally
        the URL will be used as identifier.
    :returns: TraceConfig.
    """
    def _factory(trace_request_ctx):
        # Carry the fixed name (if any) and the caller's context into hooks.
        return SimpleNamespace(name=name, trace_request_ctx=trace_request_ctx)

    config = aiohttp.TraceConfig(trace_config_ctx_factory=_factory)
    config.on_request_start.append(begin_subsegment)
    config.on_request_end.append(end_subsegment)
    config.on_request_exception.append(end_subsegment_with_exception)
    return config
================================================
FILE: aws_xray_sdk/ext/aiohttp/middleware.py
================================================
"""
AioHttp Middleware
"""
from aiohttp import web
from aiohttp.web_exceptions import HTTPException
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.models import http
from aws_xray_sdk.core.utils import stacktrace
from aws_xray_sdk.ext.util import calculate_sampling_decision, \
calculate_segment_name, construct_xray_header, prepare_response_header
@web.middleware
async def middleware(request, handler):
    """
    Main middleware function, deals with all the X-Ray segment logic
    """
    # Create X-Ray headers
    xray_header = construct_xray_header(request.headers)
    # Get name of service or generate a dynamic one from host
    name = calculate_segment_name(request.headers['host'].split(':', 1)[0], xray_recorder)

    # Request facts used to evaluate centralized/local sampling rules.
    sampling_req = {
        'host': request.headers['host'],
        'method': request.method,
        'path': request.path,
        'service': name,
    }
    sampling_decision = calculate_sampling_decision(
        trace_header=xray_header,
        recorder=xray_recorder,
        sampling_req=sampling_req,
    )

    # Start a segment
    segment = xray_recorder.begin_segment(
        name=name,
        traceid=xray_header.root,
        parent_id=xray_header.parent,
        sampling=sampling_decision,
    )

    # Keep the inbound header around so the response header can echo it back.
    segment.save_origin_trace_header(xray_header)

    # Store request metadata in the current segment
    segment.put_http_meta(http.URL, str(request.url))
    segment.put_http_meta(http.METHOD, request.method)

    if 'User-Agent' in request.headers:
        segment.put_http_meta(http.USER_AGENT, request.headers['User-Agent'])

    if 'X-Forwarded-For' in request.headers:
        # X-Forwarded-For can be spoofed by the client, so also set the
        # X_FORWARDED_FOR flag to mark the IP as unverified.
        segment.put_http_meta(http.CLIENT_IP, request.headers['X-Forwarded-For'])
        segment.put_http_meta(http.X_FORWARDED_FOR, True)
    elif 'remote_addr' in request.headers:
        segment.put_http_meta(http.CLIENT_IP, request.headers['remote_addr'])
    else:
        segment.put_http_meta(http.CLIENT_IP, request.remote)

    try:
        # Call next middleware or request handler
        response = await handler(request)
    except HTTPException as exc:
        # Non 2XX responses are raised as HTTPExceptions
        # Keep the exception as the response so the finally block can
        # record its status before re-raising.
        response = exc
        raise
    except BaseException as err:
        # Store exception information including the stacktrace to the segment
        response = None
        segment.put_http_meta(http.STATUS, 500)
        stack = stacktrace.get_stacktrace(limit=xray_recorder.max_trace_back)
        segment.add_exception(err, stack)
        raise
    finally:
        # Runs on both success and failure: record response metadata (when a
        # response object exists), emit the trace header, and always close
        # the segment.
        if response is not None:
            segment.put_http_meta(http.STATUS, response.status)

            if 'Content-Length' in response.headers:
                length = int(response.headers['Content-Length'])
                segment.put_http_meta(http.CONTENT_LENGTH, length)
            header_str = prepare_response_header(xray_header, segment)
            response.headers[http.XRAY_HEADER] = header_str
        xray_recorder.end_segment()
    return response
================================================
FILE: aws_xray_sdk/ext/boto_utils.py
================================================
import json
import pkgutil
from botocore.exceptions import ClientError
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.models import http
from aws_xray_sdk.core.exceptions.exceptions import SegmentNotFoundException
from aws_xray_sdk.ext.util import inject_trace_header, to_snake_case
# `.decode('utf-8')` needed for Python 3.4, 3.5
whitelist = json.loads(pkgutil.get_data(__name__, 'resources/aws_para_whitelist.json').decode('utf-8'))
def inject_header(wrapped, instance, args, kwargs):
    """Insert the X-Ray trace header into an outgoing botocore request."""
    prepared_request = args[0]

    # skip tracing for SDK built-in centralized sampling pollers
    url = prepared_request.url
    if 'GetCentralizedSamplingRules' in url or 'SamplingTargets' in url:
        return wrapped(*args, **kwargs)

    # skip if the recorder is unable to open the subsegment
    # for the outgoing request
    try:
        subsegment = xray_recorder.current_subsegment()
    except SegmentNotFoundException:
        subsegment = None

    if subsegment:
        inject_trace_header(prepared_request.headers, subsegment)
    return wrapped(*args, **kwargs)
def aws_meta_processor(wrapped, instance, args, kwargs,
                       return_value, exception, subsegment, stack):
    """Populate the subsegment's aws section from a botocore call result."""
    if 'operation_name' in kwargs:
        operation_name = kwargs['operation_name']
    else:
        operation_name = args[0]

    aws_meta = {
        'operation': operation_name,
        'region': instance.meta.region_name,
    }

    if return_value:
        resp_meta = return_value.get('ResponseMetadata')
        if resp_meta:
            aws_meta['request_id'] = resp_meta.get('RequestId')
            subsegment.put_http_meta(http.STATUS,
                                     resp_meta.get('HTTPStatusCode'))
            # for service like S3 that returns special request id in response headers
            resp_headers = resp_meta.get('HTTPHeaders', {})
            if resp_headers.get('x-amz-id-2'):
                aws_meta['id_2'] = resp_headers['x-amz-id-2']
    elif exception:
        _aws_error_handler(exception, stack, subsegment, aws_meta)

    _extract_whitelisted_params(subsegment.name, operation_name,
                                aws_meta, args, kwargs, return_value)
    subsegment.set_aws(aws_meta)
def _aws_error_handler(exception, stack, subsegment, aws_meta):
    """Record request id, status and exception details from a ClientError."""
    # Only botocore ClientError carries the structured response metadata we
    # know how to read (isinstance is False for None as well).
    if not isinstance(exception, ClientError):
        return

    response_metadata = exception.response.get('ResponseMetadata')
    if not response_metadata:
        return

    aws_meta['request_id'] = response_metadata.get('RequestId')
    subsegment.put_http_meta(http.STATUS,
                             response_metadata.get('HTTPStatusCode'))
    subsegment.add_exception(exception, stack, True)
def _extract_whitelisted_params(service, operation,
                                aws_meta, args, kwargs, response):
    """Record whitelisted request/response parameters for an operation."""
    services = whitelist['services']
    # check if service is whitelisted
    if service not in services:
        return
    operations = services[service]['operations']
    # check if operation is whitelisted
    if operation not in operations:
        return
    params = operations[operation]

    # record whitelisted request/response parameters
    if 'request_parameters' in params:
        _record_params(params['request_parameters'], args[1], aws_meta)
    if 'request_descriptors' in params:
        _record_special_params(params['request_descriptors'],
                               args[1], aws_meta)
    if response:
        if 'response_parameters' in params:
            _record_params(params['response_parameters'], response, aws_meta)
        if 'response_descriptors' in params:
            _record_special_params(params['response_descriptors'],
                                   response, aws_meta)
def _record_params(whitelisted, actual, aws_meta):
    """Copy whitelisted keys present in `actual` into aws_meta, snake_cased."""
    for key in whitelisted:
        if key not in actual:
            continue
        aws_meta[to_snake_case(key)] = actual[key]
def _record_special_params(whitelisted, actual, aws_meta):
    """Apply each whitelisted descriptor to the matching value in `actual`."""
    for key, descriptor in whitelisted.items():
        if key in actual:
            _process_descriptor(descriptor, actual[key], aws_meta)
def _process_descriptor(descriptor, value, aws_meta):
# "get_count" = true
if 'get_count' in descriptor and descriptor['get_count']:
value = len(value)
# "get_keys" = true
if 'get_keys' in descriptor and descriptor['get_keys']:
value = value.keys()
aws_meta[descriptor['rename_to']] = value
================================================
FILE: aws_xray_sdk/ext/botocore/__init__.py
================================================
from .patch import patch
__all__ = ['patch']
================================================
FILE: aws_xray_sdk/ext/botocore/patch.py
================================================
import wrapt
import botocore.client
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.ext.boto_utils import inject_header, aws_meta_processor
def patch():
    """
    Patch botocore client so it generates subsegments
    when calling AWS services.
    """
    # Idempotency guard: wrapping twice would double-count subsegments.
    if hasattr(botocore.client, '_xray_enabled'):
        return
    botocore.client._xray_enabled = True

    # Trace every API call made through the client.
    wrapt.wrap_function_wrapper('botocore.client',
                                'BaseClient._make_api_call',
                                _xray_traced_botocore)
    # Propagate the trace header on outgoing requests.
    wrapt.wrap_function_wrapper('botocore.endpoint',
                                'Endpoint.prepare_request',
                                inject_header)
def _xray_traced_botocore(wrapped, instance, args, kwargs):
    """Record an 'aws' namespace subsegment around a botocore API call."""
    service = instance._service_model.metadata["endpointPrefix"]
    # skip tracing for SDK built-in sampling pollers
    if service == 'xray' and any(
            operation in args
            for operation in ('GetSamplingRules',
                              'GetSamplingTargets',
                              'PutTraceSegments')):
        return wrapped(*args, **kwargs)

    return xray_recorder.record_subsegment(
        wrapped, instance, args, kwargs,
        name=service,
        namespace='aws',
        meta_processor=aws_meta_processor,
    )
================================================
FILE: aws_xray_sdk/ext/bottle/__init__.py
================================================
================================================
FILE: aws_xray_sdk/ext/bottle/middleware.py
================================================
from bottle import request, response, SimpleTemplate
from aws_xray_sdk.core.lambda_launcher import check_in_lambda, LambdaContext
from aws_xray_sdk.core.models import http
from aws_xray_sdk.core.utils import stacktrace
from aws_xray_sdk.ext.util import calculate_sampling_decision, \
calculate_segment_name, construct_xray_header, prepare_response_header
class XRayMiddleware:
    """
    Middleware that wraps each incoming request to a segment.
    """
    # Bottle plugin API metadata (name and plugin API version).
    name = 'xray'
    api = 2

    def __init__(self, recorder):
        # recorder: the X-Ray recorder used for all segment operations.
        self._recorder = recorder
        self._in_lambda_ctx = False
        # Inside AWS Lambda the function segment is owned by the runtime,
        # so the middleware records subsegments instead of segments.
        if check_in_lambda() and type(self._recorder.context) == LambdaContext:
            self._in_lambda_ctx = True
        _patch_render(recorder)

    def apply(self, callback, route):
        """
        Apply middleware directly to each route callback.
        """
        def wrapper(*a, **ka):
            headers = request.headers
            xray_header = construct_xray_header(headers)
            # urlparts[1] is the netloc (host[:port]) of the request URL.
            name = calculate_segment_name(request.urlparts[1], self._recorder)

            # Request facts used to evaluate sampling rules.
            sampling_req = {
                'host': request.urlparts[1],
                'method': request.method,
                'path': request.path,
                'service': name,
            }

            sampling_decision = calculate_sampling_decision(
                trace_header=xray_header,
                recorder=self._recorder,
                sampling_req=sampling_req,
            )

            if self._in_lambda_ctx:
                segment = self._recorder.begin_subsegment(name)
            else:
                segment = self._recorder.begin_segment(
                    name=name,
                    traceid=xray_header.root,
                    parent_id=xray_header.parent,
                    sampling=sampling_decision,
                )

            # Keep the inbound header so the response can echo it back.
            segment.save_origin_trace_header(xray_header)
            segment.put_http_meta(http.URL, request.url)
            segment.put_http_meta(http.METHOD, request.method)
            segment.put_http_meta(http.USER_AGENT, headers.get('User-Agent'))

            # Prefer the forwarded address when behind a proxy; flag it as
            # forwarded since it comes from an untrusted source.
            client_ip = request.environ.get('HTTP_X_FORWARDED_FOR') or request.environ.get('REMOTE_ADDR')
            if client_ip:
                segment.put_http_meta(http.CLIENT_IP, client_ip)
                segment.put_http_meta(http.X_FORWARDED_FOR, True)
            else:
                segment.put_http_meta(http.CLIENT_IP, request.remote_addr)

            try:
                rv = callback(*a, **ka)
            except Exception as resp:
                # Record failure details on the entity, close it, then
                # re-raise so bottle's own error handling still runs.
                segment.put_http_meta(http.STATUS, getattr(resp, 'status_code', 500))
                stack = stacktrace.get_stacktrace(limit=self._recorder._max_trace_back)
                segment.add_exception(resp, stack)
                if self._in_lambda_ctx:
                    self._recorder.end_subsegment()
                else:
                    self._recorder.end_segment()
                raise resp

            segment.put_http_meta(http.STATUS, response.status_code)

            # Echo the trace header back to the caller.
            origin_header = segment.get_origin_trace_header()
            resp_header_str = prepare_response_header(origin_header, segment)
            response.set_header(http.XRAY_HEADER, resp_header_str)

            cont_len = response.headers.get('Content-Length')
            if cont_len:
                segment.put_http_meta(http.CONTENT_LENGTH, int(cont_len))

            if self._in_lambda_ctx:
                self._recorder.end_subsegment()
            else:
                self._recorder.end_segment()
            return rv

        return wrapper
def _patch_render(recorder):
    """Wrap SimpleTemplate.render so template rendering gets a subsegment."""
    original_render = SimpleTemplate.render

    @recorder.capture('template_render')
    def _traced_render(self, *args, **kwargs):
        if self.filename:
            # Name the subsegment after the template file being rendered.
            recorder.current_subsegment().name = self.filename
        return original_render(self, *args, **kwargs)

    SimpleTemplate.render = _traced_render
================================================
FILE: aws_xray_sdk/ext/dbapi2.py
================================================
import copy
import wrapt
from aws_xray_sdk.core import xray_recorder
class XRayTracedConn(wrapt.ObjectProxy):
    """Proxy around a DB-API 2 connection that hands out traced cursors."""

    _xray_meta = None

    def __init__(self, conn, meta=None):
        """
        :param conn: the underlying DB-API 2 connection to wrap.
        :param dict meta: metadata (name/user/database_type) recorded on SQL
            subsegments. Defaults to a fresh dict per instance; the previous
            ``meta={}`` default was a single shared mutable dict, so metadata
            written by one instance (e.g. by XRayTracedCursor) leaked into
            every other instance created without an explicit ``meta``.
        """
        super().__init__(conn)
        self._xray_meta = meta if meta is not None else {}

    def cursor(self, *args, **kwargs):
        """Return the wrapped connection's cursor, wrapped in a traced proxy."""
        cursor = self.__wrapped__.cursor(*args, **kwargs)
        return XRayTracedCursor(cursor, self._xray_meta)
class XRayTracedCursor(wrapt.ObjectProxy):
    """Proxy around a DB-API 2 cursor that traces query executions."""

    _xray_meta = None

    def __init__(self, cursor, meta=None):
        """
        :param cursor: the underlying DB-API 2 cursor to wrap.
        :param dict meta: SQL metadata recorded on subsegments. Defaults to a
            fresh dict per instance; the previous ``meta={}`` default was a
            shared mutable dict, and the ``database_type`` assignment below
            mutated it, leaking state across unrelated cursors.
        """
        super().__init__(cursor)
        self._xray_meta = meta if meta is not None else {}

        # we preset database type if db is framework built-in
        if not self._xray_meta.get('database_type'):
            db_type = cursor.__class__.__module__.split('.')[0]
            self._xray_meta['database_type'] = db_type

    def __enter__(self):
        # Some drivers return a different object from the cursor's
        # __enter__; only keep proxying when the cursor returns itself.
        value = self.__wrapped__.__enter__()
        if value is not self.__wrapped__:
            return value
        return self

    @xray_recorder.capture()
    def execute(self, query, *args, **kwargs):
        add_sql_meta(self._xray_meta)
        return self.__wrapped__.execute(query, *args, **kwargs)

    @xray_recorder.capture()
    def executemany(self, query, *args, **kwargs):
        add_sql_meta(self._xray_meta)
        return self.__wrapped__.executemany(query, *args, **kwargs)

    @xray_recorder.capture()
    def callproc(self, proc, args):
        add_sql_meta(self._xray_meta)
        return self.__wrapped__.callproc(proc, args)
def add_sql_meta(meta):
    """Attach SQL metadata to the current subsegment, if one is open."""
    subsegment = xray_recorder.current_subsegment()
    if not subsegment:
        return

    # A 'name' entry renames the subsegment (typically to the DB host) but
    # must not be sent as part of the sql metadata itself.
    if meta.get('name', None):
        subsegment.name = meta['name']

    sql_meta = copy.copy(meta)
    if sql_meta.get('name', None):
        del sql_meta['name']

    subsegment.set_sql(sql_meta)
    subsegment.namespace = 'remote'
================================================
FILE: aws_xray_sdk/ext/django/__init__.py
================================================
default_app_config = 'aws_xray_sdk.ext.django.apps.XRayConfig'
================================================
FILE: aws_xray_sdk/ext/django/apps.py
================================================
import logging
from django.apps import AppConfig
from .conf import settings
from .db import patch_db
from .templates import patch_template
from aws_xray_sdk.core import patch, xray_recorder
from aws_xray_sdk.core.exceptions.exceptions import SegmentNameMissingException
log = logging.getLogger(__name__)
class XRayConfig(AppConfig):
    name = 'aws_xray_sdk.ext.django'

    def ready(self):
        """
        Configure global XRay recorder based on django settings
        under XRAY_RECORDER namespace.
        This method could be called twice during server startup
        because of base command and reload command.
        So this function must be idempotent
        """
        # A segment name is mandatory; fail fast at startup rather than on
        # the first request.
        if not settings.AWS_XRAY_TRACING_NAME:
            raise SegmentNameMissingException('Segment name is required.')

        xray_recorder.configure(
            daemon_address=settings.AWS_XRAY_DAEMON_ADDRESS,
            sampling=settings.SAMPLING,
            sampling_rules=settings.SAMPLING_RULES,
            sampler=settings.SAMPLER,
            context_missing=settings.AWS_XRAY_CONTEXT_MISSING,
            plugins=settings.PLUGINS,
            service=settings.AWS_XRAY_TRACING_NAME,
            dynamic_naming=settings.DYNAMIC_NAMING,
            streaming_threshold=settings.STREAMING_THRESHOLD,
            max_trace_back=settings.MAX_TRACE_BACK,
            stream_sql=settings.STREAM_SQL,
        )

        if settings.PATCH_MODULES:
            # Optionally wrap the patching itself in a parent segment so any
            # subsegments emitted during import-time patching have a home.
            if settings.AUTO_PATCH_PARENT_SEGMENT_NAME is not None:
                with xray_recorder.in_segment(settings.AUTO_PATCH_PARENT_SEGMENT_NAME):
                    patch(settings.PATCH_MODULES, ignore_module_patterns=settings.IGNORE_MODULE_PATTERNS)
            else:
                patch(settings.PATCH_MODULES, ignore_module_patterns=settings.IGNORE_MODULE_PATTERNS)

        # if turned on subsegment will be generated on
        # built-in database and template rendering
        if settings.AUTO_INSTRUMENT:
            # Best-effort: a missing/incompatible backend must not prevent
            # Django from starting.
            try:
                patch_db()
            except Exception:
                log.debug('failed to patch Django built-in database')
            try:
                patch_template()
            except Exception:
                log.debug('failed to patch Django built-in template engine')
================================================
FILE: aws_xray_sdk/ext/django/conf.py
================================================
import os
from django.conf import settings as django_settings
from django.test.signals import setting_changed
# Default values for every supported X-Ray setting. User settings in
# settings.py override these, and environment variables override both
# (see SUPPORTED_ENV_VARS and XRaySettings.__getattr__).
DEFAULTS = {
    'AWS_XRAY_DAEMON_ADDRESS': '127.0.0.1:2000',
    'AUTO_INSTRUMENT': True,
    'AWS_XRAY_CONTEXT_MISSING': 'LOG_ERROR',
    'PLUGINS': (),
    'SAMPLING': True,
    'SAMPLING_RULES': None,
    'SAMPLER': None,
    'AWS_XRAY_TRACING_NAME': None,
    'DYNAMIC_NAMING': None,
    'STREAMING_THRESHOLD': None,
    'MAX_TRACE_BACK': None,
    'STREAM_SQL': True,
    'PATCH_MODULES': [],
    'AUTO_PATCH_PARENT_SEGMENT_NAME': None,
    'IGNORE_MODULE_PATTERNS': [],
    'URLS_AS_ANNOTATION': 'LAMBDA',  # 3 valid values, NONE -> don't ever, LAMBDA -> only for AWS Lambdas, ALL -> every time
}
# Name of the Django settings dict that holds X-Ray configuration.
XRAY_NAMESPACE = 'XRAY_RECORDER'

# Settings that may be overridden through environment variables; for these,
# an env var takes precedence over both settings.py and the defaults.
SUPPORTED_ENV_VARS = ('AWS_XRAY_DAEMON_ADDRESS',
                      'AWS_XRAY_CONTEXT_MISSING',
                      'AWS_XRAY_TRACING_NAME',
                      )


class XRaySettings:
    """
    A object of Django settings to easily modify certain fields.
    The precedence for configurations at different places is as follows:
    environment variables > user settings in settings.py > default settings
    """
    def __init__(self, user_settings=None):
        self.defaults = DEFAULTS
        if user_settings:
            self._user_settings = user_settings

    @property
    def user_settings(self):
        # Lazily pull the XRAY_RECORDER dict from Django settings the first
        # time it is needed.
        if not hasattr(self, '_user_settings'):
            self._user_settings = getattr(django_settings, XRAY_NAMESPACE, {})
        return self._user_settings

    def __getattr__(self, attr):
        if attr not in self.defaults:
            raise AttributeError('Invalid setting: %s' % attr)

        user_value = self.user_settings.get(attr, None)
        if user_value is not None:
            # A user-provided value wins over the default, but an env var
            # still wins over the user value for the supported settings.
            if attr in SUPPORTED_ENV_VARS:
                return os.getenv(attr, user_value)
            return user_value

        if attr in SUPPORTED_ENV_VARS:
            return os.getenv(attr, self.defaults[attr])
        return self.defaults[attr]
# Module-level settings singleton used by the Django extension.
settings = XRaySettings()


def reload_settings(*args, **kwargs):
    """
    Reload X-Ray user settings upon Django server hot restart.

    Connected to Django's setting_changed signal; rebuilds the module-level
    settings object whenever the XRAY_RECORDER namespace changes.
    """
    global settings
    changed_setting = kwargs['setting']
    new_value = kwargs['value']
    if changed_setting == XRAY_NAMESPACE:
        settings = XRaySettings(new_value)


setting_changed.connect(reload_settings)
================================================
FILE: aws_xray_sdk/ext/django/db.py
================================================
import copy
import logging
import importlib
from django.db import connections
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.ext.dbapi2 import XRayTracedCursor
log = logging.getLogger(__name__)
def patch_db():
    """Patch the cursor factories of every configured Django connection class."""
    for connection in connections.all():
        backend_module = importlib.import_module(connection.__module__)
        _patch_conn(getattr(backend_module, connection.__class__.__name__))
class DjangoXRayTracedCursor(XRayTracedCursor):
    """Traced cursor that also records the sanitized query text when
    ``stream_sql`` is enabled on the recorder."""

    def _with_sanitized_query(self, sanitized, operation, *args, **kwargs):
        """Run `operation`, temporarily exposing `sanitized` as the
        'sanitized_query' metadata when SQL streaming is enabled.

        Reads ``stream_sql`` once up front: the previous per-method copies
        re-read the flag after the call, so a configuration change mid-call
        could raise NameError (restore without a saved copy) or leak the
        query into later metadata. The restore also now runs in ``finally``
        so a failing query cannot leave the metadata dict polluted.
        """
        if not xray_recorder.stream_sql:
            return operation(*args, **kwargs)

        previous_meta = copy.copy(self._xray_meta)
        self._xray_meta['sanitized_query'] = sanitized
        try:
            return operation(*args, **kwargs)
        finally:
            self._xray_meta = previous_meta

    def execute(self, query, *args, **kwargs):
        return self._with_sanitized_query(
            query, super().execute, query, *args, **kwargs)

    def executemany(self, query, *args, **kwargs):
        return self._with_sanitized_query(
            query, super().executemany, query, *args, **kwargs)

    def callproc(self, proc, args):
        return self._with_sanitized_query(proc, super().callproc, proc, args)
def _patch_cursor(cursor_name, conn):
    """Replace `cursor_name` on the connection class with a traced factory."""
    attr = '_xray_original_{}'.format(cursor_name)
    if hasattr(conn, attr):
        log.debug('django built-in db {} already patched'.format(cursor_name))
        return
    if not hasattr(conn, cursor_name):
        log.debug('django built-in db does not have {}'.format(cursor_name))
        return

    # Stash the original factory so patching stays idempotent.
    setattr(conn, attr, getattr(conn, cursor_name))

    meta = {}
    if hasattr(conn, 'vendor'):
        meta['database_type'] = conn.vendor

    def cursor(self, *args, **kwargs):
        host = None
        user = None
        if hasattr(self, 'settings_dict'):
            db_settings = self.settings_dict
            host = db_settings.get('HOST', None)
            user = db_settings.get('USER', None)
        if host:
            meta['name'] = host
        if user:
            meta['user'] = user

        original_cursor = getattr(self, attr)(*args, **kwargs)
        return DjangoXRayTracedCursor(original_cursor, meta)

    setattr(conn, cursor_name, cursor)
def _patch_conn(conn):
    """Instrument both cursor factories exposed by Django connections."""
    for factory_name in ('cursor', 'chunked_cursor'):
        _patch_cursor(factory_name, conn)
================================================
FILE: aws_xray_sdk/ext/django/middleware.py
================================================
import logging
from .conf import settings
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.models import http
from aws_xray_sdk.core.utils import stacktrace
from aws_xray_sdk.ext.util import calculate_sampling_decision, \
calculate_segment_name, construct_xray_header, prepare_response_header
from aws_xray_sdk.core.lambda_launcher import check_in_lambda, LambdaContext
log = logging.getLogger(__name__)
# Django will rewrite some http request headers.
# Keys into request.META for the values the middleware records.
USER_AGENT_KEY = 'HTTP_USER_AGENT'
X_FORWARDED_KEY = 'HTTP_X_FORWARDED_FOR'
REMOTE_ADDR_KEY = 'REMOTE_ADDR'
HOST_KEY = 'HTTP_HOST'
# Response header key, checked via HttpResponse.has_header (case-insensitive).
CONTENT_LENGTH_KEY = 'content-length'
class XRayMiddleware:
    """
    Middleware that wraps each incoming request to a segment.
    """
    def __init__(self, get_response):
        # Standard Django middleware protocol; get_response calls the next
        # layer (or the view) in the chain.
        self.get_response = get_response
        self.in_lambda_ctx = False
        # Inside AWS Lambda the function segment is owned by the runtime,
        # so the middleware records subsegments instead of segments.
        if check_in_lambda() and type(xray_recorder.context) == LambdaContext:
            self.in_lambda_ctx = True

    def _urls_as_annotation(self):
        # Whether request/response facts should also be recorded as
        # searchable annotations, per the URLS_AS_ANNOTATION setting
        # (NONE / LAMBDA / ALL).
        if settings.URLS_AS_ANNOTATION == "LAMBDA" and self.in_lambda_ctx:
            return True
        elif settings.URLS_AS_ANNOTATION == "ALL":
            return True
        return False

    # hooks for django version >= 1.10
    def __call__(self, request):
        sampling_decision = None
        meta = request.META
        xray_header = construct_xray_header(meta)
        # a segment name is required
        name = calculate_segment_name(meta.get(HOST_KEY), xray_recorder)

        # Request facts used to evaluate sampling rules.
        sampling_req = {
            'host': meta.get(HOST_KEY),
            'method': request.method,
            'path': request.path,
            'service': name,
        }

        sampling_decision = calculate_sampling_decision(
            trace_header=xray_header,
            recorder=xray_recorder,
            sampling_req=sampling_req,
        )

        if self.in_lambda_ctx:
            segment = xray_recorder.begin_subsegment(name)
            # X-Ray can't search/filter subsegments on URL but it can search annotations
            # So for lambda to be able to filter by annotation we add these as annotations
        else:
            segment = xray_recorder.begin_segment(
                name=name,
                traceid=xray_header.root,
                parent_id=xray_header.parent,
                sampling=sampling_decision,
            )

        # Keep the inbound header so the response can echo it back.
        segment.save_origin_trace_header(xray_header)
        segment.put_http_meta(http.URL, request.build_absolute_uri())
        segment.put_http_meta(http.METHOD, request.method)
        if self._urls_as_annotation():
            segment.put_annotation(http.URL, request.build_absolute_uri())
            segment.put_annotation(http.METHOD, request.method)

        if meta.get(USER_AGENT_KEY):
            segment.put_http_meta(http.USER_AGENT, meta.get(USER_AGENT_KEY))
            if self._urls_as_annotation():
                segment.put_annotation(http.USER_AGENT, meta.get(USER_AGENT_KEY))
        if meta.get(X_FORWARDED_KEY):
            # X_FORWARDED_FOR may come from untrusted source so we
            # need to set the flag to true as additional information
            segment.put_http_meta(http.CLIENT_IP, meta.get(X_FORWARDED_KEY))
            segment.put_http_meta(http.X_FORWARDED_FOR, True)
            if self._urls_as_annotation():
                segment.put_annotation(http.CLIENT_IP, meta.get(X_FORWARDED_KEY))
                segment.put_annotation(http.X_FORWARDED_FOR, True)
        elif meta.get(REMOTE_ADDR_KEY):
            segment.put_http_meta(http.CLIENT_IP, meta.get(REMOTE_ADDR_KEY))
            if self._urls_as_annotation():
                segment.put_annotation(http.CLIENT_IP, meta.get(REMOTE_ADDR_KEY))

        # Run the rest of the middleware chain and the view.
        response = self.get_response(request)
        segment.put_http_meta(http.STATUS, response.status_code)
        if self._urls_as_annotation():
            segment.put_annotation(http.STATUS, response.status_code)

        if response.has_header(CONTENT_LENGTH_KEY):
            length = int(response[CONTENT_LENGTH_KEY])
            segment.put_http_meta(http.CONTENT_LENGTH, length)
            if self._urls_as_annotation():
                segment.put_annotation(http.CONTENT_LENGTH, length)
        # Echo the trace header back to the caller.
        response[http.XRAY_HEADER] = prepare_response_header(xray_header, segment)

        if self.in_lambda_ctx:
            xray_recorder.end_subsegment()
        else:
            xray_recorder.end_segment()

        return response

    def process_exception(self, request, exception):
        """
        Add exception information and fault flag to the
        current segment.
        """
        if self.in_lambda_ctx:
            segment = xray_recorder.current_subsegment()
        else:
            segment = xray_recorder.current_segment()
        segment.put_http_meta(http.STATUS, 500)
        stack = stacktrace.get_stacktrace(limit=xray_recorder._max_trace_back)
        segment.add_exception(exception, stack)
================================================
FILE: aws_xray_sdk/ext/django/templates.py
================================================
import logging
from django.template import Template
from django.utils.safestring import SafeString
from aws_xray_sdk.core import xray_recorder
log = logging.getLogger(__name__)
def patch_template():
    """Patch Django's Template.render so rendering opens a named subsegment."""
    attr = '_xray_original_render'
    if getattr(Template, attr, None):
        log.debug("already patched")
        return

    # Stash the original so patching stays idempotent.
    setattr(Template, attr, Template.render)

    @xray_recorder.capture('template_render')
    def xray_render(self, context):
        template_name = self.name or getattr(context, 'template_name', None)
        if template_name:
            name = str(template_name)
            # SafeString are not properly serialized by jsonpickle,
            # turn them back to str by adding a non-safe str.
            if isinstance(name, SafeString):
                name += ''
            current = xray_recorder.current_subsegment()
            if current:
                current.name = name

        return Template._xray_original_render(self, context)

    Template.render = xray_render
================================================
FILE: aws_xray_sdk/ext/flask/__init__.py
================================================
================================================
FILE: aws_xray_sdk/ext/flask/middleware.py
================================================
import flask.templating
from flask import request
from aws_xray_sdk.core.models import http
from aws_xray_sdk.core.utils import stacktrace
from aws_xray_sdk.ext.util import calculate_sampling_decision, \
calculate_segment_name, construct_xray_header, prepare_response_header
from aws_xray_sdk.core.lambda_launcher import check_in_lambda, LambdaContext
class XRayMiddleware:
    """
    Middleware that wraps each incoming Flask request in an X-Ray
    (sub)segment and records request/response metadata on it.
    """
    def __init__(self, app, recorder):
        self.app = app
        self.app.logger.info("initializing xray middleware")

        self._recorder = recorder
        # Register request lifecycle hooks on the Flask app.
        self.app.before_request(self._before_request)
        self.app.after_request(self._after_request)
        self.app.teardown_request(self._teardown_request)
        self.in_lambda_ctx = False

        # Inside AWS Lambda the function segment is owned by the runtime,
        # so the middleware records subsegments instead of segments.
        if check_in_lambda() and type(self._recorder.context) == LambdaContext:
            self.in_lambda_ctx = True

        _patch_render(recorder)

    def _before_request(self):
        # Open a (sub)segment for the incoming request and attach request
        # metadata (URL, method, user agent, client IP).
        headers = request.headers

        xray_header = construct_xray_header(headers)
        req = request._get_current_object()

        name = calculate_segment_name(req.host, self._recorder)

        # Request facts used to evaluate sampling rules.
        sampling_req = {
            'host': req.host,
            'method': req.method,
            'path': req.path,
            'service': name,
        }

        sampling_decision = calculate_sampling_decision(
            trace_header=xray_header,
            recorder=self._recorder,
            sampling_req=sampling_req,
        )

        if self.in_lambda_ctx:
            segment = self._recorder.begin_subsegment(name)
        else:
            segment = self._recorder.begin_segment(
                name=name,
                traceid=xray_header.root,
                parent_id=xray_header.parent,
                sampling=sampling_decision,
            )

        # Keep the inbound header so the response can echo it back.
        segment.save_origin_trace_header(xray_header)
        segment.put_http_meta(http.URL, req.base_url)
        segment.put_http_meta(http.METHOD, req.method)
        segment.put_http_meta(http.USER_AGENT, headers.get('User-Agent'))

        # Prefer a forwarded address when behind a proxy; flag it as
        # forwarded since it comes from an untrusted source.
        client_ip = headers.get('X-Forwarded-For') or headers.get('HTTP_X_FORWARDED_FOR')
        if client_ip:
            segment.put_http_meta(http.CLIENT_IP, client_ip)
            segment.put_http_meta(http.X_FORWARDED_FOR, True)
        else:
            segment.put_http_meta(http.CLIENT_IP, req.remote_addr)

    def _after_request(self, response):
        # Record response status/size and emit the trace header back.
        if self.in_lambda_ctx:
            segment = self._recorder.current_subsegment()
        else:
            segment = self._recorder.current_segment()
        segment.put_http_meta(http.STATUS, response.status_code)

        origin_header = segment.get_origin_trace_header()
        resp_header_str = prepare_response_header(origin_header, segment)
        response.headers[http.XRAY_HEADER] = resp_header_str

        cont_len = response.headers.get('Content-Length')
        if cont_len:
            segment.put_http_meta(http.CONTENT_LENGTH, int(cont_len))
        return response

    def _teardown_request(self, exception):
        # Runs even when a handler raised: attach exception info (if any)
        # and always close the (sub)segment.
        segment = None
        try:
            if self.in_lambda_ctx:
                segment = self._recorder.current_subsegment()
            else:
                segment = self._recorder.current_segment()
        except Exception:
            # No active entity (e.g. request was never instrumented).
            pass

        if not segment:
            return

        if exception:
            segment.put_http_meta(http.STATUS, 500)
            stack = stacktrace.get_stacktrace(limit=self._recorder._max_trace_back)
            segment.add_exception(exception, stack)

        if self.in_lambda_ctx:
            self._recorder.end_subsegment()
        else:
            self._recorder.end_segment()
def _patch_render(recorder):
    """Wrap ``flask.templating._render`` so template rendering is captured
    as an X-Ray subsegment, named after the template when it has a name."""
    original_render = flask.templating._render

    @recorder.capture('template_render')
    def render_with_tracing(template, context, app):
        # Rename the auto-created subsegment to the template's own name.
        if template.name:
            recorder.current_subsegment().name = template.name
        return original_render(template, context, app)

    flask.templating._render = render_with_tracing
================================================
FILE: aws_xray_sdk/ext/flask_sqlalchemy/__init__.py
================================================
================================================
FILE: aws_xray_sdk/ext/flask_sqlalchemy/query.py
================================================
from builtins import super
from flask_sqlalchemy.model import Model
from sqlalchemy.orm.session import sessionmaker
from flask_sqlalchemy import SQLAlchemy, BaseQuery, _SessionSignalEvents, get_state
from aws_xray_sdk.ext.sqlalchemy.query import XRaySession, XRayQuery
from aws_xray_sdk.ext.sqlalchemy.util.decorators import xray_on_call, decorate_all_functions
@decorate_all_functions(xray_on_call)
class XRayBaseQuery(BaseQuery):
    # NOTE: executed at class-definition time, this rewrites BaseQuery's own
    # bases so that *every* BaseQuery (not just XRayBaseQuery) inherits the
    # traced XRayQuery behavior. Intentional, but a global side effect.
    BaseQuery.__bases__ = (XRayQuery,)
class XRaySignallingSession(XRaySession):
    """Flask-SQLAlchemy's signalling session, re-based on :class:`XRaySession`.

    .. versionadded:: 2.0
    .. versionadded:: 2.1
    The signalling session is the default session that Flask-SQLAlchemy
    uses. It extends the default session system with bind selection and
    modification tracking.
    If you want to use a different session you can override the
    :meth:`SQLAlchemy.create_session` function.
    The `binds` option was added, which allows a session to be joined
    to an external transaction.
    """
    def __init__(self, db, autocommit=False, autoflush=True, **options):
        #: The application that this session belongs to.
        self.app = app = db.get_app()
        track_modifications = app.config['SQLALCHEMY_TRACK_MODIFICATIONS']
        bind = options.pop('bind', None) or db.engine
        binds = options.pop('binds', db.get_binds(app))
        # Register modification-tracking events unless explicitly disabled.
        if track_modifications is None or track_modifications:
            _SessionSignalEvents.register(self)
        XRaySession.__init__(
            self, autocommit=autocommit, autoflush=autoflush,
            bind=bind, binds=binds, **options
        )
    def get_bind(self, mapper=None, clause=None):
        """Resolve the engine for *mapper*, honouring per-table bind keys."""
        # mapper is None if someone tries to just get a connection
        if mapper is not None:
            info = getattr(mapper.mapped_table, 'info', {})
            bind_key = info.get('bind_key')
            if bind_key is not None:
                state = get_state(self.app)
                return state.db.get_engine(self.app, bind=bind_key)
        return XRaySession.get_bind(self, mapper, clause)
class XRayFlaskSqlAlchemy(SQLAlchemy):
    """Drop-in replacement for :class:`SQLAlchemy` whose sessions and
    queries are traced by AWS X-Ray."""

    def __init__(self, app=None, use_native_unicode=True, session_options=None,
                 metadata=None, query_class=XRayBaseQuery, model_class=Model):
        # Same construction as the stock SQLAlchemy object, but defaulting
        # to the traced query class.
        super().__init__(
            app, use_native_unicode, session_options,
            metadata, query_class, model_class,
        )

    def create_session(self, options):
        """Build a sessionmaker producing X-Ray traced signalling sessions."""
        return sessionmaker(class_=XRaySignallingSession, db=self, **options)
================================================
FILE: aws_xray_sdk/ext/httplib/__init__.py
================================================
from .patch import patch, unpatch, add_ignored, reset_ignored
__all__ = ['patch', 'unpatch', 'add_ignored', 'reset_ignored']
================================================
FILE: aws_xray_sdk/ext/httplib/patch.py
================================================
import fnmatch
from collections import namedtuple
import urllib3.connection
import wrapt
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.exceptions.exceptions import SegmentNotFoundException
from aws_xray_sdk.core.models import http
from aws_xray_sdk.core.patcher import _PATCHED_MODULES
from aws_xray_sdk.ext.util import get_hostname, inject_trace_header, strip_url, unwrap
httplib_client_module = 'http.client'
import http.client as httplib
_XRAY_PROP = '_xray_prop'
_XRay_Data = namedtuple('xray_data', ['method', 'host', 'url'])
_XRay_Ignore = namedtuple('xray_ignore', ['subclass', 'hostname', 'urls'])
# A flag indicates whether this module is X-Ray patched or not
PATCH_FLAG = '__xray_patched'
# Calls that should be ignored
_XRAY_IGNORE = set()
def add_ignored(subclass=None, hostname=None, urls=None):
    """Register a rule describing outgoing httplib calls that must not be traced.

    A rule matches on any combination of fully-qualified connection class
    name, hostname glob pattern, and exact URL paths.
    """
    global _XRAY_IGNORE
    # Ignore the call entirely when no criterion was supplied.
    if subclass is None and hostname is None and urls is None:
        return
    normalized_urls = tuple(urls) if urls is not None else None
    _XRAY_IGNORE.add(_XRay_Ignore(subclass=subclass, hostname=hostname, urls=normalized_urls))
def reset_ignored():
    """Drop every registered ignore rule, then restore the SDK defaults."""
    global _XRAY_IGNORE
    _XRAY_IGNORE.clear()
    _ignored_add_default()
def _ignored_add_default():
    """Install the SDK's built-in ignore rules."""
    # Never trace the SDK's own centralized-sampling poller calls
    # (GetSamplingRules / SamplingTargets issued through botocore).
    add_ignored(subclass='botocore.awsrequest.AWSHTTPConnection',
                urls=['/GetSamplingRules', '/SamplingTargets'])


# Make sure the default rules are present at import time.
_ignored_add_default()
def http_response_processor(wrapped, instance, args, kwargs, return_value,
                            exception, subsegment, stack):
    """Record HTTP metadata for a traced ``getresponse()`` call."""
    data = getattr(instance, _XRAY_PROP, None)
    if data is None:
        return
    subsegment.put_http_meta(http.METHOD, data.method)
    subsegment.put_http_meta(http.URL, strip_url(data.url))
    if return_value:
        subsegment.put_http_meta(http.STATUS, return_value.status)
        # Tag the response object so later read() calls can also be traced.
        setattr(return_value, _XRAY_PROP, _XRay_Data('READ', data.host, data.url))
    if exception:
        subsegment.add_exception(exception, stack)
def _xray_traced_http_getresponse(wrapped, instance, args, kwargs):
    """Trace ``HTTPConnection.getresponse()`` when the request itself was traced."""
    data = getattr(instance, _XRAY_PROP, None)
    if data is None:
        # Request was ignored or untraced — call straight through.
        return wrapped(*args, **kwargs)
    return xray_recorder.record_subsegment(
        wrapped, instance, args, kwargs,
        name=get_hostname(data.url),
        namespace='remote',
        meta_processor=http_response_processor,
    )
def http_send_request_processor(wrapped, instance, args, kwargs, return_value,
                                exception, subsegment, stack):
    """Record method/URL (and any exception) for a traced ``_send_request``."""
    data = getattr(instance, _XRAY_PROP, None)
    if data is None:
        return
    # The attribute is left in place intentionally — the same connection can
    # be read from multiple times afterwards.
    subsegment.put_http_meta(http.METHOD, data.method)
    subsegment.put_http_meta(http.URL, strip_url(data.url))
    if exception:
        subsegment.add_exception(exception, stack)
def _ignore_request(instance, hostname, url):
    """Return True when any registered ignore rule matches this outgoing call."""
    global _XRAY_IGNORE
    cls = instance.__class__
    module = cls.__module__
    # Build the fully-qualified class name; builtins stay unqualified.
    if module is None or module == str.__class__.__module__:
        qualified_name = cls.__name__
    else:
        qualified_name = '{}.{}'.format(module, cls.__name__)
    for rule in _XRAY_IGNORE:
        # A criterion left as None matches anything.
        if rule.subclass is not None and qualified_name != rule.subclass:
            continue
        if rule.hostname is not None and not fnmatch.fnmatch(hostname, rule.hostname):
            continue
        if rule.urls is not None and url not in rule.urls:
            continue
        return True
    return False
def _send_request(wrapped, instance, args, kwargs):
    """Wrapper for ``HTTPConnection._send_request`` that traces the outgoing call."""
    # decompose_args mirrors _send_request's signature only to pull out
    # method/url/headers; note it forwards the ORIGINAL args/kwargs to wrapped.
    def decompose_args(method, url, body, headers, encode_chunked=False):
        # skip any ignored requests
        if _ignore_request(instance, instance.host, url):
            return wrapped(*args, **kwargs)
        # Only injects headers when the subsegment for the outgoing
        # calls are opened successfully.
        subsegment = None
        try:
            subsegment = xray_recorder.current_subsegment()
        except SegmentNotFoundException:
            pass
        if subsegment:
            inject_trace_header(headers, subsegment)
        # Determine the scheme by inspecting the connection's SSL context;
        # urllib3 and http.client store it under different attribute names.
        if issubclass(instance.__class__, urllib3.connection.HTTPSConnection):
            ssl_cxt = getattr(instance, 'ssl_context', None)
        elif issubclass(instance.__class__, httplib.HTTPSConnection):
            ssl_cxt = getattr(instance, '_context', None)
        else:
            # In this case, the patcher can't determine which module the connection instance is from.
            # We default to it to check ssl_context but may be None so that the default scheme would be
            # (and may falsely be) http.
            ssl_cxt = getattr(instance, 'ssl_context', None)
        scheme = 'https' if ssl_cxt and type(ssl_cxt).__name__ == 'SSLContext' else 'http'
        xray_url = '{}://{}{}'.format(scheme, instance.host, url)
        xray_data = _XRay_Data(method, instance.host, xray_url)
        # Stash the request info on the connection so getresponse()/read()
        # wrappers can reuse it.
        setattr(instance, _XRAY_PROP, xray_data)
        # we add a segment here in case connect fails
        return xray_recorder.record_subsegment(
            wrapped, instance, args, kwargs,
            name=get_hostname(xray_data.url),
            namespace='remote',
            meta_processor=http_send_request_processor
        )
    return decompose_args(*args, **kwargs)
def http_read_processor(wrapped, instance, args, kwargs, return_value,
                        exception, subsegment, stack):
    """Record HTTP metadata for a traced ``HTTPResponse.read()`` call."""
    data = getattr(instance, _XRAY_PROP, None)
    if data is None:
        return
    # The attribute stays in place — a response may be read more than once.
    subsegment.put_http_meta(http.METHOD, data.method)
    subsegment.put_http_meta(http.URL, strip_url(data.url))
    subsegment.put_http_meta(http.STATUS, instance.status)
    if exception:
        subsegment.add_exception(exception, stack)
def _xray_traced_http_client_read(wrapped, instance, args, kwargs):
    """Trace ``HTTPResponse.read()`` when the originating request was traced."""
    data = getattr(instance, _XRAY_PROP, None)
    if data is None:
        return wrapped(*args, **kwargs)
    return xray_recorder.record_subsegment(
        wrapped, instance, args, kwargs,
        name=get_hostname(data.url),
        namespace='remote',
        meta_processor=http_read_processor,
    )
def patch():
    """Instrument the standard library ``http.client`` module for X-Ray tracing."""
    # A marker attribute keeps repeated patch() calls from double-wrapping.
    if getattr(httplib, PATCH_FLAG, False):
        return
    setattr(httplib, PATCH_FLAG, True)

    wrappers = (
        ('HTTPConnection._send_request', _send_request),
        ('HTTPConnection.getresponse', _xray_traced_http_getresponse),
        ('HTTPResponse.read', _xray_traced_http_client_read),
    )
    for attribute, wrapper in wrappers:
        wrapt.wrap_function_wrapper(httplib_client_module, attribute, wrapper)
def unpatch():
    """Remove the X-Ray wrappers installed by :func:`patch`.

    This operation is idempotent.
    """
    _PATCHED_MODULES.discard('httplib')
    setattr(httplib, PATCH_FLAG, False)
    # _send_request encapsulates putrequest, putheader[s], and endheaders.
    targets = (
        (httplib.HTTPConnection, '_send_request'),
        (httplib.HTTPConnection, 'getresponse'),
        (httplib.HTTPResponse, 'read'),
    )
    for owner, attribute in targets:
        unwrap(owner, attribute)
================================================
FILE: aws_xray_sdk/ext/httpx/__init__.py
================================================
from .patch import patch
__all__ = ['patch']
================================================
FILE: aws_xray_sdk/ext/httpx/patch.py
================================================
import httpx
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.models import http
from aws_xray_sdk.ext.util import inject_trace_header, get_hostname
def patch():
    """Replace httpx's client classes with X-Ray instrumented subclasses.

    Clients created after patching route their requests through a tracing
    transport; existing client instances are not affected.
    """
    httpx.Client = _InstrumentedClient
    httpx.AsyncClient = _InstrumentedAsyncClient
    # Also swap the class referenced by httpx's internal _api module,
    # which backs the module-level helpers (httpx.get(), httpx.post(), ...).
    httpx._api.Client = _InstrumentedClient
class _InstrumentedClient(httpx.Client):
    """``httpx.Client`` whose transport records each request in X-Ray."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Keep a handle to the original transport, then wrap it so every
        # request is recorded as a subsegment.
        self._original_transport = self._transport
        self._transport = SyncInstrumentedTransport(self._transport)
class _InstrumentedAsyncClient(httpx.AsyncClient):
    """``httpx.AsyncClient`` whose transport records each request in X-Ray."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Keep a handle to the original transport, then wrap it so every
        # request is recorded as a subsegment.
        self._original_transport = self._transport
        self._transport = AsyncInstrumentedTransport(self._transport)
class SyncInstrumentedTransport(httpx.BaseTransport):
    """Transport wrapper that records each request as a 'remote' subsegment."""

    def __init__(self, transport: httpx.BaseTransport):
        self._wrapped_transport = transport

    def handle_request(self, request: httpx.Request) -> httpx.Response:
        host = get_hostname(str(request.url))
        with xray_recorder.in_subsegment(host, namespace="remote") as subsegment:
            if subsegment is not None:
                subsegment.put_http_meta(http.METHOD, request.method)
                # Strip credentials, query string and fragment before
                # recording the URL on the subsegment.
                sanitized_url = request.url.copy_with(
                    password=None, query=None, fragment=None)
                subsegment.put_http_meta(http.URL, str(sanitized_url))
                inject_trace_header(request.headers, subsegment)
            response = self._wrapped_transport.handle_request(request)
            if subsegment is not None:
                subsegment.put_http_meta(http.STATUS, response.status_code)
            return response
class AsyncInstrumentedTransport(httpx.AsyncBaseTransport):
    """Async transport wrapper that records each request as a 'remote' subsegment."""

    def __init__(self, transport: httpx.AsyncBaseTransport):
        self._wrapped_transport = transport

    async def handle_async_request(self, request: httpx.Request) -> httpx.Response:
        host = get_hostname(str(request.url))
        async with xray_recorder.in_subsegment_async(host, namespace="remote") as subsegment:
            if subsegment is not None:
                subsegment.put_http_meta(http.METHOD, request.method)
                # Strip credentials, query string and fragment before
                # recording the URL on the subsegment.
                sanitized_url = request.url.copy_with(
                    password=None, query=None, fragment=None)
                subsegment.put_http_meta(http.URL, str(sanitized_url))
                inject_trace_header(request.headers, subsegment)
            response = await self._wrapped_transport.handle_async_request(request)
            if subsegment is not None:
                subsegment.put_http_meta(http.STATUS, response.status_code)
            return response
================================================
FILE: aws_xray_sdk/ext/mysql/__init__.py
================================================
from .patch import patch
__all__ = ['patch']
================================================
FILE: aws_xray_sdk/ext/mysql/patch.py
================================================
import wrapt
import mysql.connector
from aws_xray_sdk.ext.dbapi2 import XRayTracedConn
MYSQL_ATTR = {
'_host': 'name',
'_user': 'user',
}
def patch():
    """Trace mysql.connector connections by wrapping ``connect()``."""
    wrapt.wrap_function_wrapper('mysql.connector', 'connect', _xray_traced_connect)
    # Keep the legacy `Connect` alias pointing at the (now wrapped) connect.
    if hasattr(mysql.connector, 'Connect'):
        mysql.connector.Connect = mysql.connector.connect
def _xray_traced_connect(wrapped, instance, args, kwargs):
    """Open the connection, then return it wrapped so queries emit subsegments."""
    conn = wrapped(*args, **kwargs)
    # Map the driver's private attributes onto X-Ray metadata keys.
    meta = {key: getattr(conn, attr)
            for attr, key in MYSQL_ATTR.items() if hasattr(conn, attr)}
    version = sanitize_db_ver(getattr(conn, '_server_version', None))
    if version:
        meta['database_version'] = version
    return XRayTracedConn(conn, meta)
def sanitize_db_ver(raw):
    """Render a version tuple like ``(5, 7, 21)`` as ``'5.7.21'``.

    Any value that is not a non-empty tuple is returned unchanged.
    """
    if isinstance(raw, tuple) and raw:
        return '.'.join(str(part) for part in raw)
    return raw
================================================
FILE: aws_xray_sdk/ext/pg8000/README.md
================================================
## Requirements
This instrumentation is only compatible with `pg8000 <= 1.20.0`.
================================================
FILE: aws_xray_sdk/ext/pg8000/__init__.py
================================================
from .patch import patch, unpatch
__all__ = ['patch', 'unpatch']
================================================
FILE: aws_xray_sdk/ext/pg8000/patch.py
================================================
import pg8000
import wrapt
from aws_xray_sdk.ext.dbapi2 import XRayTracedConn
from aws_xray_sdk.core.patcher import _PATCHED_MODULES
from aws_xray_sdk.ext.util import unwrap
def patch():
    """Wrap ``pg8000.connect`` so new connections are traced by X-Ray."""
    wrapt.wrap_function_wrapper('pg8000', 'connect', _xray_traced_connect)
def _xray_traced_connect(wrapped, instance, args, kwargs):
    """Open the pg8000 connection and return it wrapped with tracing metadata."""
    conn = wrapped(*args, **kwargs)
    meta = {
        'database_type': 'PostgreSQL',
        # pg8000 (<= 1.20.0) stores the user name as bytes.
        'user': conn.user.decode('utf-8'),
        'driver_version': 'Pg8000',
    }
    version = getattr(conn, '_server_version', None)
    if version:
        meta['database_version'] = str(version)
    return XRayTracedConn(conn, meta)
def unpatch():
    """Remove the X-Ray wrapper from ``pg8000.connect``.

    This operation is idempotent.
    """
    _PATCHED_MODULES.discard('pg8000')
    unwrap(pg8000, 'connect')
================================================
FILE: aws_xray_sdk/ext/psycopg/__init__.py
================================================
from .patch import patch
__all__ = ['patch']
================================================
FILE: aws_xray_sdk/ext/psycopg/patch.py
================================================
import wrapt
from operator import methodcaller
from aws_xray_sdk.ext.dbapi2 import XRayTracedConn
def patch():
    """Trace psycopg 3 connections, including those created through a pool."""
    targets = (
        ('psycopg', 'connect'),
        ('psycopg_pool.pool', 'ConnectionPool._connect'),
    )
    for module, name in targets:
        wrapt.wrap_function_wrapper(module, name, _xray_traced_connect)
def _xray_traced_connect(wrapped, instance, args, kwargs):
    """Open the psycopg 3 connection and return it wrapped with tracing metadata."""
    conn = wrapped(*args, **kwargs)
    # The DSN is space-separated 'key=value' tokens; taking [-1] keeps the
    # text after the last '=' when a value itself contains '='.
    dsn_fields = {}
    for token in conn.info.dsn.split(' '):
        pieces = token.split('=')
        dsn_fields[pieces[0]] = pieces[-1]
    user = dsn_fields.get('user', 'unknown')
    meta = {
        'database_type': 'PostgreSQL',
        'url': 'postgresql://{}@{}:{}/{}'.format(
            user,
            dsn_fields.get('host', 'unknown'),
            dsn_fields.get('port', 'unknown'),
            dsn_fields.get('dbname', 'unknown'),
        ),
        'user': user,
        'database_version': str(conn.info.server_version),
        'driver_version': 'Psycopg 3'
    }
    return XRayTracedConn(conn, meta)
================================================
FILE: aws_xray_sdk/ext/psycopg2/__init__.py
================================================
from .patch import patch
__all__ = ['patch']
================================================
FILE: aws_xray_sdk/ext/psycopg2/patch.py
================================================
import copy
import re
import wrapt
from operator import methodcaller
from aws_xray_sdk.ext.dbapi2 import XRayTracedConn, XRayTracedCursor
def patch():
    """Instrument psycopg2.

    Traces new connections, and unwraps our proxy objects before they reach
    extension helpers that require genuine psycopg2 connections/cursors.
    """
    wrapt.wrap_function_wrapper('psycopg2', 'connect', _xray_traced_connect)
    unwrap_fixes = (
        ('psycopg2.extensions', 'register_type', _xray_register_type_fix),
        ('psycopg2.extensions', 'quote_ident', _xray_register_type_fix),
        ('psycopg2.extras', 'register_default_jsonb', _xray_register_default_jsonb_fix),
    )
    for module, name, wrapper in unwrap_fixes:
        wrapt.wrap_function_wrapper(module, name, wrapper)
def _xray_traced_connect(wrapped, instance, args, kwargs):
    """Open the psycopg2 connection and return it wrapped with tracing metadata."""
    conn = wrapped(*args, **kwargs)
    # conn.dsn is space-separated 'key=value' tokens; taking [-1] keeps the
    # text after the last '=' when a value itself contains '='.
    dsn_fields = {}
    for token in conn.dsn.split(' '):
        pieces = token.split('=')
        dsn_fields[pieces[0]] = pieces[-1]
    user = dsn_fields.get('user', 'unknown')
    meta = {
        'database_type': 'PostgreSQL',
        'url': 'postgresql://{}@{}:{}/{}'.format(
            user,
            dsn_fields.get('host', 'unknown'),
            dsn_fields.get('port', 'unknown'),
            dsn_fields.get('dbname', 'unknown'),
        ),
        'user': user,
        'database_version': str(conn.server_version),
        'driver_version': 'Psycopg 2'
    }
    return XRayTracedConn(conn, meta)
def _xray_register_type_fix(wrapped, instance, args, kwargs):
"""Send the actual connection or curser to register type."""
our_args = list(copy.copy(args))
if len(our_args) == 2 and isinstance(our_args[1], (XRayTracedConn, XRayTracedCursor)):
our_args[1] = our_args[1].__wrapped__
return wrapped(*our_args, **kwargs)
def _xray_register_default_jsonb_fix(wrapped, instance, args, kwargs):
our_kwargs = dict()
for key, value in kwargs.items():
if key == "conn_or_curs" and isinstance(value, (XRayTracedConn, XRayTracedCursor)):
# unwrap the connection or cursor to be sent to register_default_jsonb
value = value.__wrapped__
our_kwargs[key] = value
return wrapped(*args, **our_kwargs)
================================================
FILE: aws_xray_sdk/ext/pymongo/__init__.py
================================================
# Copyright © 2018 Clarity Movement Co. All rights reserved.
from .patch import patch
__all__ = ['patch']
================================================
FILE: aws_xray_sdk/ext/pymongo/patch.py
================================================
# Copyright © 2018 Clarity Movement Co. All rights reserved.
from pymongo import monitoring
from aws_xray_sdk.core import xray_recorder
class XrayCommandListener(monitoring.CommandListener):
    """
    A listener that traces all pymongo db commands to AWS Xray.
    Creates a subsegment for each mongo db command,
    named e.g. 'mydb@127.0.0.1:27017'.
    Records all available information provided by pymongo,
    except for `command` and `reply`. They may contain business secrets.
    If you insist to record them, specify `record_full_documents=True`.
    """
    def __init__(self, record_full_documents):
        super().__init__()
        # When True, the raw command and reply documents are stored as
        # subsegment metadata (may contain sensitive data).
        self.record_full_documents = record_full_documents
    def started(self, event):
        """Open a subsegment when a command starts; closed in succeeded/failed."""
        host, port = event.connection_id
        host_and_port_str = f'{host}:{port}'
        subsegment = xray_recorder.begin_subsegment(
            f'{event.database_name}@{host_and_port_str}', 'remote')
        subsegment.put_annotation('mongodb_command_name', event.command_name)
        subsegment.put_annotation('mongodb_connection_id', host_and_port_str)
        subsegment.put_annotation('mongodb_database_name', event.database_name)
        subsegment.put_annotation('mongodb_operation_id', event.operation_id)
        subsegment.put_annotation('mongodb_request_id', event.request_id)
        if self.record_full_documents:
            subsegment.put_metadata('mongodb_command', event.command)
    def succeeded(self, event):
        """Record duration (and optionally the reply), then close the subsegment."""
        subsegment = xray_recorder.current_subsegment()
        subsegment.put_annotation('mongodb_duration_micros', event.duration_micros)
        if self.record_full_documents:
            subsegment.put_metadata('mongodb_reply', event.reply)
        xray_recorder.end_subsegment()
    def failed(self, event):
        """Flag the subsegment as faulted, record the failure, and close it."""
        subsegment = xray_recorder.current_subsegment()
        subsegment.add_fault_flag()
        subsegment.put_annotation('mongodb_duration_micros', event.duration_micros)
        subsegment.put_metadata('failure', event.failure)
        xray_recorder.end_subsegment()
def patch(record_full_documents=False):
    """Register a global pymongo command listener that traces every command.

    Pass ``record_full_documents=True`` to also record command and reply
    documents as metadata.
    """
    # A marker attribute keeps repeated patch() calls from registering
    # more than one listener.
    if hasattr(monitoring, '_xray_enabled'):
        return
    setattr(monitoring, '_xray_enabled', True)
    monitoring.register(XrayCommandListener(record_full_documents))
================================================
FILE: aws_xray_sdk/ext/pymysql/__init__.py
================================================
from .patch import patch, unpatch
__all__ = ['patch', 'unpatch']
================================================
FILE: aws_xray_sdk/ext/pymysql/patch.py
================================================
import pymysql
import wrapt
from aws_xray_sdk.ext.dbapi2 import XRayTracedConn
from aws_xray_sdk.core.patcher import _PATCHED_MODULES
from aws_xray_sdk.ext.util import unwrap
def patch():
    """Trace PyMySQL connections by wrapping ``connect()``."""
    wrapt.wrap_function_wrapper('pymysql', 'connect', _xray_traced_connect)
    # Keep the legacy `Connect` alias pointing at the (now wrapped) connect.
    if hasattr(pymysql, 'Connect'):
        pymysql.Connect = pymysql.connect
def _xray_traced_connect(wrapped, instance, args, kwargs):
    """Open the PyMySQL connection and return it wrapped for tracing."""
    conn = wrapped(*args, **kwargs)
    meta = {
        'database_type': 'MySQL',
        # conn.user is stored as bytes by the driver.
        'user': conn.user.decode('utf-8'),
        'driver_version': 'PyMySQL',
    }
    version = sanitize_db_ver(getattr(conn, 'server_version', None))
    if version:
        meta['database_version'] = version
    return XRayTracedConn(conn, meta)
def sanitize_db_ver(raw):
    """Render a version tuple such as ``(5, 7, 21)`` as the dotted string
    ``'5.7.21'``; any other value is passed through unchanged."""
    if not isinstance(raw, tuple) or not raw:
        return raw
    return '.'.join(map(str, raw))
def unpatch():
    """Remove the X-Ray wrapper from ``pymysql.connect``.

    This operation is idempotent.
    """
    _PATCHED_MODULES.discard('pymysql')
    unwrap(pymysql, 'connect')
================================================
FILE: aws_xray_sdk/ext/pynamodb/__init__.py
================================================
from .patch import patch
__all__ = ['patch']
================================================
FILE: aws_xray_sdk/ext/pynamodb/patch.py
================================================
import json
import wrapt
import pynamodb
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.models import http
from aws_xray_sdk.ext.boto_utils import _extract_whitelisted_params
PYNAMODB4 = int(pynamodb.__version__.split('.')[0]) >= 4
if PYNAMODB4:
import botocore.httpsession
else:
import botocore.vendored.requests.sessions
def patch():
    """Patch PynamoDB so it generates subsegments when calling DynamoDB."""
    # PynamoDB >= 4 sends requests through botocore's URLLib3Session;
    # earlier versions go through botocore's vendored requests Session.
    if PYNAMODB4:
        session_module = botocore.httpsession
        module, name = 'botocore.httpsession', 'URLLib3Session.send'
    else:
        session_module = botocore.vendored.requests.sessions
        module, name = 'botocore.vendored.requests.sessions', 'Session.send'
    # A marker attribute keeps repeated patch() calls idempotent.
    if hasattr(session_module, '_xray_enabled'):
        return
    setattr(session_module, '_xray_enabled', True)
    wrapt.wrap_function_wrapper(module, name, _xray_traced_pynamodb)
def _xray_traced_pynamodb(wrapped, instance, args, kwargs):
    """Record a 'dynamodb' subsegment for DynamoDB requests; pass others through."""
    # The X-Amz-Target header is bytes shaped like b'DynamoDB_<ver>.<Op>'.
    try:
        target_service = args[0].headers['X-Amz-Target'].decode('utf-8').split('_')[0]
    except KeyError:
        # Not an X-Amz-Target call at all — nothing to trace.
        return wrapped(*args, **kwargs)
    if target_service.lower() != 'dynamodb':
        return wrapped(*args, **kwargs)
    return xray_recorder.record_subsegment(
        wrapped, instance, args, kwargs,
        name='dynamodb',
        namespace='aws',
        meta_processor=pynamodb_meta_processor,
    )
def pynamodb_meta_processor(wrapped, instance, args, kwargs, return_value,
                            exception, subsegment, stack):
    """Populate the 'dynamodb' subsegment with AWS operation metadata."""
    # Header is bytes shaped like b'DynamoDB_<version>.<OperationName>'.
    operation_name = args[0].headers['X-Amz-Target'].decode('utf-8').split('.')[1]
    # Takes the second dotted component of the endpoint URL as the region
    # (presumably 'https://dynamodb.<region>.amazonaws.com/...').
    region = args[0].url.split('.')[1]
    aws_meta = {
        'operation': operation_name,
        'region': region
    }
    # in case of client timeout the return value will be empty
    if return_value is not None:
        aws_meta['request_id'] = return_value.headers.get('x-amzn-RequestId')
        subsegment.put_http_meta(http.STATUS, return_value.status_code)
    if exception:
        subsegment.add_error_flag()
        subsegment.add_exception(exception, stack, True)
    # PynamoDB 4 responses expose raw text (``.text``); older versions use a
    # requests response with ``.json()``.
    if PYNAMODB4:
        resp = json.loads(return_value.text) if return_value else None
    else:
        resp = return_value.json() if return_value else None
    # Record only whitelisted request/response parameters on the subsegment.
    _extract_whitelisted_params(subsegment.name, operation_name, aws_meta,
                                [None, json.loads(args[0].body.decode('utf-8'))],
                                None, resp)
    subsegment.set_aws(aws_meta)
================================================
FILE: aws_xray_sdk/ext/requests/__init__.py
================================================
from .patch import patch
__all__ = ['patch']
================================================
FILE: aws_xray_sdk/ext/requests/patch.py
================================================
import wrapt
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.models import http
from aws_xray_sdk.ext.util import inject_trace_header, strip_url, get_hostname
def patch():
    """Trace outgoing HTTP calls made through the ``requests`` library."""
    wrapt.wrap_function_wrapper('requests', 'Session.request', _xray_traced_requests)
    # The trace header is injected while the request is being prepared.
    wrapt.wrap_function_wrapper('requests', 'Session.prepare_request', _inject_header)
def _xray_traced_requests(wrapped, instance, args, kwargs):
    """Run ``Session.request`` inside a remote subsegment named after the host."""
    # Session.request(method, url, ...): the URL is a kwarg or args[1].
    target_url = kwargs.get('url') or args[1]
    return xray_recorder.record_subsegment(
        wrapped, instance, args, kwargs,
        name=get_hostname(target_url),
        namespace='remote',
        meta_processor=requests_processor,
    )
def _inject_header(wrapped, instance, args, kwargs):
    """Copy the current subsegment's trace header onto the outgoing request."""
    outgoing = args[0]
    trace_headers = getattr(outgoing, 'headers', {})
    inject_trace_header(trace_headers, xray_recorder.current_subsegment())
    setattr(outgoing, 'headers', trace_headers)
    return wrapped(*args, **kwargs)
def requests_processor(wrapped, instance, args, kwargs,
                       return_value, exception, subsegment, stack):
    """Record HTTP method/URL plus either the response status or the exception."""
    http_method = kwargs.get('method') or args[0]
    target_url = kwargs.get('url') or args[1]
    subsegment.put_http_meta(http.METHOD, http_method)
    subsegment.put_http_meta(http.URL, strip_url(target_url))
    if return_value is not None:
        subsegment.put_http_meta(http.STATUS, return_value.status_code)
    elif exception:
        subsegment.add_exception(exception, stack)
================================================
FILE: aws_xray_sdk/ext/resources/aws_para_whitelist.json
================================================
{
"services": {
"sns": {
"operations": {
"Publish": {
"request_parameters": [
"TopicArn"
]
},
"PublishBatch": {
"request_parameters": [
"TopicArn"
]
}
}
},
"dynamodb": {
"operations": {
"BatchGetItem": {
"request_descriptors": {
"RequestItems": {
"map": true,
"get_keys": true,
"rename_to": "table_names"
}
},
"response_parameters": [
"ConsumedCapacity"
]
},
"BatchWriteItem": {
"request_descriptors": {
"RequestItems": {
"map": true,
"get_keys": true,
"rename_to": "table_names"
}
},
"response_parameters": [
"ConsumedCapacity",
"ItemCollectionMetrics"
]
},
"CreateTable": {
"request_parameters": [
"GlobalSecondaryIndexes",
"LocalSecondaryIndexes",
"ProvisionedThroughput",
"TableName"
]
},
"DeleteItem": {
"request_parameters": [
"TableName"
],
"response_parameters": [
"ConsumedCapacity",
"ItemCollectionMetrics"
]
},
"DeleteTable": {
"request_parameters": [
"TableName"
]
},
"DescribeTable": {
"request_parameters": [
"TableName"
]
},
"GetItem": {
"request_parameters": [
"ConsistentRead",
"ProjectionExpression",
"TableName"
],
"response_parameters": [
"ConsumedCapacity"
]
},
"ListTables": {
"request_parameters": [
"ExclusiveStartTableName",
"Limit"
],
"response_descriptors": {
"TableNames": {
"list": true,
"get_count": true,
"rename_to": "table_count"
}
}
},
"PutItem": {
"request_parameters": [
"TableName"
],
"response_parameters": [
"ConsumedCapacity",
"ItemCollectionMetrics"
]
},
"Query": {
"request_parameters": [
"AttributesToGet",
"ConsistentRead",
"IndexName",
"Limit",
"ProjectionExpression",
"ScanIndexForward",
"Select",
"TableName"
],
"response_parameters": [
"ConsumedCapacity"
]
},
"Scan": {
"request_parameters": [
"AttributesToGet",
"ConsistentRead",
"IndexName",
"Limit",
"ProjectionExpression",
"Segment",
"Select",
"TableName",
"TotalSegments"
],
"response_parameters": [
"ConsumedCapacity",
"Count",
"ScannedCount"
]
},
"UpdateItem": {
"request_parameters": [
"TableName"
],
"response_parameters": [
"ConsumedCapacity",
"ItemCollectionMetrics"
]
},
"UpdateTable": {
"request_parameters": [
"AttributeDefinitions",
"GlobalSecondaryIndexUpdates",
"ProvisionedThroughput",
"TableName"
]
}
}
},
"sqs": {
"operations": {
"AddPermission": {
"request_parameters": [
"Label",
"QueueUrl"
]
},
"ChangeMessageVisibility": {
"request_parameters": [
"QueueUrl",
"VisibilityTimeout"
]
},
"ChangeMessageVisibilityBatch": {
"request_parameters": [
"QueueUrl"
],
"response_parameters": [
"Failed"
]
},
"CreateQueue": {
"request_parameters": [
"Attributes",
"QueueName"
]
},
"DeleteMessage": {
"request_parameters": [
"QueueUrl"
]
},
"DeleteMessageBatch": {
"request_parameters": [
"QueueUrl"
],
"response_parameters": [
"Failed"
]
},
"DeleteQueue": {
"request_parameters": [
"QueueUrl"
]
},
"GetQueueAttributes": {
"request_parameters": [
"QueueUrl"
],
"response_parameters": [
"Attributes"
]
},
"GetQueueUrl": {
"request_parameters": [
"QueueName",
"QueueOwnerAWSAccountId"
],
"response_parameters": [
"QueueUrl"
]
},
"ListDeadLetterSourceQueues": {
"request_parameters": [
"QueueUrl"
],
"response_parameters": [
"QueueUrls"
]
},
"ListQueues": {
"request_parameters": [
"QueueNamePrefix"
],
"response_descriptors": {
"QueueUrls": {
"list": true,
"get_count": true,
"rename_to": "queue_count"
}
}
},
"PurgeQueue": {
"request_parameters": [
"QueueUrl"
]
},
"ReceiveMessage": {
"request_parameters": [
"AttributeNames",
"MaxNumberOfMessages",
"MessageAttributeNames",
"QueueUrl",
"VisibilityTimeout",
"WaitTimeSeconds"
],
"response_descriptors": {
"Messages": {
"list": true,
"get_count": true,
"rename_to": "message_count"
}
}
},
"RemovePermission": {
"request_parameters": [
"QueueUrl"
]
},
"SendMessage": {
"request_parameters": [
"DelaySeconds",
"QueueUrl"
],
"request_descriptors": {
"MessageAttributes": {
"map": true,
"get_keys": true,
"rename_to": "message_attribute_names"
}
},
"response_parameters": [
"MessageId"
]
},
"SendMessageBatch": {
"request_parameters": [
"QueueUrl"
],
"request_descriptors": {
"Entries": {
"list": true,
"get_count": true,
"rename_to": "message_count"
}
},
"response_descriptors": {
"Failed": {
"list": true,
"get_count": true,
"rename_to": "failed_count"
},
"Successful": {
"list": true,
"get_count": true,
"rename_to": "successful_count"
}
}
},
"SetQueueAttributes": {
"request_parameters": [
"QueueUrl"
],
"request_descriptors": {
"Attributes": {
"map": true,
"get_keys": true,
"rename_to": "attribute_names"
}
}
}
}
},
"lambda": {
"operations": {
"Invoke": {
"request_parameters": [
"FunctionName",
"InvocationType",
"LogType",
"Qualifier"
],
"response_parameters": [
"FunctionError",
"StatusCode"
]
},
"InvokeAsync": {
"request_parameters": [
"FunctionName"
],
"response_parameters": [
"Status"
]
}
}
},
"s3": {
"operations": {
"CopyObject": {
"request_parameters": [
"CopySource",
"Bucket",
"Key"
]
},
"GetObject": {
"request_parameters": [
"Key",
"VersionId"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"PutObject": {
"request_parameters": [
"Key"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GetObjectAcl": {
"request_parameters": [
"Key",
"VersionId"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"CreateBucket": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"ListObjectsV2": {
"request_parameters": [
"Prefix"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"ListObjects": {
"request_parameters": [
"Prefix"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GetObjectTagging": {
"request_parameters": [
"Key",
"VersionId"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"PutObjectTagging": {
"request_parameters": [
"Key",
"VersionId"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"ListVersions": {
"request_parameters": [
"Prefix"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"SetObjectAcl": {
"request_parameters": [
"Key",
"VersionId"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GetBucketAcl": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"PutBucketAcl": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"HeadBucket": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"UploadPart": {
"request_parameters": [
"Key"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"DeleteObject": {
"request_parameters": [
"Key"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"DeleteBucket": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"DeleteObjects": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"DeleteVersion": {
"request_parameters": [
"Key",
"VersionId"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GetBucketPolicy": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"PutBucketPolicy": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"ListParts": {
"request_parameters": [
"Key"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"RestoreObject": {
"request_parameters": [
"Key",
"VersionId"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"RestoreObjectV2": {
"request_parameters": [
"Key",
"VersionId"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"PutBucketNotificationConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"DeleteBucketLifecycleConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GetBucketNotificationConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"DeleteBucketCors": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"PutBucketCors": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GetBucketCors": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"ListBucketInventoryConfigurations": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GetBucketReplicationConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"PutBucketReplicationConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"DeleteBucketReplicationConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"DeleteBucketAnalyticsConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"DeleteBucketInventoryConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"ListBucketAnalyticsConfigurations": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"DeleteObjectTagging": {
"request_parameters": [
"Key",
"VersionId"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"PutBucketVersioning": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GetBucketVersioning": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GetBucketWebsite": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GetBucketLifecycleConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"SetBucketLifecycleConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GetBucketTagging": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"PutBucketTagging": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GetBucketLocation": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GetBucketLogging": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"ListMultipartUploads": {
"request_parameters": [
"Prefix"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"DeleteBucketPolicy": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"DeleteBucketEncryption": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"PutBucketAccelerateConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"PutBucketWebsite": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"CompleteMultipartUpload": {
"request_parameters": [
"Key"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"InitiateMultipartUpload": {
"request_parameters": [
"Key"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"PutBucketEncryption": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"SetBucketLogging": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"DeleteBucketWebsite": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GetBucketEncryption": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"AbortMultipartUpload": {
"request_parameters": [
"Key"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GeneratePresignedUrl": {
"request_parameters": [
"Key",
"VersionId"
],
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"DeleteBucketTagging": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GetBucketAccelerateConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GetBucketMetricsConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"ListBucketMetricsConfigurations": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"PutBucketInventoryConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"PutBucketMetricsConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"PutBucketAnalyticsConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"DeleteBucketMetricsConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GetBucketAnalyticsConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
},
"GetBucketInventoryConfiguration": {
"request_descriptors": {
"Bucket": {
"rename_to": "bucket_name"
}
}
}
}
},
"runtime.sagemaker": {
"operations": {
"InvokeEndpoint": {
"request_parameters": [
"EndpointName"
]
}
}
}
}
}
================================================
FILE: aws_xray_sdk/ext/sqlalchemy/__init__.py
================================================
================================================
FILE: aws_xray_sdk/ext/sqlalchemy/query.py
================================================
from builtins import super
from sqlalchemy.orm.query import Query
from sqlalchemy.orm.session import Session, sessionmaker
from .util.decorators import xray_on_call, decorate_all_functions
@decorate_all_functions(xray_on_call)
class XRaySession(Session):
    """SQLAlchemy ``Session`` subclass traced by X-Ray.

    NOTE(review): ``decorate_all_functions`` iterates ``cls.__bases__`` and
    patches the public methods of the base class (``Session``) in place with
    ``xray_on_call`` — the wrapping is not limited to this subclass.
    """
    pass
@decorate_all_functions(xray_on_call)
class XRayQuery(Query):
    """SQLAlchemy ``Query`` subclass traced by X-Ray.

    NOTE(review): ``decorate_all_functions`` patches the public methods of
    the base class (``Query``) in place with ``xray_on_call``.
    """
    pass
@decorate_all_functions(xray_on_call)
class XRaySessionMaker(sessionmaker):
    """``sessionmaker`` variant producing traced sessions.

    Defaults the session class to ``XRaySession`` and forces ``query_cls``
    to ``XRayQuery`` so that both session- and query-level operations are
    wrapped by ``xray_on_call``.
    """
    def __init__(self, bind=None, class_=XRaySession, autoflush=True,
                 autocommit=False,
                 expire_on_commit=True,
                 info=None, **kw):
        # Force the traced Query class; all other kwargs pass through.
        kw['query_cls'] = XRayQuery
        super().__init__(bind, class_, autoflush, autocommit, expire_on_commit,
                         info, **kw)
================================================
FILE: aws_xray_sdk/ext/sqlalchemy/util/__init__.py
================================================
================================================
FILE: aws_xray_sdk/ext/sqlalchemy/util/decorators.py
================================================
import re
import types
from urllib.parse import urlparse, uses_netloc
from sqlalchemy.engine.base import Connection
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.ext.util import strip_url
def decorate_all_functions(function_decorator):
    """Class-decorator factory that wraps public methods with ``function_decorator``.

    ``function_decorator`` is called as ``function_decorator(owner, fn)`` for
    every public plain function found on the bases of the decorated class.

    NOTE(review): the wrapping mutates the base classes in place; the
    decorated class itself is returned unchanged.
    """
    def decorator(cls):
        for base in cls.__bases__:
            for attr_name, attr in vars(base).items():
                # Skip private/dunder attributes and anything that is not a
                # plain function (properties, descriptors, class attrs, ...).
                if attr_name.startswith("_"):
                    continue
                if not isinstance(attr, types.FunctionType):
                    continue
                # Unwrap a legacy Python 2 unbound method when present.
                attr = getattr(attr, '__func__', attr)
                setattr(base, attr_name, function_decorator(base, attr))
        return cls
    return decorator
def xray_on_call(cls, func):
    """Wrap a SQLAlchemy session/query method in an X-Ray subsegment.

    ``cls`` is the class owning ``func``; its module name decides whether the
    call is handled as a session operation or a query operation. When SQL
    metadata can be derived from the call arguments, ``func`` runs inside a
    remote-namespace subsegment tagged with that metadata; otherwise it runs
    untraced.
    """
    def wrapper(*args, **kw):
        # Imported lazily to avoid a circular import with ``..query``.
        from ..query import XRayQuery, XRaySession
        try:
            # Optional flask_sqlalchemy integration; flag its availability.
            from ...flask_sqlalchemy.query import XRaySignallingSession
            has_sql_alchemy = True
        except ImportError:
            has_sql_alchemy = False
        class_name = str(cls.__module__)
        c = xray_recorder._context
        sql = None
        subsegment = None
        if class_name == "sqlalchemy.orm.session":
            # Session methods: derive SQL metadata from the session's bind.
            for arg in args:
                if isinstance(arg, XRaySession):
                    sql = parse_bind(arg.bind)
                if has_sql_alchemy and isinstance(arg, XRaySignallingSession):
                    sql = parse_bind(arg.bind)
        if class_name == 'sqlalchemy.orm.query':
            for arg in args:
                if isinstance(arg, XRayQuery):
                    try:
                        sql = parse_bind(arg.session.bind)
                        if xray_recorder.stream_sql:
                            # str(query) is recorded as the sanitized query text.
                            sql['sanitized_query'] = str(arg)
                    except Exception:
                        # Metadata is best-effort; on failure skip tracing.
                        sql = None
        if sql is not None:
            # Only open a subsegment when this context already holds an
            # entity. NOTE(review): peeks at the context's private ``_local``
            # storage — presumably to avoid begin_subsegment without an
            # active segment; confirm against Context internals.
            if getattr(c._local, 'entities', None) is not None:
                # Strip URL of ? and following text
                sub_name = strip_url(sql['url'])
                subsegment = xray_recorder.begin_subsegment(sub_name, namespace='remote')
            else:
                subsegment = None
        try:
            res = func(*args, **kw)
        finally:
            # Close the subsegment whether or not ``func`` raised.
            if subsegment is not None:
                subsegment.set_sql(sql)
                subsegment.put_annotation("sqlalchemy", class_name+'.'+func.__name__)
                xray_recorder.end_subsegment()
        return res
    return wrapper
# URL Parse output
# scheme 0 URL scheme specifier scheme parameter
# netloc 1 Network location part empty string
# path 2 Hierarchical path empty string
# query 3 Query component empty string
# fragment 4 Fragment identifier empty string
# username User name None
# password Password None
# hostname Host name (lower case) None
# port Port number as integer, if present None
#
# XRAY Trace SQL metaData Sample
# "sql" : {
# "url": "jdbc:postgresql://aawijb5u25wdoy.cpamxznpdoq8.us-west-2.rds.amazonaws.com:5432/ebdb",
# "preparation": "statement",
# "database_type": "PostgreSQL",
# "database_version": "9.5.4",
# "driver_version": "PostgreSQL 9.4.1211.jre7",
# "user" : "dbuser",
# "sanitized_query" : "SELECT * FROM customers WHERE customer_id=?;"
# }
def parse_bind(bind):
    """Parses a connection string and creates SQL trace metadata.

    :param bind: a SQLAlchemy ``Connection`` or ``Engine``.
    :return: dict with ``database_type``, ``url`` (password stripped) and,
        when present, ``user``; ``None`` when the engine's repr does not
        match the expected ``Engine(<url>)`` form.
    """
    if isinstance(bind, Connection):
        engine = bind.engine
    else:
        engine = bind
    m = re.match(r"Engine\((.*?)\)", str(engine))
    if m is None:
        return None
    u = urlparse(m.group(1))
    # Add Scheme to uses_netloc or // will be missing from url.
    # Guarded so the module-level list does not grow unboundedly when
    # this function is called repeatedly with the same scheme.
    if u.scheme not in uses_netloc:
        uses_netloc.append(u.scheme)
    if u.password is None:
        safe_url = u.geturl()
    else:
        # Strip password from URL before recording it in the trace.
        host_info = u.netloc.rpartition('@')[-1]
        parts = u._replace(netloc='{}@{}'.format(u.username, host_info))
        safe_url = parts.geturl()
    sql = {
        'database_type': u.scheme,
        'url': safe_url,
    }
    if u.username is not None:
        sql['user'] = "{}".format(u.username)
    return sql
================================================
FILE: aws_xray_sdk/ext/sqlalchemy_core/__init__.py
================================================
from .patch import patch, unpatch
__all__ = ['patch', 'unpatch']
================================================
FILE: aws_xray_sdk/ext/sqlalchemy_core/patch.py
================================================
import logging
import sys
from urllib.parse import urlparse, uses_netloc, quote_plus
import wrapt
from sqlalchemy.sql.expression import ClauseElement
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.patcher import _PATCHED_MODULES
from aws_xray_sdk.core.utils import stacktrace
from aws_xray_sdk.ext.util import unwrap
def _sql_meta(engine_instance, args):
    """Build (subsegment name, sql metadata dict) for a sqlalchemy execute call.

    Returns ``(None, None)`` when metadata extraction fails so the caller can
    run the statement untraced instead of breaking the query.
    """
    try:
        metadata = {}
        # Workaround for https://github.com/sqlalchemy/sqlalchemy/issues/10662
        # sqlalchemy.engine.url.URL's __repr__ does not url encode username nor password.
        # This will continue to work once sqlalchemy fixes the bug.
        sa_url = engine_instance.engine.url
        username = sa_url.username
        sa_url = sa_url._replace(username=None, password=None)
        url = urlparse(str(sa_url))
        # The subsegment is named after the host part of the connection url.
        name = url.netloc
        if username:
            # Restore url encoded username
            quoted_username = quote_plus(username)
            url = url._replace(netloc='{}@{}'.format(quoted_username, url.netloc))
        # Add Scheme to uses_netloc or // will be missing from url.
        uses_netloc.append(url.scheme)
        metadata['url'] = url.geturl()
        metadata['user'] = url.username
        metadata['database_type'] = engine_instance.engine.name
        try:
            # e.g. ``<driver>_version`` on the dialect; not every dialect
            # exposes one, hence the AttributeError fallback.
            version = getattr(engine_instance.dialect, '{}_version'.format(engine_instance.engine.driver))
            version_str = '.'.join(map(str, version))
            metadata['driver_version'] = "{}-{}".format(engine_instance.engine.driver, version_str)
        except AttributeError:
            metadata['driver_version'] = engine_instance.engine.driver
        if engine_instance.dialect.server_version_info is not None:
            metadata['database_version'] = '.'.join(map(str, engine_instance.dialect.server_version_info))
        if xray_recorder.stream_sql:
            try:
                if isinstance(args[0], ClauseElement):
                    metadata['sanitized_query'] = str(args[0].compile(engine_instance.engine))
                else:
                    metadata['sanitized_query'] = str(args[0])
            except Exception:
                logging.getLogger(__name__).exception('Error getting the sanitized query')
    except Exception:
        # Metadata is best-effort; never let extraction break the query.
        metadata = None
        name = None
        logging.getLogger(__name__).exception('Error parsing sql metadata.')
    return name, metadata
def _xray_traced_sqlalchemy_execute(wrapped, instance, args, kwargs):
    # wrapt hook for Connection.execute; the Connection instance itself
    # supplies the engine to _sql_meta (via its .engine attribute).
    return _process_request(wrapped, instance, args, kwargs)
def _xray_traced_sqlalchemy_session(wrapped, instance, args, kwargs):
    # wrapt hook for Session.execute; the session's bind supplies the engine.
    return _process_request(wrapped, instance.bind, args, kwargs)
def _process_request(wrapped, engine_instance, args, kwargs):
    """Run ``wrapped`` inside an X-Ray subsegment carrying SQL metadata.

    When metadata extraction fails the statement executes untraced.
    Exceptions raised by ``wrapped`` are recorded on the subsegment
    (with stack trace) and re-raised; the subsegment is always closed.
    """
    name, sql = _sql_meta(engine_instance, args)
    if sql is not None:
        subsegment = xray_recorder.begin_subsegment(name, namespace='remote')
    else:
        subsegment = None
    try:
        res = wrapped(*args, **kwargs)
    except Exception:
        if subsegment is not None:
            exception = sys.exc_info()[1]
            stack = stacktrace.get_stacktrace(limit=xray_recorder._max_trace_back)
            subsegment.add_exception(exception, stack)
        raise
    finally:
        # Runs on both success and failure, after the except block above.
        if subsegment is not None:
            subsegment.set_sql(sql)
            xray_recorder.end_subsegment()
    return res
def patch():
    """Install X-Ray tracing hooks on sqlalchemy's execute entry points."""
    hooks = (
        ('sqlalchemy.engine.base', 'Connection.execute',
         _xray_traced_sqlalchemy_execute),
        ('sqlalchemy.orm.session', 'Session.execute',
         _xray_traced_sqlalchemy_session),
    )
    for module_name, target, handler in hooks:
        wrapt.wrap_function_wrapper(module_name, target, handler)
def unpatch():
    """
    Unpatch any previously patched modules.
    This operation is idempotent.
    """
    _PATCHED_MODULES.discard('sqlalchemy_core')
    import sqlalchemy
    targets = (
        (sqlalchemy.engine.base.Connection, 'execute'),
        (sqlalchemy.orm.session.Session, 'execute'),
    )
    for owner, method_name in targets:
        unwrap(owner, method_name)
================================================
FILE: aws_xray_sdk/ext/sqlite3/__init__.py
================================================
from .patch import patch
__all__ = ['patch']
================================================
FILE: aws_xray_sdk/ext/sqlite3/patch.py
================================================
import wrapt
import sqlite3
from aws_xray_sdk.ext.dbapi2 import XRayTracedConn
def patch():
    """Patch ``sqlite3.connect`` so returned connections are traced by X-Ray."""
    wrapt.wrap_function_wrapper(
        'sqlite3',
        'connect',
        _xray_traced_connect
    )
def _xray_traced_connect(wrapped, instance, args, kwargs):
    """wrapt hook for ``sqlite3.connect``: wrap the real connection in a
    traced connection carrying database name/version metadata.
    """
    conn = wrapped(*args, **kwargs)
    meta = {
        # The database path may be passed positionally or as the ``database``
        # keyword; the previous ``args[0]`` lookup raised IndexError for
        # keyword-only callers, e.g. sqlite3.connect(database='app.db').
        'name': args[0] if args else kwargs.get('database'),
        'database_version': sqlite3.sqlite_version,
    }
    traced_conn = XRayTracedSQLite(conn, meta)
    return traced_conn
class XRayTracedSQLite(XRayTracedConn):
    """Traced sqlite3 connection.

    sqlite3 connections allow execute/executemany directly on the
    connection object; delegate those to a fresh (traced) cursor so the
    calls go through the same code path as cursor-level execution.
    """
    def execute(self, *args, **kwargs):
        cursor = self.cursor()
        return cursor.execute(*args, **kwargs)

    def executemany(self, *args, **kwargs):
        cursor = self.cursor()
        return cursor.executemany(*args, **kwargs)
================================================
FILE: aws_xray_sdk/ext/util.py
================================================
import re
from urllib.parse import urlparse
import wrapt
from aws_xray_sdk.core.models import http
from aws_xray_sdk.core.models.trace_header import TraceHeader
first_cap_re = re.compile('(.)([A-Z][a-z]+)')
all_cap_re = re.compile('([a-z0-9])([A-Z])')
UNKNOWN_HOSTNAME = "UNKNOWN HOST"
def inject_trace_header(headers, entity):
    """
    Extract trace id, entity id and sampling decision
    from the input entity and inject these information
    to headers.

    :param dict headers: http headers to inject
    :param Entity entity: trace entity that the trace header
        value generated from.
    """
    if not entity:
        return
    # Subsegments carry the origin header on their parent segment.
    if getattr(entity, 'type', None) == 'subsegment':
        origin_header = entity.parent_segment.get_origin_trace_header()
    else:
        origin_header = entity.get_origin_trace_header()
    to_insert = TraceHeader(
        root=entity.trace_id,
        parent=entity.id,
        sampled=entity.sampled,
        data=origin_header.data if origin_header else None,
    )
    headers[http.XRAY_HEADER] = to_insert.to_header_str()
def calculate_sampling_decision(trace_header, recorder, sampling_req):
    """
    Return 1 or the matched rule name if should sample and 0 if should not.

    The sampling decision coming from ``trace_header`` always has
    the highest precedence. If the ``trace_header`` doesn't contain
    a sampling decision then it checks whether sampling is enabled
    in the recorder. If not enabled it returns 1. Otherwise it uses
    user defined sampling rules to decide.
    """
    header_decision = trace_header.sampled
    if header_decision is not None and header_decision != '?':
        return header_decision
    if not recorder.sampling:
        return 1
    decision = recorder.sampler.should_trace(sampling_req)
    return decision or 0
def construct_xray_header(headers):
    """
    Construct a ``TraceHeader`` object from dictionary headers
    of the incoming request. This method should always return
    a ``TraceHeader`` object regardless of tracing header's presence
    in the incoming request.
    """
    raw_value = headers.get(http.XRAY_HEADER) or headers.get(http.ALT_XRAY_HEADER)
    if not raw_value:
        return TraceHeader()
    return TraceHeader.from_header_str(raw_value)
def calculate_segment_name(host_name, recorder):
    """
    Returns the segment name based on recorder configuration and
    input host name. This is a helper generally used in web framework
    middleware where a host name is available from incoming request's headers.
    """
    naming = recorder.dynamic_naming
    return naming.get_name(host_name) if naming else recorder.service
def prepare_response_header(origin_header, segment):
    """
    Prepare a trace header to be inserted into response
    based on original header and the request segment.
    """
    # Echo the sampling decision back only when the caller asked for
    # one with a '?' in the incoming header.
    header_kwargs = {'root': segment.trace_id}
    if origin_header and origin_header.sampled == '?':
        header_kwargs['sampled'] = segment.sampled
    return TraceHeader(**header_kwargs).to_header_str()
def to_snake_case(name):
    """
    Convert the input string to snake-cased string.
    """
    # First pass: break before capitalized words; second pass handles
    # acronym boundaries (e.g. 'QueueURL' -> 'queue_url').
    with_word_breaks = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', with_word_breaks).lower()
# ? is not a valid entity, and we don't want things after the ? for the segment name
def strip_url(url):
    """
    Will generate a valid url string for use as a segment name
    :param url: url to strip
    :return: validated url string
    """
    # Empty/None inputs are returned untouched.
    if not url:
        return url
    return url.split('?', 1)[0]
def get_hostname(url):
    """Best-effort hostname extraction from a url string."""
    if url is None:
        return UNKNOWN_HOSTNAME
    parsed_hostname = urlparse(url).hostname
    if parsed_hostname is None:
        return UNKNOWN_HOSTNAME
    # Fall back to the raw URL for malformed input that parses to an
    # empty (falsy) hostname.
    return parsed_hostname if parsed_hostname else url
def unwrap(obj, attr):
    """
    Will unwrap a `wrapt` attribute
    :param obj: base object
    :param attr: attribute on `obj` to unwrap
    """
    target = getattr(obj, attr, None)
    if not target:
        return
    if not hasattr(target, '__wrapped__'):
        return
    # Restore the original callable that wrapt stored on the wrapper.
    setattr(obj, attr, target.__wrapped__)
================================================
FILE: aws_xray_sdk/sdk_config.py
================================================
import os
import logging
log = logging.getLogger(__name__)
class SDKConfig:
    """
    Global Configuration Class that defines SDK-level configuration properties.

    Enabling/Disabling the SDK:
    By default, the SDK is enabled unless an environment variable AWS_XRAY_SDK_ENABLED
    is set. If it is set, it needs to be a valid string boolean, otherwise, it will default
    to true. If the environment variable is set, all calls to set_sdk_enabled() will
    prioritize the value of the environment variable.
    Disabling the SDK affects the recorder, patcher, and middlewares in the following ways:
    For the recorder, disabling automatically generates DummySegments for subsequent segments
    and DummySubsegments for subsegments created and thus not send any traces to the daemon.
    For the patcher, module patching will automatically be disabled. The SDK must be disabled
    before calling patcher.patch() method in order for this to function properly.
    For the middleware, no modification is made on them, but since the recorder automatically
    generates DummySegments for all subsequent calls, they will not generate segments/subsegments
    to be sent.

    Environment variables:
    "AWS_XRAY_SDK_ENABLED" - If set to 'false' disables the SDK and causes the explained above
    to occur.
    """
    # Environment variable that overrides any programmatic configuration.
    XRAY_ENABLED_KEY = 'AWS_XRAY_SDK_ENABLED'
    # Name used for the dummy (no-op) entities produced while disabled.
    DISABLED_ENTITY_NAME = 'dummy'
    # Cached enabled flag; lazily initialized from the environment on first read.
    __SDK_ENABLED = None

    @classmethod
    def __get_enabled_from_env(cls):
        """
        Searches for the environment variable to see if the SDK should be disabled.
        If no environment variable is found, it returns True by default.

        :return: bool - True if it is enabled, False otherwise.
        """
        env_var_str = os.getenv(cls.XRAY_ENABLED_KEY, 'true').lower()
        if env_var_str in ('y', 'yes', 't', 'true', 'on', '1'):
            return True
        elif env_var_str in ('n', 'no', 'f', 'false', 'off', '0'):
            return False
        else:
            log.warning("Invalid literal passed into environment variable `AWS_XRAY_SDK_ENABLED`. Defaulting to True...")
            return True  # If an invalid parameter is passed in, we return True.

    @classmethod
    def sdk_enabled(cls):
        """
        Returns whether the SDK is enabled or not.
        """
        if cls.__SDK_ENABLED is None:
            cls.__SDK_ENABLED = cls.__get_enabled_from_env()
        return cls.__SDK_ENABLED

    @classmethod
    def set_sdk_enabled(cls, value):
        """
        Modifies the enabled flag if the "AWS_XRAY_SDK_ENABLED" environment variable is not set,
        otherwise, set the enabled flag to be equal to the environment variable. If the
        env variable is an invalid string boolean, it will default to true.

        :param bool value: Flag to set whether the SDK is enabled or disabled.
            Environment variables AWS_XRAY_SDK_ENABLED overrides argument value.
        """
        # Environment Variables take precedence over hardcoded configurations.
        if cls.XRAY_ENABLED_KEY in os.environ:
            cls.__SDK_ENABLED = cls.__get_enabled_from_env()
        else:
            # isinstance is the idiomatic type check (was ``type(value) == bool``)
            # and additionally accepts bool subclasses.
            if isinstance(value, bool):
                cls.__SDK_ENABLED = value
            else:
                cls.__SDK_ENABLED = True
                log.warning("Invalid parameter type passed into set_sdk_enabled(). Defaulting to True...")
================================================
FILE: aws_xray_sdk/version.py
================================================
# Release version string of the aws-xray-sdk package.
VERSION = '2.15.0'
================================================
FILE: docs/.gitignore
================================================
_build
================================================
FILE: docs/Makefile
================================================
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = python -msphinx
SPHINXPROJ = aws-xray-sdk
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
================================================
FILE: docs/_templates/layout.html
================================================
{% extends '!layout.html' %}
{% block footer %}
{% endblock %}
================================================
FILE: docs/aws_xray_sdk.core.emitters.rst
================================================
aws\_xray\_sdk.core.emitters package
====================================
Submodules
----------
aws\_xray\_sdk.core.emitters.udp\_emitter module
------------------------------------------------
.. automodule:: aws_xray_sdk.core.emitters.udp_emitter
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.core.emitters
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.core.exceptions.rst
================================================
aws\_xray\_sdk.core.exceptions package
======================================
Submodules
----------
aws\_xray\_sdk.core.exceptions.exceptions module
------------------------------------------------
.. automodule:: aws_xray_sdk.core.exceptions.exceptions
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.core.exceptions
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.core.models.rst
================================================
aws\_xray\_sdk.core.models package
==================================
Submodules
----------
aws\_xray\_sdk.core.models.default\_dynamic\_naming module
----------------------------------------------------------
.. automodule:: aws_xray_sdk.core.models.default_dynamic_naming
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.models.dummy\_entities module
-------------------------------------------------
.. automodule:: aws_xray_sdk.core.models.dummy_entities
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.models.entity module
----------------------------------------
.. automodule:: aws_xray_sdk.core.models.entity
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.models.facade\_segment module
-------------------------------------------------
.. automodule:: aws_xray_sdk.core.models.facade_segment
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.models.http module
--------------------------------------
.. automodule:: aws_xray_sdk.core.models.http
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.models.segment module
-----------------------------------------
.. automodule:: aws_xray_sdk.core.models.segment
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.models.subsegment module
--------------------------------------------
.. automodule:: aws_xray_sdk.core.models.subsegment
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.models.throwable module
-------------------------------------------
.. automodule:: aws_xray_sdk.core.models.throwable
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.models.trace\_header module
-----------------------------------------------
.. automodule:: aws_xray_sdk.core.models.trace_header
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.models.traceid module
-----------------------------------------
.. automodule:: aws_xray_sdk.core.models.traceid
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.core.models
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.core.plugins.rst
================================================
aws\_xray\_sdk.core.plugins package
===================================
Submodules
----------
aws\_xray\_sdk.core.plugins.ec2\_plugin module
----------------------------------------------
.. automodule:: aws_xray_sdk.core.plugins.ec2_plugin
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.plugins.ecs\_plugin module
----------------------------------------------
.. automodule:: aws_xray_sdk.core.plugins.ecs_plugin
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.plugins.elasticbeanstalk\_plugin module
-----------------------------------------------------------
.. automodule:: aws_xray_sdk.core.plugins.elasticbeanstalk_plugin
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.plugins.utils module
----------------------------------------
.. automodule:: aws_xray_sdk.core.plugins.utils
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.core.plugins
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.core.rst
================================================
aws\_xray\_sdk.core package
===========================
Subpackages
-----------
.. toctree::
aws_xray_sdk.core.emitters
aws_xray_sdk.core.exceptions
aws_xray_sdk.core.models
aws_xray_sdk.core.plugins
aws_xray_sdk.core.sampling
aws_xray_sdk.core.streaming
aws_xray_sdk.core.utils
Submodules
----------
aws\_xray\_sdk.core.async\_context module
-----------------------------------------
.. automodule:: aws_xray_sdk.core.async_context
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.async\_recorder module
------------------------------------------
.. automodule:: aws_xray_sdk.core.async_recorder
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.context module
----------------------------------
.. automodule:: aws_xray_sdk.core.context
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.lambda\_launcher module
-------------------------------------------
.. automodule:: aws_xray_sdk.core.lambda_launcher
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.patcher module
----------------------------------
.. automodule:: aws_xray_sdk.core.patcher
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.recorder module
-----------------------------------
.. automodule:: aws_xray_sdk.core.recorder
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.core
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.core.sampling.rst
================================================
aws\_xray\_sdk.core.sampling package
====================================
Submodules
----------
aws\_xray\_sdk.core.sampling.default\_sampler module
----------------------------------------------------
.. automodule:: aws_xray_sdk.core.sampling.default_sampler
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.sampling.reservoir module
---------------------------------------------
.. automodule:: aws_xray_sdk.core.sampling.reservoir
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.sampling.sampling\_rule module
--------------------------------------------------
.. automodule:: aws_xray_sdk.core.sampling.sampling_rule
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.core.sampling
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.core.streaming.rst
================================================
aws\_xray\_sdk.core.streaming package
=====================================
Submodules
----------
aws\_xray\_sdk.core.streaming.default\_streaming module
-------------------------------------------------------
.. automodule:: aws_xray_sdk.core.streaming.default_streaming
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.core.streaming
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.core.utils.rst
================================================
aws\_xray\_sdk.core.utils package
=================================
Submodules
----------
aws\_xray\_sdk.core.utils.atomic\_counter module
------------------------------------------------
.. automodule:: aws_xray_sdk.core.utils.atomic_counter
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.utils.compat module
---------------------------------------
.. automodule:: aws_xray_sdk.core.utils.compat
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.core.utils.search\_pattern module
------------------------------------------------
.. automodule:: aws_xray_sdk.core.utils.search_pattern
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.core.utils
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.aiobotocore.rst
================================================
aws\_xray\_sdk.ext.aiobotocore package
======================================
Submodules
----------
aws\_xray\_sdk.ext.aiobotocore.patch module
-------------------------------------------
.. automodule:: aws_xray_sdk.ext.aiobotocore.patch
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.aiobotocore
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.aiohttp.rst
================================================
aws\_xray\_sdk.ext.aiohttp package
==================================
Submodules
----------
aws\_xray\_sdk.ext.aiohttp.client module
----------------------------------------
.. automodule:: aws_xray_sdk.ext.aiohttp.client
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.ext.aiohttp.middleware module
--------------------------------------------
.. automodule:: aws_xray_sdk.ext.aiohttp.middleware
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.aiohttp
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.botocore.rst
================================================
aws\_xray\_sdk.ext.botocore package
===================================
Submodules
----------
aws\_xray\_sdk.ext.botocore.patch module
----------------------------------------
.. automodule:: aws_xray_sdk.ext.botocore.patch
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.botocore
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.django.rst
================================================
aws\_xray\_sdk.ext.django package
=================================
Submodules
----------
aws\_xray\_sdk.ext.django.apps module
-------------------------------------
.. automodule:: aws_xray_sdk.ext.django.apps
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.ext.django.conf module
-------------------------------------
.. automodule:: aws_xray_sdk.ext.django.conf
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.ext.django.db module
-----------------------------------
.. automodule:: aws_xray_sdk.ext.django.db
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.ext.django.middleware module
-------------------------------------------
.. automodule:: aws_xray_sdk.ext.django.middleware
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.ext.django.templates module
------------------------------------------
.. automodule:: aws_xray_sdk.ext.django.templates
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.django
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.flask.rst
================================================
aws\_xray\_sdk.ext.flask package
================================
Submodules
----------
aws\_xray\_sdk.ext.flask.middleware module
------------------------------------------
.. automodule:: aws_xray_sdk.ext.flask.middleware
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.flask
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.flask_sqlalchemy.rst
================================================
aws\_xray\_sdk.ext.flask\_sqlalchemy package
============================================
Submodules
----------
aws\_xray\_sdk.ext.flask\_sqlalchemy.query module
-------------------------------------------------
.. automodule:: aws_xray_sdk.ext.flask_sqlalchemy.query
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.flask_sqlalchemy
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.httplib.rst
================================================
aws\_xray\_sdk.ext.httplib package
==================================
Submodules
----------
aws\_xray\_sdk.ext.httplib.patch module
---------------------------------------
.. automodule:: aws_xray_sdk.ext.httplib.patch
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.httplib
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.httpx.rst
================================================
aws\_xray\_sdk.ext.httpx package
================================
Submodules
----------
aws\_xray\_sdk.ext.httpx.patch module
-------------------------------------
.. automodule:: aws_xray_sdk.ext.httpx.patch
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.httpx
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.mysql.rst
================================================
aws\_xray\_sdk.ext.mysql package
================================
Submodules
----------
aws\_xray\_sdk.ext.mysql.patch module
-------------------------------------
.. automodule:: aws_xray_sdk.ext.mysql.patch
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.mysql
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.pg8000.rst
================================================
aws\_xray\_sdk.ext.pg8000 package
=================================
Submodules
----------
aws\_xray\_sdk.ext.pg8000.patch module
--------------------------------------
.. automodule:: aws_xray_sdk.ext.pg8000.patch
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.pg8000
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.psycopg2.rst
================================================
aws\_xray\_sdk.ext.psycopg2 package
===================================
Submodules
----------
aws\_xray\_sdk.ext.psycopg2.patch module
----------------------------------------
.. automodule:: aws_xray_sdk.ext.psycopg2.patch
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.psycopg2
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.pymongo.rst
================================================
aws\_xray\_sdk.ext.pymongo package
==================================
Submodules
----------
aws\_xray\_sdk.ext.pymongo.patch module
---------------------------------------
.. automodule:: aws_xray_sdk.ext.pymongo.patch
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.pymongo
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.pymysql.rst
================================================
aws\_xray\_sdk.ext.pymysql package
==================================
Submodules
----------
aws\_xray\_sdk.ext.pymysql.patch module
---------------------------------------
.. automodule:: aws_xray_sdk.ext.pymysql.patch
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.pymysql
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.pynamodb.rst
================================================
aws\_xray\_sdk.ext.pynamodb package
===================================
Submodules
----------
aws\_xray\_sdk.ext.pynamodb.patch module
----------------------------------------
.. automodule:: aws_xray_sdk.ext.pynamodb.patch
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.pynamodb
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.requests.rst
================================================
aws\_xray\_sdk.ext.requests package
===================================
Submodules
----------
aws\_xray\_sdk.ext.requests.patch module
----------------------------------------
.. automodule:: aws_xray_sdk.ext.requests.patch
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.requests
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.rst
================================================
aws\_xray\_sdk.ext package
==========================
Subpackages
-----------
.. toctree::
aws_xray_sdk.ext.aiobotocore
aws_xray_sdk.ext.aiohttp
aws_xray_sdk.ext.botocore
aws_xray_sdk.ext.django
aws_xray_sdk.ext.flask
aws_xray_sdk.ext.flask_sqlalchemy
aws_xray_sdk.ext.httplib
aws_xray_sdk.ext.mysql
aws_xray_sdk.ext.pynamodb
aws_xray_sdk.ext.requests
aws_xray_sdk.ext.sqlalchemy
aws_xray_sdk.ext.sqlite3
Submodules
----------
aws\_xray\_sdk.ext.boto\_utils module
-------------------------------------
.. automodule:: aws_xray_sdk.ext.boto_utils
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.ext.dbapi2 module
--------------------------------
.. automodule:: aws_xray_sdk.ext.dbapi2
:members:
:undoc-members:
:show-inheritance:
aws\_xray\_sdk.ext.util module
------------------------------
.. automodule:: aws_xray_sdk.ext.util
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.sqlalchemy.rst
================================================
aws\_xray\_sdk.ext.sqlalchemy package
=====================================
Subpackages
-----------
.. toctree::
aws_xray_sdk.ext.sqlalchemy.util
Submodules
----------
aws\_xray\_sdk.ext.sqlalchemy.query module
------------------------------------------
.. automodule:: aws_xray_sdk.ext.sqlalchemy.query
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.sqlalchemy
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.sqlalchemy.util.rst
================================================
aws\_xray\_sdk.ext.sqlalchemy.util package
==========================================
Submodules
----------
aws\_xray\_sdk.ext.sqlalchemy.util.decorators module
----------------------------------------------------
.. automodule:: aws_xray_sdk.ext.sqlalchemy.util.decorators
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.sqlalchemy.util
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.sqlalchemy_core.rst
================================================
aws\_xray\_sdk.ext.sqlalchemy\_core package
===========================================
Submodules
----------
aws\_xray\_sdk.ext.sqlalchemy\_core.patch module
------------------------------------------------
.. automodule:: aws_xray_sdk.ext.sqlalchemy_core.patch
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.sqlalchemy_core
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.ext.sqlite3.rst
================================================
aws\_xray\_sdk.ext.sqlite3 package
==================================
Submodules
----------
aws\_xray\_sdk.ext.sqlite3.patch module
---------------------------------------
.. automodule:: aws_xray_sdk.ext.sqlite3.patch
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk.ext.sqlite3
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/aws_xray_sdk.rst
================================================
aws\_xray\_sdk package
======================
Subpackages
-----------
.. toctree::
aws_xray_sdk.core
aws_xray_sdk.ext
Submodules
----------
aws\_xray\_sdk.version module
-----------------------------
.. automodule:: aws_xray_sdk.version
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: aws_xray_sdk
:members:
:undoc-members:
:show-inheritance:
================================================
FILE: docs/basic.rst
================================================
.. _basic:
Basic Usage
===========
The SDK provides a global recorder, ``xray_recorder``, to generate segments and subsegments.
Manually create segment/subsegment
----------------------------------
If you're using a web framework or library that is not supported, or you want to define
your own structure on segments/subsegments, you can manually create
segments and subsegments by using code like the following::
from aws_xray_sdk.core import xray_recorder
xray_recorder.begin_segment('name')
# your code here
xray_recorder.begin_subsegment('name')
# some code block you want to record
xray_recorder.end_subsegment()
xray_recorder.end_segment()
The ``xray_recorder`` keeps one segment per thread.
Therefore, in manual mode, call ``xray_recorder.end_segment()`` before creating a new segment,
otherwise the new segment overwrites the existing one.
To trace a particular code block inside a segment, use a subsegment.
If you open a new subsegment while there is already an open subsegment,
the new subsegment becomes the child of the existing subsegment.
Decorator for function auto-capture
-----------------------------------
A decorator is provided to easily capture basic information as a subsegment on
user defined functions. You can use the decorator like the following::
@xray_recorder.capture('name')
def my_func():
#do something
``xray_recorder`` generates a subsegment for the decorated function, where the name is optional.
If the name argument is not provided, the function name is used as the subsegment name.
If the function is called without an open segment in the context storage, the subsegment is discarded.
Currently the decorator only works with synchronous functions.
Set annotation or metadata
--------------------------
You can add annotations and metadata to an active segment/subsegment.
Annotations are simple key-value pairs that are indexed for use with
`filter expressions <https://docs.aws.amazon.com/xray/latest/devguide/xray-console-filters.html>`_.
Use annotations to record data that you want to use to group traces in the console,
or when calling the GetTraceSummaries API. Annotation keys should only use ASCII letters, numbers, and
the underscore(_) character.
Metadata are key-value pairs with values of any type, including objects and lists, but that are not indexed.
Use metadata to record data you want to store in the trace but don't need to use for searching traces.
You can add annotations/metadata like the following::
from aws_xray_sdk.core import xray_recorder
segment = xray_recorder.current_segment()
# value can be string, number or bool
segment.put_annotation('key', value)
# namespace and key must be string and value is an object
# that can be serialized to json
segment.put_metadata('key', json, 'namespace')
The ``current_segment`` and ``current_subsegment`` functions get the current
open segment or subsegment, respectively, from context storage.
Put these calls between segment or subsegment begin and end statements.
AWS Lambda Integration
----------------------
To integrate with Lambda you must
first enable active tracing on a Lambda function.
See http://docs.aws.amazon.com/lambda/latest/dg/lambda-x-ray.html#using-x-ray for details.
In your Lambda function, you can only begin and end a subsegment.
The Lambda service emits a segment as the root.
This segment cannot be mutated.
Instrument the SDK as you would in any Python script.
Subsegments generated outside of the Lambda handler are discarded.
================================================
FILE: docs/changes.rst
================================================
.. _changes:
.. include:: ../CHANGELOG.rst
================================================
FILE: docs/conf.py
================================================
# -*- coding: utf-8 -*-
#
# aws-xray-sdk documentation build configuration file, created by
# sphinx-quickstart on Wed Aug 2 15:33:56 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys

# Directory containing this conf.py. The previous code used
# os.path.dirname(__name__), which takes the dirname of the module *name*
# string (always ''), not of this file's path; __file__ is what was intended
# and also makes the build independent of the current working directory.
_docs_dir = os.path.dirname(os.path.abspath(__file__))

# Make the repository root importable so autodoc can find aws_xray_sdk.
sys.path.append(os.path.join(_docs_dir, '..'))
# The Django test app provides the settings module configured below.
sys.path.insert(0, os.path.join(_docs_dir, '../tests/ext/django'))

# Django must be set up before autodoc imports aws_xray_sdk.ext.django.
os.environ['DJANGO_SETTINGS_MODULE'] = 'app.settings'
import django
django.setup()

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.doctest',
              'sphinx.ext.intersphinx',
              'sphinx.ext.coverage']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'aws-xray-sdk'
copyright = u'2017, Amazon Web Services'
author = u'Amazon Web Services'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'2.15.0'
# The full version, including alpha/beta/rc tags.
release = u'2.15.0'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
    '**': [
        'about.html',
        'navigation.html',
        'relations.html',  # needs 'show_related': True theme option to display
        'searchbox.html',
        'donate.html',
    ]
}

# -- Options for HTMLHelp output ------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'aws-xray-sdkdoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'aws-xray-sdk.tex', u'aws-xray-sdk Documentation',
     u'Amazon Web Services', 'manual'),
]

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'aws-xray-sdk', u'aws-xray-sdk Documentation',
     [author], 1)
]

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'aws-xray-sdk', u'aws-xray-sdk Documentation',
     author, 'aws-xray-sdk', 'One line description of project.',
     'Miscellaneous'),
]

# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
    'python': ('https://docs.python.org/', None),
}
================================================
FILE: docs/configurations.rst
================================================
.. _configurations:
Configure Global Recorder
=========================
Sampling
--------
Sampling is enabled by default.
Whenever the global recorder creates a segment,
it decides whether to sample this segment.
If it does not sample this segment, it is discarded and not sent to the
X-Ray daemon.
To turn off sampling, use code like the following::
from aws_xray_sdk.core import xray_recorder
xray_recorder.configure(sampling=False)
By default, the SDK uses sampling rules configured in your AWS account. You can also configure the backup sampling rules locally::
xray_recorder.configure(sampling_rules=your_rules)
The input can either be an absolute path to your sampling rule
*.json* file or a dictionary.
To use only local rules for sampling, configure the recorder with a ``LocalSampler``::
from aws_xray_sdk.core.sampling.local.sampler import LocalSampler
xray_recorder.configure(sampler=LocalSampler())
The following code is an example of a rule configuration::
{
"version": 1,
"rules": [
{
"description": "Player moves.",
"service_name": "*",
"http_method": "*",
"url_path": "/api/move/*",
"fixed_target": 0,
"rate": 0.05
}
],
"default": {
"fixed_target": 1,
"rate": 0.1
}
}
This example defines one custom rule and a default rule.
The custom rule applies a five-percent sampling rate
with no minimum number of requests to trace for paths under */api/move/*.
The default rule traces the first request each second and 10 percent of
additional requests.
The SDK applies custom rules in the order in which they are defined.
If a request matches multiple custom rules, the SDK applies only the first rule.
You can use wildcard character "*" and "?" in service_name, http_method and
url_path.
"*" represents any combination of characters. "?" represents a single character.
Note that sampling configurations have no effect if the application runs in AWS Lambda.
Plugins
-------
The plugin adds extra metadata for each segment if the app is running on that environment.
The SDK provides three plugins:
* Amazon EC2 – EC2Plugin adds the instance ID and Availability Zone.
* Elastic Beanstalk – ElasticBeanstalkPlugin adds the environment name, version label, and deployment ID.
* Amazon ECS – ECSPlugin adds the container host name.
To use plugins, use code like the following::
# a tuple of strings
plugins = ('elasticbeanstalk_plugin', 'ec2_plugin', 'ecs_plugin')
# alternatively you can use
plugins = ('ElasticBeanstalkPlugin', 'EC2Plugin', 'ECSPlugin')
xray_recorder.configure(plugins=plugins)
Order matters in the tuple and the origin of the segment is set from the last plugin.
Therefore, in the previous example, if the program runs on ECS, the segment origin is
'AWS::ECS::CONTAINER'.
Plugins must be configured before patching any third party libraries to
avoid unexpected behavior.
Plugins are employed after they are specified.
Context Missing Strategy
------------------------
Defines the recorder behavior when your instrumented code attempts to record data when no segment is open.
Configure like the following::
xray_recorder.configure(context_missing='Your Strategy Name Here')
Supported strategies are:
* RUNTIME_ERROR: throw a SegmentNotFoundException
* LOG_ERROR: log an error and continue
* IGNORE_ERROR: do nothing
Segment Dynamic Naming
----------------------
For a web application you might want to name the segment using host names. You can pass in a pattern
with wildcard character "*" and "?". "*" represents any combination of characters.
"?" represents a single character. If the host name from incoming request's header matches the pattern,
the host name will be used as the segment name, otherwise it uses fallback name defined in ``AWS_XRAY_TRACING_NAME``.
To configure dynamic naming, use code like the following::
xray_recorder.configure(dynamic_naming='*.example.com')
Environment Variables
---------------------
There are three supported environment variables to configure the global
recorder:
* AWS_XRAY_CONTEXT_MISSING: configure context missing strategy
* AWS_XRAY_TRACING_NAME: default segment name
* AWS_XRAY_DAEMON_ADDRESS: where the recorder sends data to over UDP
Environment variables have higher precedence than ``xray_recorder.configure()``.
Logging
-------
The SDK uses Python's built-in ``logging`` module to perform logging.
You can configure the SDK logging the same way you configure other
Python libraries. The following example sets the SDK log level::
logging.basicConfig(level='DEBUG')
logging.getLogger('aws_xray_sdk').setLevel(logging.WARNING)
Context Storage
---------------
The global recorder uses threadlocal to store active segments/subsegments.
You can override the default context class to implement your own context storage::
from aws_xray_sdk.core.context import Context
class MyOwnContext(Context):
def put_segment(self, segment):
# store the segment created by ``xray_recorder`` to the context.
pass
def end_segment(self, end_time=None):
# end the segment in the current context.
pass
def put_subsegment(self, subsegment):
# store the subsegment created by ``xray_recorder`` to the context.
pass
def end_subsegment(self, end_time=None):
# end the subsegment in the current context.
pass
def get_trace_entity(self):
# get the current active trace entity(segment or subsegment).
pass
def set_trace_entity(self, trace_entity):
# manually inject a trace entity to the context storage.
pass
def clear_trace_entities(self):
# clean up context storage.
pass
def handle_context_missing(self):
# behavior on no trace entity to access or mutate.
pass
The function ``current_segment`` and ``current_subsegment`` on recorder level uses
``context.get_trace_entity()`` and dynamically return the expected type by using internal
references inside segment/subsegment objects.
Then you can pass your own context::
my_context=MyOwnContext()
xray_recorder.configure(context=my_context)
Emitter
-------
The default emitter uses non-blocking socket to send data to the X-Ray daemon.
It doesn't retry on IOError. To override the default emitter::
from aws_xray_sdk.core.emitters.udp_emitter import UDPEmitter
class MyOwnEmitter(UDPEmitter):
def send_entity(self, entity):
# send the input segment/subsegment to the X-Ray daemon.
# Return True on success and False on failure.
pass
def set_daemon_address(self, address):
# parse input full address like 127.0.0.1:8000 to ip and port and
# store them to the local emitter properties.
pass
Then you can pass your own emitter::
my_emitter = MyOwnEmitter()
xray_recorder.configure(emitter=my_emitter)
================================================
FILE: docs/frameworks.rst
================================================
.. _frameworks:
Django
======
Configure X-Ray Recorder
------------------------
Make sure you add ``XRayMiddleWare`` as the first entry in your
Django *settings.py* file, as shown in the following example::
MIDDLEWARE = [
'aws_xray_sdk.ext.django.middleware.XRayMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
]
The incoming requests to the Django app are then automatically recorded as
a segment.
To get the current segment and add annotations or metadata as needed,
use the following statement in your application code when processing request::
segment = xray_recorder.current_segment()
For more configurations in your Django ``settings.py`` file,
add the following line::
INSTALLED_APPS = [
'django.contrib.admin',
...
'django.contrib.sessions',
'aws_xray_sdk.ext.django',
]
You can configure the X-Ray recorder in a Django app under the
'XRAY_RECORDER' namespace.
The default values are as follows::
XRAY_RECORDER = {
'AWS_XRAY_DAEMON_ADDRESS': '127.0.0.1:2000',
'AUTO_INSTRUMENT': True, # If turned on built-in database queries and template rendering will be recorded as subsegments
'AWS_XRAY_CONTEXT_MISSING': 'LOG_ERROR',
'PLUGINS': (),
'SAMPLING': True,
'SAMPLING_RULES': None,
'AWS_XRAY_TRACING_NAME': None, # the segment name for segments generated from incoming requests
'DYNAMIC_NAMING': None, # defines a pattern that host names should match
'STREAMING_THRESHOLD': None, # defines when a segment starts to stream out its children subsegments
}
Environment variables have higher precedence over user settings.
If neither is set, the defaults values shown previously are used.
'AWS_XRAY_TRACING_NAME' is required unless specified as an environment variable.
All other keys are optional.
For further information on individual settings, see the :ref:`Configure Global Recorder <configurations>` section.
Local Development
-----------------
When doing Django app local development, if you configured Django built-in database with ``AUTO_INSTRUMENT`` turned-on,
the command ``manage.py runserver`` may fail if ``AWS_XRAY_CONTEXT_MISSING`` is set to ``RUNTIME_ERROR``. This is because
the command ``runserver`` performs migrations check which will generate a subsegment,
the ``xray_recorder`` will raise an error since there is no active segment.
One solution is to set ``AWS_XRAY_CONTEXT_MISSING`` to ``LOG_ERROR`` so it only emits an error message on server startup.
Alternatively if you have defined your own ``ready()`` function for code execution at startup you can manually create a segment
as a placeholder.
The official Django guide recommends deploying Django behind a dedicated application server in production, so this particular issue normally
doesn't exist in production.
Flask
=====
To generate segment based on incoming requests, you need to instantiate the X-Ray middleware for flask::
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.ext.flask.middleware import XRayMiddleware
app = Flask(__name__)
xray_recorder.configure(service='my_app_name')
XRayMiddleware(app, xray_recorder)
Flask built-in template rendering will be wrapped into subsegments.
You can configure the recorder, see :ref:`Configure Global Recorder ` for more details.
Aiohttp
=======
Server
------
For X-Ray to create a segment based on an incoming request, you need register some middleware with aiohttp. As aiohttp
is an asynchronous framework, X-Ray will also need to be configured with an ``AsyncContext`` compared to the default threaded
version.::
import asyncio
from aiohttp import web
from aws_xray_sdk.ext.aiohttp.middleware import middleware
from aws_xray_sdk.core.async_context import AsyncContext
from aws_xray_sdk.core import xray_recorder
# Configure X-Ray to use AsyncContext
xray_recorder.configure(service='service_name', context=AsyncContext())
async def handler(request):
return web.Response(body='Hello World')
loop = asyncio.get_event_loop()
# Use X-Ray SDK middleware; it's crucial that the X-Ray middleware comes first
app = web.Application(middlewares=[middleware])
app.router.add_get("/", handler)
web.run_app(app)
There are two things to note from the example above. Firstly a middleware coroutine from aws-xray-sdk is provided during the creation
of an aiohttp server app. Lastly the ``xray_recorder`` has also been configured with a name and an ``AsyncContext``. See
:ref:`Configure Global Recorder ` for more information about configuring the ``xray_recorder``.
Client
------
Since 3.0.0 aiohttp provides a generic object that allows third-party packages to gather the different events that occurred during an HTTP call, so X-Ray
can be configured to track these requests as subsegments using the `aws_xray_trace_config` function. This will return a valid `TraceConfig` ready to be installed
in any `aiohttp.ClientSession`. The following example shows how it can be used.::
from aws_xray_sdk.ext.aiohttp.client import aws_xray_trace_config
trace_config = aws_xray_trace_config()
async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:
        async with session.get(url) as resp:
await resp.read()
================================================
FILE: docs/index.rst
================================================
.. aws-xray-sdk documentation master file, created by
sphinx-quickstart on Wed Aug 2 15:33:56 2017.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Welcome to the AWS X-Ray SDK for Python!
========================================
This project is open sourced in Github. Please see: https://github.com/aws/aws-xray-sdk-python.
The AWS X-Ray service accepts application information in the form of trace segments.
A trace segment represents the work done by a single machine as a part of the entire task or request.
A set of trace segments which share the same trace ID form a trace.
A trace represents a full unit of work completed for a single task or request.
Learn more about AWS X-Ray service: https://aws.amazon.com/xray/.
The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit
information from within their applications to the AWS X-Ray service.
You can get started in minutes using ``pip`` or by downloading a zip file.
Currently supported web frameworks and libraries:
* aioboto3/aiobotocore
* aiohttp >=2.3
* boto3/botocore
* Bottle
* Django >=1.10
* Flask
* httplib/http.client
* mysql-connector
* pg8000
* psycopg2
* psycopg (psycopg3)
* pymongo
* pymysql
* pynamodb
* requests
* SQLAlchemy
* sqlite3
You must have the X-Ray daemon running to use the SDK.
For information about installing and configuring the daemon see:
http://docs.aws.amazon.com/xray/latest/devguide/xray-daemon.html.
Contents:
.. toctree::
:maxdepth: 2
Basic Usage
Recorder Configurations
Third Party Libraries
Working with Web Frameworks
Change Log
License
Indices and tables
==================
* :ref:`modindex`
* :ref:`search`
================================================
FILE: docs/license.rst
================================================
.. _license:
License
=======
Please see Github page on https://github.com/aws/aws-xray-sdk-python/blob/master/LICENSE.
================================================
FILE: docs/make.bat
================================================
@ECHO OFF

REM Command file for Sphinx documentation.
REM Usage: make.bat <target>   (run "make.bat help" to list available targets)

pushd %~dp0

REM Fall back to invoking Sphinx as a Python module when SPHINXBUILD is unset.
if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=python -msphinx
)
set BUILDDIR=_build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
set I18NSPHINXOPTS=%SPHINXOPTS% .
if NOT "%PAPER%" == "" (
	set ALLSPHINXOPTS=-D latex_elements.papersize=%PAPER% %ALLSPHINXOPTS%
	set I18NSPHINXOPTS=-D latex_elements.papersize=%PAPER% %I18NSPHINXOPTS%
)

if "%1" == "" goto help

if "%1" == "help" (
	:help
	echo.Please use `make ^<target^>` where ^<target^> is one of
	echo.  html       to make standalone HTML files
	echo.  dirhtml    to make HTML files named index.html in directories
	echo.  singlehtml to make a single large HTML file
	echo.  pickle     to make pickle files
	echo.  json       to make JSON files
	echo.  htmlhelp   to make HTML files and an HTML help project
	echo.  qthelp     to make HTML files and a qthelp project
	echo.  devhelp    to make HTML files and a Devhelp project
	echo.  epub       to make an epub
	echo.  epub3      to make an epub3
	echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
	echo.  text       to make text files
	echo.  man        to make manual pages
	echo.  texinfo    to make Texinfo files
	echo.  gettext    to make PO message catalogs
	echo.  changes    to make an overview over all changed/added/deprecated items
	echo.  xml        to make Docutils-native XML files
	echo.  pseudoxml  to make pseudoxml-XML files for display purposes
	echo.  linkcheck  to check all external links for integrity
	echo.  doctest    to run all doctests embedded in the documentation if enabled
	echo.  coverage   to run coverage check of the documentation if enabled
	echo.  dummy      to check syntax errors of document sources
	goto end
)

if "%1" == "clean" (
	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
	del /q /s %BUILDDIR%\*
	goto end
)

REM Check that sphinx-build is available before attempting any build.
%SPHINXBUILD% 1>NUL 2>NUL
if errorlevel 1 (
	echo.
	echo.The Sphinx module was not found. Make sure you have Sphinx installed,
	echo.then set the SPHINXBUILD environment variable to point to the full
	echo.path of the 'sphinx-build' executable. Alternatively you may add the
	echo.Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)

if "%1" == "html" (
	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
	goto end
)

if "%1" == "dirhtml" (
	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
	goto end
)

if "%1" == "singlehtml" (
	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
	goto end
)

if "%1" == "pickle" (
	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the pickle files.
	goto end
)

if "%1" == "json" (
	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the JSON files.
	goto end
)

if "%1" == "htmlhelp" (
	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
	goto end
)

if "%1" == "qthelp" (
	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\aws_xray_sdk.qhcp
	echo.To view the help file:
	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\aws_xray_sdk.ghc
	goto end
)

if "%1" == "devhelp" (
	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished.
	goto end
)

if "%1" == "epub" (
	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The epub file is in %BUILDDIR%/epub.
	goto end
)

if "%1" == "epub3" (
	%SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The epub3 file is in %BUILDDIR%/epub3.
	goto end
)

if "%1" == "latex" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
	goto end
)

if "%1" == "latexpdf" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	cd %BUILDDIR%/latex
	make all-pdf
	cd %~dp0
	echo.
	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
	goto end
)

if "%1" == "latexpdfja" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	cd %BUILDDIR%/latex
	make all-pdf-ja
	cd %~dp0
	echo.
	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
	goto end
)

if "%1" == "text" (
	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The text files are in %BUILDDIR%/text.
	goto end
)

if "%1" == "man" (
	%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The manual pages are in %BUILDDIR%/man.
	goto end
)

if "%1" == "texinfo" (
	%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
	goto end
)

if "%1" == "gettext" (
	%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
	goto end
)

if "%1" == "changes" (
	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
	if errorlevel 1 exit /b 1
	echo.
	echo.The overview file is in %BUILDDIR%/changes.
	goto end
)

if "%1" == "linkcheck" (
	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
	if errorlevel 1 exit /b 1
	echo.
	echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
	goto end
)

if "%1" == "doctest" (
	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
	if errorlevel 1 exit /b 1
	echo.
	echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
	goto end
)

if "%1" == "coverage" (
	%SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
	if errorlevel 1 exit /b 1
	echo.
	echo.Testing of coverage in the sources finished, look at the ^
results in %BUILDDIR%/coverage/python.txt.
	goto end
)

if "%1" == "xml" (
	%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The XML files are in %BUILDDIR%/xml.
	goto end
)

if "%1" == "pseudoxml" (
	%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
	goto end
)

if "%1" == "dummy" (
	%SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. Dummy builder generates no files.
	goto end
)

:end
popd
================================================
FILE: docs/modules.rst
================================================
aws_xray_sdk
============
.. toctree::
:maxdepth: 4
aws_xray_sdk
================================================
FILE: docs/thirdparty.rst
================================================
.. _thirdparty:
Third Party Library Support
===========================
Patching Supported Libraries
----------------------------
The X-Ray Python SDK supports patching aioboto3, aiobotocore, boto3, botocore, pynamodb, requests,
sqlite3, mysql, httplib, pymongo, pymysql, psycopg2, pg8000, sqlalchemy_core, httpx, and mysql-connector.
To patch, use code like the following in the main app::
from aws_xray_sdk.core import patch_all
patch_all()
``patch_all`` ignores any libraries that are not installed.
To patch specific modules::
from aws_xray_sdk.core import patch
i_want_to_patch = ('botocore') # a tuple that contains the libs you want to patch
patch(i_want_to_patch)
The following modules are available to patch::
SUPPORTED_MODULES = (
'aioboto3',
'aiobotocore',
'boto3',
'botocore',
'pynamodb',
'requests',
'sqlite3',
'mysql',
'httplib',
'pymongo',
'pymysql',
'psycopg2',
'pg8000',
'sqlalchemy_core',
'httpx',
)
Patching boto3 and botocore are equivalent since boto3 depends on botocore.
Patching pynamodb applies the botocore patch as well, as it uses the logic from the botocore
patch to apply the trace header.
Patching mysql
----------------------------
For mysql, only the mysql-connector module is supported and you have to use
code like the following to generate a subsegment for an SQL query::
def call_mysql():
conn = mysql.connector.connect(
host='host',
port='some_port',
user='some_user',
password='your_password',
database='your_db_name'
)
conn.cursor().execute('SHOW TABLES')
Patching aioboto3 and aiobotocore
---------------------------------
On top of patching aioboto3 or aiobotocore, the xray_recorder also needs to be
configured to use the ``AsyncContext``. The following snippet shows how to set
up the X-Ray SDK with an Async Context, bear in mind this requires Python 3.5+::
from aws_xray_sdk.core.async_context import AsyncContext
from aws_xray_sdk.core import xray_recorder
# Configure X-Ray to use AsyncContext
xray_recorder.configure(service='service_name', context=AsyncContext())
See :ref:`Configure Global Recorder ` for more information about
configuring the ``xray_recorder``.
Patching httplib
----------------
httplib is a low-level python module which is used by several third party modules, so
by enabling patching to this module you can gain patching of many modules "for free."
Some examples of modules that depend on httplib: requests and httplib2
================================================
FILE: sample-apps/LICENSE
================================================
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
================================================
FILE: sample-apps/flask/Dockerfile
================================================
# NOTE(review): python:3.6 is end-of-life; consider a supported base image —
# confirm against the SDK's supported Python versions before changing.
FROM python:3.6

WORKDIR /app

# Copy the sample app source and install its pinned dependencies.
COPY . ./
RUN pip install -r requirements.txt

# Run the Flask sample application directly (development server).
CMD ["python", "application.py"]
================================================
FILE: sample-apps/flask/application.py
================================================
import boto3
from flask import Flask
from aws_xray_sdk.core import xray_recorder, patch_all
from aws_xray_sdk.ext.flask.middleware import XRayMiddleware
from aws_xray_sdk.ext.flask_sqlalchemy.query import XRayFlaskSqlAlchemy
import requests
import os
# `application` is the callable name Elastic Beanstalk looks for; keep `app`
# as an alias for conventional Flask usage.
application = app = Flask(__name__)
application.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
application.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///db.sqlite3"
# Instrument incoming requests via the middleware and patch all supported
# libraries (boto3, requests, sqlite3, ...) for outbound tracing.
xray_recorder.configure(service='My Flask Web Application')
XRayMiddleware(app, xray_recorder)
patch_all()
# Traced Flask-SQLAlchemy session used by the /flask-sql-alchemy-call route.
db = XRayFlaskSqlAlchemy(app=application)
class User(db.Model):
    """Minimal model used to exercise Flask-SQLAlchemy query tracing."""
    __tablename__ = 'users'
    # Surrogate primary key.
    id = db.Column(db.Integer, primary_key=True)
    # User name; uniqueness means repeated inserts of the same name would fail.
    name = db.Column(db.String(255), nullable=False, unique=True)
# test http instrumentation
@app.route('/outgoing-http-call')
def callHTTP():
    """Exercise the patched `requests` library with one outbound HTTP call."""
    requests.get("https://aws.amazon.com")
    return "Ok! tracing outgoing http call"
# test aws sdk instrumentation
@app.route('/aws-sdk-call')
def callAWSSDK():
    """Exercise the patched boto3/botocore client with an S3 ListBuckets call."""
    s3 = boto3.client('s3')
    s3.list_buckets()
    return 'Ok! tracing aws sdk call'
# test flask-sql alchemy instrumentation
@app.route('/flask-sql-alchemy-call')
def callSQL():
    """Exercise the traced SQLAlchemy session by staging a user row insert."""
    db.create_all()
    db.session.add(User(name='sql-alchemy-model'))
    return 'Ok! tracing sql call'
@app.route('/')
def default():
    """Health-check endpoint; returns a constant body."""
    return "healthcheck"
if __name__ == "__main__":
address = os.environ.get('LISTEN_ADDRESS')
if address is None:
host = '127.0.0.1'
port = '5000'
else:
host, port = address.split(":")
app.run(host=host, port=int(port), debug=True)
================================================
FILE: sample-apps/flask/requirements.txt
================================================
boto3==1.34.26
certifi==2024.7.4
chardet==5.2.0
Flask==2.3.3
idna==3.7
requests==2.32.0
urllib3==1.26.19
Werkzeug==3.0.6
flask-sqlalchemy==2.5.1
SQLAlchemy==1.4
aws_xray_sdk==2.6.0
================================================
FILE: setup.cfg
================================================
[bdist_wheel]
universal=1
================================================
FILE: setup.py
================================================
from setuptools import setup, find_packages
from os import path
from aws_xray_sdk.version import VERSION
# Read the long description from the README so PyPI renders the project page.
CURRENT_DIR = path.abspath(path.dirname(__file__))
with open(path.join(CURRENT_DIR, 'README.md'), 'r') as f:
    long_description = f.read()

setup(
    name='aws-xray-sdk',
    # Version is single-sourced from aws_xray_sdk/version.py.
    version=VERSION,
    description='The AWS X-Ray SDK for Python (the SDK) enables Python developers to record'
                ' and emit information from within their applications to the AWS X-Ray service.',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/aws/aws-xray-sdk-python',
    author='Amazon Web Services',
    license="Apache License 2.0",
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'Programming Language :: Python :: 3.11',
    ],
    python_requires=">=3.7",
    # Runtime dependencies are intentionally minimal; framework integrations
    # rely on whatever the host application already has installed.
    install_requires=[
        'wrapt',
        'botocore>=1.11.3',
    ],
    keywords='aws xray sdk',
    # Ship the package (and its JSON data files) but never the test suite.
    packages=find_packages(exclude=['tests*']),
    include_package_data=True
)
================================================
FILE: terraform/eb.tf
================================================
# Pin the AWS provider so the integration-test infrastructure is reproducible.
terraform {
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "3.5.0"
    }
  }
}

# Credentials come from the default local profile; the region is parameterized.
provider "aws" {
  profile = "default"
  region  = var.region
}
resource "aws_s3_bucket_public_access_block" "bucket_access" {
bucket = aws_s3_bucket.eb_app_bucket.id
restrict_public_buckets = true
}
resource "aws_s3_bucket" "eb_app_bucket" {
bucket = "${var.resource_prefix}.eb.app.applicationversion"
versioning {
enabled = true
}
server_side_encryption_configuration {
rule {
apply_server_side_encryption_by_default {
sse_algorithm = "AES256"
}
}
}
}
resource "aws_s3_bucket_object" "eb_app_package" {
bucket = aws_s3_bucket.eb_app_bucket.id
key = var.bucket_key
source = var.source_path
}
resource "aws_elastic_beanstalk_application" "eb_app" {
name = "${var.resource_prefix}-EB-App"
description = "Deployment of EB App for integration testing"
}
resource "aws_elastic_beanstalk_application_version" "eb_app_version" {
name = "${var.resource_prefix}-EB-App-1"
application = aws_elastic_beanstalk_application.eb_app.name
bucket = aws_s3_bucket.eb_app_bucket.id
key = aws_s3_bucket_object.eb_app_package.id
}
resource "aws_elastic_beanstalk_environment" "eb_env" {
name = "${var.resource_prefix}-EB-App-Env"
application = aws_elastic_beanstalk_application.eb_app.name
solution_stack_name = "64bit Amazon Linux 2 v3.5.12 running Python 3.8"
tier = "WebServer"
version_label = aws_elastic_beanstalk_application_version.eb_app_version.name
cname_prefix = "${var.resource_prefix}-Eb-app-env"
setting {
namespace = "aws:autoscaling:launchconfiguration"
name = "IamInstanceProfile"
value = "aws-elasticbeanstalk-ec2-role"
}
setting {
namespace = "aws:elasticbeanstalk:xray"
name = "XRayEnabled"
value = "true"
}
setting {
namespace = "aws:autoscaling:launchconfiguration"
name = "DisableIMDSv1"
value = "true"
}
}
================================================
FILE: terraform/fixtures.us-west-2.tfvars
================================================
region = "us-west-2"
bucket_key = "beanstalk/deploy.zip"
source_path = "deploy.zip"
================================================
FILE: terraform/variables.tf
================================================
variable "region" {
type = string
description = "AWS region for deployment of resources"
}
variable "bucket_key" {
type = string
description = "AWS s3 object key"
}
variable "source_path" {
type = string
description = "local source zip path to upload on AWS s3 bucket"
}
variable "resource_prefix" {}
================================================
FILE: tests/__init__.py
================================================
================================================
FILE: tests/distributioncheck/__init__.py
================================================
================================================
FILE: tests/distributioncheck/test_sanity.py
================================================
from aws_xray_sdk.core.models.segment import Segment
def test_create_segment():
    """Distribution sanity check: a Segment constructs and keeps its name."""
    seg = Segment('test')
    assert seg.name == 'test'
================================================
FILE: tests/ext/__init__.py
================================================
from aws_xray_sdk.core import xray_recorder
from ..util import StubbedEmitter

# Disable sampling and swap in a stubbed emitter so extension tests record
# everything locally and never contact a real X-Ray daemon.
xray_recorder.configure(sampling=False)
xray_recorder.emitter = StubbedEmitter()
================================================
FILE: tests/ext/aiobotocore/__init__.py
================================================
================================================
FILE: tests/ext/aiobotocore/test_aiobotocore.py
================================================
import pytest
from aiobotocore.session import get_session
from botocore.stub import Stubber, ANY
from botocore.exceptions import ClientError
from aws_xray_sdk.core import patch
from aws_xray_sdk.core.async_context import AsyncContext
from aws_xray_sdk.core import xray_recorder
patch(('aiobotocore',))
@pytest.fixture(scope='function')
def recorder(event_loop):
    """Configure a fresh async recorder per test and clear entities around it."""
    xray_recorder.configure(
        service='test',
        sampling=False,
        context=AsyncContext(loop=event_loop),
    )
    xray_recorder.clear_trace_entities()
    yield xray_recorder
    xray_recorder.clear_trace_entities()
async def test_describe_table(event_loop, recorder):
    """A 403 from DynamoDB marks the subsegment as an error and records AWS metadata."""
    segment = recorder.begin_segment('name')
    request_id = '1234'
    stubbed_response = {'ResponseMetadata': {'RequestId': request_id, 'HTTPStatusCode': 403}}
    session = get_session()
    async with session.create_client('dynamodb', region_name='eu-west-2') as client:
        with Stubber(client) as stubber:
            stubber.add_response('describe_table', stubbed_response, {'TableName': 'mytable'})
            await client.describe_table(TableName='mytable')

    sub = segment.subsegments[0]
    assert sub.error
    assert sub.http['response']['status'] == 403

    aws = sub.aws
    assert aws['table_name'] == 'mytable'
    assert aws['request_id'] == request_id
    assert aws['region'] == 'eu-west-2'
    assert aws['operation'] == 'DescribeTable'
async def test_s3_parameter_capture(event_loop, recorder):
    """Whitelisted S3 call parameters are captured on the subsegment."""
    segment = recorder.begin_segment('name')
    params = {'Bucket': 'mybucket', 'Key': 'mykey', 'VersionId': 'myversionid'}
    stubbed_response = {'ResponseMetadata': {'RequestId': '1234', 'HTTPStatusCode': 200}}
    session = get_session()
    async with session.create_client('s3', region_name='eu-west-2') as client:
        with Stubber(client) as stubber:
            stubber.add_response('get_object', stubbed_response, params)
            await client.get_object(**params)

    aws = segment.subsegments[0].aws
    assert aws['bucket_name'] == params['Bucket']
    assert aws['key'] == params['Key']
    assert aws['version_id'] == params['VersionId']
    assert aws['operation'] == 'GetObject'
async def test_list_parameter_counting(event_loop, recorder):
    """
    Special parameters shaped as lists are recorded as a count,
    per `para_whitelist.json`.
    """
    segment = recorder.begin_segment('name')
    queue_urls = ['url1', 'url2']
    queue_name_prefix = 'url'
    stubbed_response = {
        'QueueUrls': queue_urls,
        'ResponseMetadata': {
            'RequestId': '1234',
            'HTTPStatusCode': 200,
        }
    }
    session = get_session()
    async with session.create_client('sqs', region_name='eu-west-2') as client:
        with Stubber(client) as stubber:
            stubber.add_response('list_queues', stubbed_response, {'QueueNamePrefix': queue_name_prefix})
            await client.list_queues(QueueNamePrefix='url')

    sub = segment.subsegments[0]
    assert sub.http['response']['status'] == 200

    aws = sub.aws
    assert aws['queue_count'] == len(queue_urls)
    # Whitelisted input parameters are converted to snake case unless the
    # whitelist entry carries an explicit 'rename_to' attribute.
    assert aws['queue_name_prefix'] == queue_name_prefix
async def test_map_parameter_grouping(event_loop, recorder):
    """
    Special parameters shaped as maps are recorded as a list of their keys,
    per `para_whitelist.json`.
    """
    segment = recorder.begin_segment('name')
    stubbed_response = {
        'ResponseMetadata': {
            'RequestId': '1234',
            'HTTPStatusCode': 500,
        }
    }
    session = get_session()
    async with session.create_client('dynamodb', region_name='eu-west-2') as client:
        with Stubber(client) as stubber:
            stubber.add_response('batch_write_item', stubbed_response, {'RequestItems': ANY})
            await client.batch_write_item(RequestItems={'table1': [{}], 'table2': [{}]})

    sub = segment.subsegments[0]
    # A 5xx response marks the subsegment as a fault.
    assert sub.fault
    assert sub.http['response']['status'] == 500
    assert sorted(sub.aws['table_names']) == ['table1', 'table2']
async def test_context_missing_not_swallow_return(event_loop, recorder):
    """With LOG_ERROR and no active segment, the client response is still returned."""
    xray_recorder.configure(
        service='test',
        sampling=False,
        context=AsyncContext(loop=event_loop),
        context_missing='LOG_ERROR',
    )
    expected = {'ResponseMetadata': {'RequestId': '1234', 'HTTPStatusCode': 403}}
    session = get_session()
    async with session.create_client('dynamodb', region_name='eu-west-2') as client:
        with Stubber(client) as stubber:
            stubber.add_response('describe_table', expected, {'TableName': 'mytable'})
            assert await client.describe_table(TableName='mytable') == expected
async def test_context_missing_not_suppress_exception(event_loop, recorder):
    """With LOG_ERROR and no active segment, client errors still propagate."""
    xray_recorder.configure(
        service='test',
        sampling=False,
        context=AsyncContext(loop=event_loop),
        context_missing='LOG_ERROR',
    )
    session = get_session()
    async with session.create_client('dynamodb', region_name='eu-west-2') as client:
        with Stubber(client) as stubber:
            stubber.add_client_error('describe_table', expected_params={'TableName': ANY})
            with pytest.raises(ClientError):
                await client.describe_table(TableName='mytable')
================================================
FILE: tests/ext/aiohttp/__init__.py
================================================
================================================
FILE: tests/ext/aiohttp/test_client.py
================================================
import logging
import pytest
from aiohttp import ClientSession
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.async_context import AsyncContext
from aws_xray_sdk.core.context import MISSING_SEGMENT_MSG
from aws_xray_sdk.core.exceptions.exceptions import SegmentNotFoundException
from aws_xray_sdk.ext.util import strip_url, get_hostname
from aws_xray_sdk.ext.aiohttp.client import aws_xray_trace_config
from aws_xray_sdk.ext.aiohttp.client import REMOTE_NAMESPACE, LOCAL_NAMESPACE
# httpbin.org is created by the same author of requests to make testing http easy.
BASE_URL = 'httpbin.org'
@pytest.fixture(scope='function')
def recorder(loop):
    """
    Initiate a recorder and clear it up once it has been used.

    Yields the configured ``xray_recorder``; the original mistakenly yielded
    the fixture function object itself (``yield recorder``), unlike the
    sibling aiobotocore fixture which yields the recorder.
    """
    xray_recorder.configure(service='test', sampling=False, context=AsyncContext(loop=loop))
    xray_recorder.clear_trace_entities()
    yield xray_recorder
    xray_recorder.clear_trace_entities()
async def test_ok(loop, recorder):
    """A 200 GET is traced with url, method and status on a remote subsegment."""
    xray_recorder.begin_segment('name')
    trace_config = aws_xray_trace_config()
    status_code = 200
    url = 'http://{}/status/{}?foo=bar'.format(BASE_URL, status_code)
    async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:
        async with session.get(url):
            pass

    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.name == get_hostname(url)
    assert sub.namespace == REMOTE_NAMESPACE

    meta = sub.http
    assert meta['request']['url'] == strip_url(url)
    assert meta['request']['method'] == 'GET'
    assert meta['response']['status'] == status_code
async def test_ok_name(loop, recorder):
    """An explicit name on the trace config overrides the hostname-based name."""
    xray_recorder.begin_segment('name')
    trace_config = aws_xray_trace_config(name='test')
    status_code = 200
    url = 'http://{}/status/{}?foo=bar'.format(BASE_URL, status_code)
    async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:
        async with session.get(url):
            pass

    assert xray_recorder.current_segment().subsegments[0].name == 'test'
async def test_error(loop, recorder):
    """A 4xx response flags the subsegment as an error."""
    xray_recorder.begin_segment('name')
    trace_config = aws_xray_trace_config()
    status_code = 400
    url = 'http://{}/status/{}'.format(BASE_URL, status_code)
    async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:
        async with session.post(url):
            pass

    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.name == get_hostname(url)
    assert sub.error

    meta = sub.http
    assert meta['request']['url'] == strip_url(url)
    assert meta['request']['method'] == 'POST'
    assert meta['response']['status'] == status_code
async def test_throttle(loop, recorder):
    """A 429 response flags the subsegment as both error and throttle."""
    xray_recorder.begin_segment('name')
    trace_config = aws_xray_trace_config()
    status_code = 429
    url = 'http://{}/status/{}'.format(BASE_URL, status_code)
    async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:
        async with session.head(url):
            pass

    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.name == get_hostname(url)
    assert sub.error
    assert sub.throttle

    meta = sub.http
    assert meta['request']['url'] == strip_url(url)
    assert meta['request']['method'] == 'HEAD'
    assert meta['response']['status'] == status_code
async def test_fault(loop, recorder):
    """A 5xx response flags the subsegment as a fault."""
    xray_recorder.begin_segment('name')
    trace_config = aws_xray_trace_config()
    status_code = 500
    url = 'http://{}/status/{}'.format(BASE_URL, status_code)
    async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:
        async with session.put(url):
            pass

    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.name == get_hostname(url)
    assert sub.fault

    meta = sub.http
    assert meta['request']['url'] == strip_url(url)
    assert meta['request']['method'] == 'PUT'
    assert meta['response']['status'] == status_code
async def test_invalid_url(loop, recorder):
    """A connection failure yields a local-namespace, faulted subsegment."""
    xray_recorder.begin_segment('name')
    trace_config = aws_xray_trace_config()
    async with ClientSession(loop=loop, trace_configs=[trace_config]) as session:
        try:
            async with session.get('http://doesnt.exist'):
                pass
        except Exception:
            # Swallow the expected connection error so it cannot break the run.
            pass

    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.namespace == LOCAL_NAMESPACE
    assert sub.fault
    assert sub.cause['exceptions'][0].type == 'ClientConnectorError'
async def test_no_segment_raise(loop, recorder):
    """In RUNTIME_ERROR mode, tracing without an open segment must raise."""
    xray_recorder.configure(context_missing='RUNTIME_ERROR')
    config = aws_xray_trace_config()
    url = 'http://{}/status/{}?foo=bar'.format(BASE_URL, 200)
    with pytest.raises(SegmentNotFoundException):
        async with ClientSession(loop=loop, trace_configs=[config]) as session:
            async with session.get(url):
                pass
async def test_no_segment_log_error(loop, recorder, caplog):
    """In LOG_ERROR mode a missing segment is logged and the request proceeds."""
    caplog.set_level(logging.ERROR)
    xray_recorder.configure(context_missing='LOG_ERROR')
    config = aws_xray_trace_config()
    expected_status = 200
    url = 'http://{}/status/{}?foo=bar'.format(BASE_URL, expected_status)
    async with ClientSession(loop=loop, trace_configs=[config]) as session:
        async with session.get(url) as resp:
            actual_status = resp.status
    # The request itself must have gone through untouched.
    assert actual_status == expected_status
    logged = [record.message for record in caplog.records]
    assert MISSING_SEGMENT_MSG in logged
async def test_no_segment_ignore_error(loop, recorder, caplog):
    """In IGNORE_ERROR mode a missing segment is silent and the request proceeds."""
    caplog.set_level(logging.ERROR)
    xray_recorder.configure(context_missing='IGNORE_ERROR')
    config = aws_xray_trace_config()
    expected_status = 200
    url = 'http://{}/status/{}?foo=bar'.format(BASE_URL, expected_status)
    async with ClientSession(loop=loop, trace_configs=[config]) as session:
        async with session.get(url) as resp:
            actual_status = resp.status
    # The request itself must have gone through untouched.
    assert actual_status == expected_status
    logged = [record.message for record in caplog.records]
    assert MISSING_SEGMENT_MSG not in logged
================================================
FILE: tests/ext/aiohttp/test_middleware.py
================================================
"""
Tests the middleware for aiohttp server
Expects pytest-aiohttp
"""
import asyncio
import sys
from unittest.mock import patch
import pytest
from aiohttp import web
from aiohttp.web_exceptions import HTTPUnauthorized
from aws_xray_sdk import global_sdk_config
from aws_xray_sdk.core.async_context import AsyncContext
from aws_xray_sdk.core.emitters.udp_emitter import UDPEmitter
from aws_xray_sdk.core.models import http
from aws_xray_sdk.ext.aiohttp.middleware import middleware
from tests.util import get_new_stubbed_recorder
class CustomStubbedEmitter(UDPEmitter):
    """Stubbed emitter that retains every emitted entity (FIFO) instead of only the last one."""

    def __init__(self, daemon_address='127.0.0.1:2000'):
        super().__init__(daemon_address)
        # Entities accumulate in emission order; pop() drains oldest-first.
        self.local = []

    def send_entity(self, entity):
        self.local.append(entity)

    def pop(self):
        # Oldest entity first; None once the buffer is empty.
        if self.local:
            return self.local.pop(0)
        return None
class ServerTest:
    """Test server: holds the event loop and exposes the aiohttp handlers under test."""
    # Not a pytest test class despite the name.
    __test__ = False

    def __init__(self, loop):
        self._loop = loop

    async def handle_ok(self, request: web.Request) -> web.Response:
        """GET / — 200 'ok'; echoes an optional Content-Length from the query string."""
        headers = None
        if "content_length" in request.query:
            headers = {'Content-Length': request.query['content_length']}
        return web.Response(text="ok", headers=headers)

    async def handle_error(self, request: web.Request) -> web.Response:
        """GET /error — plain 404 response."""
        return web.Response(text="not found", status=404)

    async def handle_unauthorized(self, request: web.Request) -> web.Response:
        """GET /unauthorized — raises aiohttp's HTTPUnauthorized (401)."""
        raise HTTPUnauthorized()

    async def handle_exception(self, request: web.Request) -> web.Response:
        """GET /exception — raises CancelledError; significant because from
        Python 3.8 onward CancelledError extends BaseException, not Exception."""
        raise asyncio.CancelledError()

    async def handle_delay(self, request: web.Request) -> web.Response:
        """GET /delay — sleeps 0.3s; asyncio.sleep only accepts an explicit
        loop argument before Python 3.8."""
        if sys.version_info >= (3, 8):
            await asyncio.sleep(0.3)
        else:
            await asyncio.sleep(0.3, loop=self._loop)
        return web.Response(text="ok")

    def get_app(self) -> web.Application:
        """Build the application with the X-Ray middleware installed."""
        application = web.Application(middlewares=[middleware])
        routes = {
            '/': self.handle_ok,
            '/error': self.handle_error,
            '/exception': self.handle_exception,
            '/unauthorized': self.handle_unauthorized,
            '/delay': self.handle_delay,
        }
        for pattern, handler in routes.items():
            application.router.add_get(pattern, handler)
        return application

    @classmethod
    def app(cls, loop=None) -> web.Application:
        """Convenience constructor returning a ready-to-serve application."""
        return cls(loop=loop).get_app()
@pytest.fixture(scope='function')
def recorder(loop):
    """Patch the middleware with a stubbed recorder and reset context around each test."""
    stubbed = get_new_stubbed_recorder()
    stubbed.configure(service='test', sampling=False, context=AsyncContext(loop=loop))
    patcher = patch('aws_xray_sdk.ext.aiohttp.middleware.xray_recorder', stubbed)
    patcher.start()
    stubbed.clear_trace_entities()
    yield stubbed
    # Teardown: re-enable the SDK (a test may have disabled it), wipe state, unpatch.
    global_sdk_config.set_sdk_enabled(True)
    stubbed.clear_trace_entities()
    patcher.stop()
async def test_ok(aiohttp_client, loop, recorder):
    """A plain 200 response produces a closed segment with request/response meta."""
    client = await aiohttp_client(ServerTest.app(loop=loop))
    resp = await client.get('/')
    assert resp.status == 200
    segment = recorder.emitter.pop()
    assert not segment.in_progress
    meta = segment.http
    assert meta['request']['method'] == 'GET'
    assert meta['request']['url'] == 'http://127.0.0.1:{port}/'.format(port=client.port)
    assert meta['response']['status'] == 200
async def test_ok_x_forwarded_for(aiohttp_client, loop, recorder):
    """The X-Forwarded-For header value becomes the recorded client_ip."""
    client = await aiohttp_client(ServerTest.app(loop=loop))
    resp = await client.get('/', headers={'X-Forwarded-For': 'foo'})
    assert resp.status == 200
    request_meta = recorder.emitter.pop().http['request']
    assert request_meta['client_ip'] == 'foo'
    assert request_meta['x_forwarded_for']
async def test_ok_content_length(aiohttp_client, loop, recorder):
    """A Content-Length response header is captured as an int on the segment."""
    client = await aiohttp_client(ServerTest.app(loop=loop))
    response = await client.get('/?content_length=100')
    assert response.status == 200
    captured = recorder.emitter.pop().http['response']
    assert captured['content_length'] == 100
async def test_error(aiohttp_client, loop, recorder):
    """A handler returning 404 marks the segment as an error."""
    client = await aiohttp_client(ServerTest.app(loop=loop))
    response = await client.get('/error')
    assert response.status == 404
    segment = recorder.emitter.pop()
    assert not segment.in_progress
    assert segment.error
    meta = segment.http
    assert meta['request']['method'] == 'GET'
    assert meta['request']['url'] == 'http://127.0.0.1:{port}/error'.format(port=client.port)
    assert meta['request']['client_ip'] == '127.0.0.1'
    assert meta['response']['status'] == 404
async def test_exception(aiohttp_client, loop, recorder):
    """A CancelledError escaping the handler is recorded as a 500 fault with cause."""
    client = await aiohttp_client(ServerTest.app(loop=loop))
    with pytest.raises(Exception):
        await client.get('/exception')
    segment = recorder.emitter.pop()
    assert not segment.in_progress
    assert segment.fault
    meta = segment.http
    assert meta['request']['method'] == 'GET'
    assert meta['request']['url'] == 'http://127.0.0.1:{port}/exception'.format(port=client.port)
    assert meta['request']['client_ip'] == '127.0.0.1'
    assert meta['response']['status'] == 500
    assert segment.cause['exceptions'][0].type == 'CancelledError'
async def test_unhauthorized(aiohttp_client, loop, recorder):
    """A 401 response marks the segment as an error (not a fault)."""
    client = await aiohttp_client(ServerTest.app(loop=loop))
    response = await client.get('/unauthorized')
    assert response.status == 401
    segment = recorder.emitter.pop()
    assert not segment.in_progress
    assert segment.error
    meta = segment.http
    assert meta['request']['method'] == 'GET'
    assert meta['request']['url'] == 'http://127.0.0.1:{port}/unauthorized'.format(port=client.port)
    assert meta['request']['client_ip'] == '127.0.0.1'
    assert meta['response']['status'] == 401
async def test_response_trace_header(aiohttp_client, loop, recorder):
    """The middleware echoes the trace root back in the X-Ray response header."""
    client = await aiohttp_client(ServerTest.app(loop=loop))
    resp = await client.get('/')
    header_value = resp.headers[http.XRAY_HEADER]
    segment = recorder.emitter.pop()
    assert 'Root=%s' % segment.trace_id in header_value
async def test_concurrent(aiohttp_client, loop, recorder):
    """Concurrent requests must each be emitted with a unique segment id."""
    client = await aiohttp_client(ServerTest.app(loop=loop))
    # Swap in an emitter that keeps every segment, not only the last one.
    recorder.emitter = CustomStubbedEmitter()

    async def get_delay():
        resp = await client.get('/delay')
        assert resp.status == 200

    tasks = [loop.create_task(get_delay()) for _ in range(9)]
    if sys.version_info >= (3, 8):
        await asyncio.wait(tasks)
    else:
        # asyncio.wait accepted an explicit loop only before Python 3.8.
        await asyncio.wait(tasks, loop=loop)
    # Every emitted segment id must be distinct.
    ids = [item.id for item in recorder.emitter.local]
    assert len(set(ids)) == len(ids)
async def test_disabled_sdk(aiohttp_client, loop, recorder):
    """With the SDK disabled no segment is emitted, but the request still succeeds."""
    global_sdk_config.set_sdk_enabled(False)
    client = await aiohttp_client(ServerTest.app(loop=loop))
    resp = await client.get('/')
    assert resp.status == 200
    assert not recorder.emitter.pop()
================================================
FILE: tests/ext/botocore/__init__.py
================================================
================================================
FILE: tests/ext/botocore/test_botocore.py
================================================
import pytest
import botocore.session
from botocore.stub import Stubber, ANY
from aws_xray_sdk.core import patch
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.context import Context
# Patch botocore before any client is created so every client is instrumented.
patch(('botocore',))
session = botocore.session.get_session()
# Request id stubbed into the mocked AWS responses below.
REQUEST_ID = '1234'
@pytest.fixture(autouse=True)
def construct_ctx():
    """
    Reset context storage and open a fresh segment for every test so the
    subsegments created by patched clients have a parent; clean up afterwards.
    """
    xray_recorder.configure(service='test', sampling=False, context=Context())
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment('name')
    yield
    xray_recorder.clear_trace_entities()
def test_ddb_table_name():
    """DescribeTable records table name, request id, region and operation."""
    ddb = session.create_client('dynamodb', region_name='us-west-2')
    stubbed_response = {
        'ResponseMetadata': {
            'RequestId': REQUEST_ID,
            'HTTPStatusCode': 403,
        }
    }
    with Stubber(ddb) as stubber:
        stubber.add_response('describe_table', stubbed_response, {'TableName': 'mytable'})
        ddb.describe_table(TableName='mytable')
    sub = xray_recorder.current_segment().subsegments[0]
    # 403 counts as a client error, not a fault.
    assert sub.error
    assert sub.http['response']['status'] == 403
    meta = sub.aws
    assert meta['operation'] == 'DescribeTable'
    assert meta['region'] == 'us-west-2'
    assert meta['request_id'] == REQUEST_ID
    assert meta['table_name'] == 'mytable'
def test_s3_bucket_name_capture():
    """ListObjectsV2 records the bucket name plus standard AWS metadata."""
    s3 = session.create_client('s3', region_name='us-west-2')
    bucket_name = 'mybucket'
    stubbed_response = {
        'ResponseMetadata': {
            'RequestId': REQUEST_ID,
            'HTTPStatusCode': 200,
        }
    }
    with Stubber(s3) as stubber:
        stubber.add_response('list_objects_v2', stubbed_response, {'Bucket': bucket_name})
        s3.list_objects_v2(Bucket=bucket_name)
    meta = xray_recorder.current_segment().subsegments[0].aws
    assert meta['operation'] == 'ListObjectsV2'
    assert meta['region'] == 'us-west-2'
    assert meta['request_id'] == REQUEST_ID
    assert meta['bucket_name'] == bucket_name
def test_list_parameter_counting():
    """
    Test special parameters that have shape of list are recorded
    as count based on `para_whitelist.json`
    """
    sqs = session.create_client('sqs', region_name='us-west-2')
    queue_urls = ['url1', 'url2']
    queue_name_prefix = 'url'
    response = {
        'QueueUrls': queue_urls,
        'ResponseMetadata': {
            # Use the module-level constant rather than a duplicated literal
            # so the stub stays consistent with the other tests in this file.
            'RequestId': REQUEST_ID,
            'HTTPStatusCode': 200,
        }
    }
    with Stubber(sqs) as stubber:
        stubber.add_response('list_queues', response, {'QueueNamePrefix': queue_name_prefix})
        # Reuse the prefix variable so the stubbed expectation and the actual
        # call cannot drift apart.
        sqs.list_queues(QueueNamePrefix=queue_name_prefix)
    subsegment = xray_recorder.current_segment().subsegments[0]
    assert subsegment.http['response']['status'] == 200
    aws_meta = subsegment.aws
    # List-shaped whitelisted parameters are recorded as counts.
    assert aws_meta['queue_count'] == len(queue_urls)
    # all whitelisted input parameters will be converted to snake case
    # unless there is an explicit 'rename_to' attribute in json key
    assert aws_meta['queue_name_prefix'] == queue_name_prefix
def test_map_parameter_grouping():
    """
    Test special parameters that have shape of map are recorded
    as a list of keys based on `para_whitelist.json`
    """
    ddb = session.create_client('dynamodb', region_name='us-west-2')
    stubbed_response = {
        'ResponseMetadata': {
            'RequestId': REQUEST_ID,
            'HTTPStatusCode': 500,
        }
    }
    with Stubber(ddb) as stubber:
        stubber.add_response('batch_write_item', stubbed_response, {'RequestItems': ANY})
        ddb.batch_write_item(RequestItems={'table1': [{}], 'table2': [{}]})
    sub = xray_recorder.current_segment().subsegments[0]
    # A 500 from the service is recorded as a fault.
    assert sub.fault
    assert sub.http['response']['status'] == 500
    assert sorted(sub.aws['table_names']) == ['table1', 'table2']
def test_pass_through_on_context_missing():
    """
    The built-in patcher or subsegment capture logic should not throw
    any error when a `None` subsegment created from `LOG_ERROR` missing context.
    """
    xray_recorder.configure(context_missing='LOG_ERROR')
    xray_recorder.clear_trace_entities()
    ddb = session.create_client('dynamodb', region_name='us-west-2')
    response = {
        'ResponseMetadata': {
            'RequestId': REQUEST_ID,
            'HTTPStatusCode': 200,
        }
    }
    try:
        with Stubber(ddb) as stubber:
            stubber.add_response('describe_table', response, {'TableName': 'mytable'})
            result = ddb.describe_table(TableName='mytable')
        assert result is not None
    finally:
        # Restore strict mode even if the call or assertion fails, so a
        # failure here cannot leak LOG_ERROR mode into subsequent tests.
        xray_recorder.configure(context_missing='RUNTIME_ERROR')
def test_sns_publish_parameters():
    """Publish records the topic ARN plus standard AWS metadata."""
    sns = session.create_client('sns', region_name='us-west-2')
    stubbed_response = {
        'ResponseMetadata': {
            'RequestId': REQUEST_ID,
            'HTTPStatusCode': 200,
        }
    }
    expected_params = {'TopicArn': 'myAmazingTopic', 'Message': 'myBodaciousMessage'}
    with Stubber(sns) as stubber:
        stubber.add_response('publish', stubbed_response, expected_params)
        sns.publish(TopicArn='myAmazingTopic', Message='myBodaciousMessage')
    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.http['response']['status'] == 200
    meta = sub.aws
    assert meta['operation'] == 'Publish'
    assert meta['region'] == 'us-west-2'
    assert meta['request_id'] == REQUEST_ID
    assert meta['topic_arn'] == 'myAmazingTopic'
================================================
FILE: tests/ext/bottle/__init__.py
================================================
================================================
FILE: tests/ext/bottle/test_bottle.py
================================================
import pytest
from bottle import Bottle, request, response, template, view, HTTPError, TEMPLATE_PATH
from webtest import TestApp as WebApp
from aws_xray_sdk import global_sdk_config
from aws_xray_sdk.ext.bottle.middleware import XRayMiddleware
from aws_xray_sdk.core.context import Context
from aws_xray_sdk.core import lambda_launcher
from aws_xray_sdk.core.models import http, facade_segment, segment as segment_model
from tests.util import get_new_stubbed_recorder
import os
# define Bottle app for testing purpose
# Make this test package's views/ directory visible to bottle's template lookup.
TEMPLATE_PATH.insert(0, os.path.dirname(__file__) + '/views')
app = Bottle()
@app.route('/ok')
def ok():
    """Plain 200 handler used by most tests."""
    response_data = 'ok'
    # Bottle does not always set the Content-Length header itself; set it
    # explicitly so test_ok can assert on it.
    response.content_length = len(response_data)
    return response_data
@app.route('/error')
def error():
    """Return a 404 by mutating bottle's thread-local response object."""
    response.status = 404
    return 'Not Found'
@app.route('/client_error')
def faulty_client():
    """Raise a user-defined exception carrying a 4XX status code."""
    class CustomError(Exception):
        # Mirrors the (description, status_code) attributes the middleware
        # inspects to classify the failure as a client error.
        def __init__(self, description=None, status_code=None):
            self.description = description
            self.status_code = status_code
    raise CustomError(description='Bad request', status_code=400)
@app.route('/server_error')
def faulty_server():
    """Raise bottle's HTTPError with a 5XX status."""
    raise HTTPError(status=503, body='Service Unavailable')
@app.route('/fault')
def fault():
    """Deliberately raise KeyError so fault recording can be exercised."""
    return {}['key']
@app.route('/template')
def template_():
    """Render an inline template string (exercises the render subsegment)."""
    return template('Hello {{name}}!', name='World')
@app.route('/view')
@view('index')
def view_(name='bottle'):
    """Render views/index.tpl through the @view decorator."""
    return dict(name=name)
# add X-Ray plugin to Bottle app
recorder = get_new_stubbed_recorder()
recorder.configure(service='test', sampling=False, context=Context())
app.install(XRayMiddleware(recorder))
# Wrap in webtest's TestApp so requests can be made without a running server.
app = WebApp(app)
BASE_URL = 'http://localhost:80{}'
@pytest.fixture(autouse=True)
def cleanup():
    """
    Clean up context storage before and after each test run, and re-enable
    the SDK afterwards in case a test disabled it.
    """
    recorder.clear_trace_entities()
    yield
    recorder.clear_trace_entities()
    global_sdk_config.set_sdk_enabled(True)
def test_ok():
    """A 200 route records method, url, client ip, status and content length."""
    path = '/ok'
    app.get(path, extra_environ={'REMOTE_ADDR': '127.0.0.1'})
    segment = recorder.emitter.pop()
    assert not segment.in_progress
    meta = segment.http
    assert meta['request']['method'] == 'GET'
    assert meta['request']['url'] == BASE_URL.format(path)
    assert meta['request']['client_ip'] == '127.0.0.1'
    assert meta['response']['status'] == 200
    assert meta['response']['content_length'] == 2
def test_error():
    """A 404 route records error status and the forwarded client ip."""
    path = '/error'
    try:
        app.get(path, extra_environ={'HTTP_X_FORWARDED_FOR': '192.168.0.0'})
    except Exception:
        # webtest raises on non-2xx statuses; the emitted segment is what matters.
        pass
    segment = recorder.emitter.pop()
    assert not segment.in_progress
    assert segment.error
    meta = segment.http
    assert meta['request']['method'] == 'GET'
    assert meta['request']['url'] == BASE_URL.format(path)
    assert meta['request']['client_ip'] == '192.168.0.0'
    assert meta['response']['status'] == 404
def test_custom_client_error():
    """A raised custom 4XX exception is recorded as an error with its cause."""
    path = '/client_error'
    try:
        app.get(path)
    except Exception:
        pass
    segment = recorder.emitter.pop()
    assert not segment.in_progress
    assert segment.error
    meta = segment.http
    assert meta['response']['status'] == 400
    assert meta['request']['method'] == 'GET'
    assert meta['request']['url'] == BASE_URL.format(path)
    assert segment.cause['exceptions'][0].type == 'CustomError'
def test_server_error():
    """A raised HTTPError(503) is recorded as a fault with its exception cause."""
    path = '/server_error'
    try:
        app.get(path)
    except Exception:
        # Fixed: the exception was bound to an unused name (`as e`);
        # we only swallow it so the emitted segment can be inspected.
        pass
    segment = recorder.emitter.pop()
    assert not segment.in_progress
    assert segment.fault
    response = segment.http['response']
    assert response['status'] == 503
    exception = segment.cause['exceptions'][0]
    assert exception.type == 'HTTPError'
def test_fault():
    """An uncaught KeyError becomes a 500 fault with the exception recorded."""
    path = '/fault'
    try:
        app.get(path)
    except Exception:
        pass
    segment = recorder.emitter.pop()
    assert not segment.in_progress
    assert segment.fault
    assert segment.http['response']['status'] == 500
    assert segment.cause['exceptions'][0].type == 'KeyError'
def test_render_template():
    """Rendering via template() adds a closed local-namespace subsegment."""
    app.get('/template')
    segment = recorder.emitter.pop()
    assert not segment.in_progress
    # The render must have been captured as a subsegment.
    assert segment.subsegments
    render_sub = segment.subsegments[0]
    assert render_sub.name
    assert render_sub.namespace == 'local'
    assert not render_sub.in_progress
def test_render_view():
    """Rendering via the @view decorator adds a closed local-namespace subsegment."""
    path = '/view'
    response = app.get(path)
    # Expected body comes from views/index.tpl ('Hello {{name.title()}}!' + second line).
    # NOTE: this literal was garbled/unterminated in the extracted source and has
    # been reconstructed from the template file.
    assert response.text == "Hello Bottle!\nHow are you?\n"
    segment = recorder.emitter.pop()
    assert not segment.in_progress
    # segment should contain a template render subsegment
    assert segment.subsegments
    subsegment = segment.subsegments[0]
    assert subsegment.name
    assert subsegment.namespace == 'local'
    assert not subsegment.in_progress
def test_incoming_sampling_decision_respected():
    """'Sampled=0' in the incoming trace header suppresses segment emission."""
    # (Removed a commented-out duplicate of the request line below.)
    path = '/ok'
    resp = app.get(path, headers={http.XRAY_HEADER: 'Sampled=0'})
    resp_header = resp.headers[http.XRAY_HEADER]
    segment = recorder.emitter.pop()
    assert not segment
    # The SDK should still send the headers back regardless of sampling decision
    assert 'Root' in resp_header
def test_trace_header_data_perservation():
    """Arbitrary key/value pairs in the incoming trace header survive on the segment."""
    app.get('/ok', headers={http.XRAY_HEADER: 'k1=v1'})
    origin_header = recorder.emitter.pop().get_origin_trace_header()
    assert origin_header.data['k1'] == 'v1'
def test_sampled_response_header():
    """An undecided 'Sampled=?' request gets 'Sampled=1' and the trace id echoed back."""
    app.get('/ok', headers={http.XRAY_HEADER: 'Sampled=?;k1=v1'})
    segment = recorder.emitter.pop()
    resp_header = response.headers.get(http.XRAY_HEADER)
    assert 'Sampled=1' in resp_header
    assert segment.trace_id in resp_header
def test_disabled_sdk():
    """Disabling the SDK suppresses segment emission for bottle requests."""
    global_sdk_config.set_sdk_enabled(False)
    app.get('/ok')
    assert not recorder.emitter.pop()
def test_lambda_serverless():
    """In a Lambda (facade-segment) context the plugin must create subsegments,
    not segments, and must preserve incoming trace header data."""
    TRACE_ID = '1-5759e988-bd862e3fe1be46a994272793'
    PARENT_ID = '53995c3f42cd8ad8'
    HEADER_VAR = 'Root=%s;Parent=%s;Sampled=1' % (TRACE_ID, PARENT_ID)
    # LambdaContext reads the trace header from this environment variable.
    os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY] = HEADER_VAR
    lambda_context = lambda_launcher.LambdaContext()
    new_recorder = get_new_stubbed_recorder()
    new_recorder.configure(service='test', sampling=False, context=lambda_context)
    new_app = Bottle()

    @new_app.route('/subsegment')
    def subsegment_():
        # Test in between request and make sure Serverless creates a subsegment instead of a segment.
        # Ensure that the parent segment is a facade segment.
        assert new_recorder.current_subsegment()
        assert type(new_recorder.current_segment()) == facade_segment.FacadeSegment
        return 'ok'

    @new_app.route('/trace_header')
    def trace_header():
        # Ensure trace header is preserved.
        subsegment = new_recorder.current_subsegment()
        header = subsegment.get_origin_trace_header()
        assert header.data['k1'] == 'v1'
        return 'ok'

    plugin = XRayMiddleware(new_recorder)
    # Force the lambda path without relying on environment detection.
    plugin._in_lambda_ctx = True
    new_app.install(plugin)
    app_client = WebApp(new_app)
    path = '/subsegment'
    app_client.get(path)
    # NOTE(review): this call on the raw Bottle app looks like a stray no-op
    # (app_client.get above already issued the request) — confirm intent.
    new_app.get(path)
    segment = recorder.emitter.pop()
    assert not segment  # Segment should be none because it's created and ended by the plugin
    path2 = '/trace_header'
    app_client.get(path2, headers={http.XRAY_HEADER: 'k1=v1'})
def test_lambda_default_ctx():
    """Without a lambda context the middleware falls back to real segments."""
    new_recorder = get_new_stubbed_recorder()
    new_recorder.configure(service='test', sampling=False)
    new_app = Bottle()

    @new_app.route('/segment')
    def segment_():
        # Inside the request the middleware must have opened a real Segment.
        assert new_recorder.current_segment()
        assert type(new_recorder.current_segment()) == segment_model.Segment
        return 'ok'

    new_app.install(XRayMiddleware(new_recorder))
    app_client = WebApp(new_app)
    app_client.get('/segment')
    # Nothing is left on the shared recorder: the plugin began and ended the segment.
    assert not recorder.emitter.pop()
================================================
FILE: tests/ext/bottle/views/index.tpl
================================================
Hello {{name.title()}}!
How are you?
================================================
FILE: tests/ext/django/__init__.py
================================================
================================================
FILE: tests/ext/django/app/__init__.py
================================================
================================================
FILE: tests/ext/django/app/settings.py
================================================
"""
Config file for a django app used by django testing client
"""
import os
from aws_xray_sdk.core.sampling.sampler import LocalSampler
# Project root: two directories above this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DEBUG = True
# In-memory SQLite keeps the test suite hermetic and fast.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:'
    }
}
ALLOWED_HOSTS = ['testserver']
SECRET_KEY = 'doesntreallymatter'
# URL patterns are declared alongside the test views module.
ROOT_URLCONF = 'tests.ext.django.app.views'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            os.path.join(BASE_DIR, 'app', 'templates'),
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
            ],
        },
    },
]
MIDDLEWARE = [
    # X-Ray middleware for django
    'aws_xray_sdk.ext.django.middleware.XRayMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.security.SecurityMiddleware',
]
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'aws_xray_sdk.ext.django',
]
# Settings consumed by the SDK's Django app config ('aws_xray_sdk.ext.django').
XRAY_RECORDER = {
    'AWS_XRAY_TRACING_NAME': 'django',
    'SAMPLING': False,
    'SAMPLER': LocalSampler(),
}
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
================================================
FILE: tests/ext/django/app/templates/block.html
================================================
Hello World
================================================
FILE: tests/ext/django/app/templates/block_user.html
================================================
Django Test App
{% include "block.html" %}
================================================
FILE: tests/ext/django/app/templates/index.html
================================================
Django Test App
Hello World
================================================
FILE: tests/ext/django/app/views.py
================================================
import sqlite3
from django.http import HttpResponse
from django.urls import path
from django.views.generic import TemplateView
class IndexView(TemplateView):
    # Renders the standalone index template (no included blocks).
    template_name = 'index.html'
class TemplateBlockView(TemplateView):
    # Renders a template that {% include %}s another, to test nested rendering.
    template_name = 'block_user.html'
def ok(request):
    """Return an empty 200 response."""
    return HttpResponse(status=200)
def fault(request):
    """Deliberately raise KeyError so the middleware records a 500 fault."""
    {}['key']
def call_db(request):
    """Run a trivial query against an in-memory SQLite database and return 201.

    The query exercises the patched sqlite3 layer so a DB subsegment is recorded.
    """
    conn = sqlite3.connect(':memory:')
    try:
        q = 'SELECT name FROM sqlite_master'
        conn.execute(q)
    finally:
        # Close the connection deterministically instead of leaking it.
        conn.close()
    return HttpResponse(status=201)
# URL table wired to ROOT_URLCONF in settings; names are used via reverse().
urlpatterns = [
    path('200ok/', ok, name='200ok'),
    path('500fault/', fault, name='500fault'),
    path('call_db/', call_db, name='call_db'),
    path('template/', IndexView.as_view(), name='template'),
    path('template_block/', TemplateBlockView.as_view(), name='template_block'),
]
================================================
FILE: tests/ext/django/test_db.py
================================================
import django
import pytest
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.context import Context
from aws_xray_sdk.ext.django.db import patch_db
@pytest.fixture(scope='module', autouse=True)
def setup():
    """Initialise Django, give the recorder a fresh context, and patch the DB layer once per module."""
    django.setup()
    xray_recorder.configure(context=Context())
    patch_db()
@pytest.fixture(scope='module')
def user_class(setup):
    """Provide a fake Django model (django_fake_model) with its own table lifecycle."""
    # Imported here so django.setup() in `setup` has already run.
    from django.db import models
    from django_fake_model import models as f

    class User(f.FakeModel):
        name = models.CharField(max_length=255)
        password = models.CharField(max_length=255)

    return User
@pytest.fixture(
    autouse=True,
    params=[
        False,
        True,
    ]
)
# NOTE(review): applying pytest.mark.django_db to a fixture (rather than a
# test) is unusual and may be a no-op — confirm it is intentional.
@pytest.mark.django_db
def func_setup(request, user_class):
    """Run every test twice — SQL streaming off then on — with a fresh
    segment per test and full teardown afterwards."""
    xray_recorder.stream_sql = request.param
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment('name')
    try:
        user_class.create_table()
        yield
    finally:
        xray_recorder.clear_trace_entities()
        try:
            user_class.delete_table()
        finally:
            # End the segment even if table deletion fails.
            xray_recorder.end_segment()
def _assert_query(sql_meta):
    """Check sanitized-SQL capture according to the recorder's stream_sql flag."""
    if not xray_recorder.stream_sql:
        # Streaming off: our own SELECTs must not surface, though Django's
        # internal table checks may still record other statements.
        if 'sanitized_query' in sql_meta:
            assert sql_meta['sanitized_query']
            # Django internally executes queries for table checks, ignore those
            assert not sql_meta['sanitized_query'].startswith('SELECT')
        return
    assert 'sanitized_query' in sql_meta
    assert sql_meta['sanitized_query']
    assert sql_meta['sanitized_query'].startswith('SELECT')
def test_all(user_class):
    """all() runs a SELECT and the SQL is attached to a subsegment as metadata."""
    # Materialising the queryset is what actually executes the SQL.
    list(user_class.objects.all())
    sql = xray_recorder.current_segment().subsegments[-1].sql
    assert sql['database_type'] == 'sqlite'
    _assert_query(sql)
def test_filter(user_class):
    """filter() runs a parameterised SELECT; literal values must not leak
    into the sanitized query when streaming is enabled."""
    # Materialising the queryset is what actually executes the SQL.
    list(user_class.objects.filter(password='mypassword!').all())
    sql = xray_recorder.current_segment().subsegments[-1].sql
    assert sql['database_type'] == 'sqlite'
    _assert_query(sql)
    if xray_recorder.stream_sql:
        sanitized = sql['sanitized_query']
        assert 'mypassword!' not in sanitized
        assert '"password" = %s' in sanitized
================================================
FILE: tests/ext/django/test_middleware.py
================================================
import django
from aws_xray_sdk import global_sdk_config
from django.urls import reverse
from django.test import TestCase
from aws_xray_sdk.core import xray_recorder, lambda_launcher
from aws_xray_sdk.core.context import Context
from aws_xray_sdk.core.models import http, facade_segment, segment
from aws_xray_sdk.core import patch
from tests.util import get_new_stubbed_recorder
import os
class XRayTestCase(TestCase):
def setUp(self):
django.setup()
xray_recorder.configure(context=Context())
xray_recorder.clear_trace_entities()
global_sdk_config.set_sdk_enabled(True)
def tearDown(self):
xray_recorder.clear_trace_entities()
def test_ok(self):
url = reverse('200ok')
self.client.get(url)
segment = xray_recorder.emitter.pop()
request = segment.http['request']
response = segment.http['response']
assert request['method'] == 'GET'
assert request['client_ip'] == '127.0.0.1'
assert response['status'] == 200
def test_error(self):
self.client.get('/notfound/')
segment = xray_recorder.emitter.pop()
assert segment.error
request = segment.http['request']
response = segment.http['response']
assert request['method'] == 'GET'
assert request['client_ip'] == '127.0.0.1'
assert response['status'] == 404
def test_fault(self):
url = reverse('500fault')
try:
self.client.get(url)
except Exception:
pass
segment = xray_recorder.emitter.pop()
assert segment.fault
request = segment.http['request']
response = segment.http['response']
assert request['method'] == 'GET'
assert request['client_ip'] == '127.0.0.1'
assert response['status'] == 500
exception = segment.cause['exceptions'][0]
assert exception.type == 'KeyError'
def test_db(self):
patch(('sqlite3',))
url = reverse('call_db')
self.client.get(url)
segment = xray_recorder.emitter.pop()
assert len(segment.subsegments) == 1
subsegment = segment.subsegments[0]
assert subsegment.name == ':memory:'
assert not subsegment.in_progress
sql = subsegment.sql
assert sql['database_type'] == 'sqlite3'
assert sql['database_version']
def test_template(self):
url = reverse('template')
self.client.get(url)
segment = xray_recorder.emitter.pop()
assert len(segment.subsegments) == 1
subsegment = segment.subsegments[0]
assert subsegment.name == 'index.html'
assert not subsegment.in_progress
assert subsegment.namespace == 'local'
def test_template_block(self):
url = reverse('template_block')
self.client.get(url)
segment = xray_recorder.emitter.pop()
assert len(segment.subsegments) == 1
subsegment = segment.subsegments[0]
assert subsegment.name == 'block_user.html'
assert not subsegment.in_progress
assert subsegment.namespace == 'local'
def test_trace_header_data_perservation(self):
url = reverse('200ok')
self.client.get(url, HTTP_X_AMZN_TRACE_ID='k1=v1')
segment = xray_recorder.emitter.pop()
header = segment.get_origin_trace_header()
assert header.data['k1'] == 'v1'
def test_response_header(self):
    """A 'Sampled=?' request gets the sampling decision echoed back."""
    resp = self.client.get(reverse('200ok'), HTTP_X_AMZN_TRACE_ID='Sampled=?')
    seg = xray_recorder.emitter.pop()
    echoed = resp[http.XRAY_HEADER]
    assert seg.trace_id in echoed
    assert 'Sampled=1' in echoed
def test_disabled_sdk(self):
    """No segment is emitted while the SDK is globally disabled."""
    global_sdk_config.set_sdk_enabled(False)
    self.client.get(reverse('200ok'))
    assert not xray_recorder.emitter.pop()
def test_lambda_serverless(self):
    """Under a Lambda context, subsegments attach to a facade segment, while
    the Django middleware (driven by the global recorder) still records a
    real segment for the request.

    The Lambda trace header env var is removed afterwards so that later
    tests do not accidentally run in serverless mode.
    """
    TRACE_ID = '1-5759e988-bd862e3fe1be46a994272793'
    PARENT_ID = '53995c3f42cd8ad8'
    HEADER_VAR = "Root=%s;Parent=%s;Sampled=1" % (TRACE_ID, PARENT_ID)
    os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY] = HEADER_VAR
    try:
        lambda_context = lambda_launcher.LambdaContext()
        new_recorder = get_new_stubbed_recorder()
        new_recorder.configure(service='test', sampling=False, context=lambda_context)
        # A subsegment opened under the Lambda context hangs off the facade
        # segment synthesized from the env trace header.
        subsegment = new_recorder.begin_subsegment("subsegment")
        assert type(subsegment.parent_segment) == facade_segment.FacadeSegment
        new_recorder.end_subsegment()
        url = reverse('200ok')
        self.client.get(url)
        # The request went through the global recorder, not new_recorder.
        segment = new_recorder.emitter.pop()
        assert not segment
        # Test Fault in Lambda
        url = reverse('500fault')
        try:
            self.client.get(url)
        except Exception:
            pass
        segment = xray_recorder.emitter.pop()
        assert segment.fault
        request = segment.http['request']
        response = segment.http['response']
        assert request['method'] == 'GET'
        assert request['client_ip'] == '127.0.0.1'
        assert response['status'] == 500
        exception = segment.cause['exceptions'][0]
        assert exception.type == 'KeyError'
    finally:
        # Clean up: the original left this set, polluting subsequent tests.
        os.environ.pop(lambda_launcher.LAMBDA_TRACE_HEADER_KEY, None)
def test_lambda_default_ctx(self):
    """Without a LambdaContext the middleware falls back to real segments."""
    self.client.get(reverse('200ok'))
    emitted = xray_recorder.emitter.pop()
    assert type(emitted) == segment.Segment
================================================
FILE: tests/ext/django/test_settings.py
================================================
from unittest import mock
import django
from django.apps import apps
from django.conf import settings
from django.test import TestCase, override_settings
from aws_xray_sdk import global_sdk_config
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.sampling.sampler import LocalSampler
class XRayConfigurationTestCase(TestCase):
    """Django settings-driven configuration is applied to the global recorder."""

    def test_sampler_can_be_configured(self):
        # Both the declared setting and the live recorder must hold the sampler.
        assert isinstance(xray_recorder.sampler, LocalSampler)
        assert isinstance(settings.XRAY_RECORDER['SAMPLER'], LocalSampler)
================================================
FILE: tests/ext/flask/__init__.py
================================================
================================================
FILE: tests/ext/flask/test_flask.py
================================================
import pytest
from flask import Flask, render_template_string
from aws_xray_sdk import global_sdk_config
from aws_xray_sdk.ext.flask.middleware import XRayMiddleware
from aws_xray_sdk.core.context import Context
from aws_xray_sdk.core import lambda_launcher
from aws_xray_sdk.core.models import http, facade_segment, segment
from tests.util import get_new_stubbed_recorder
import os
# define a flask app for testing purpose
app = Flask(__name__)


@app.route('/ok')
def ok():
    return 'ok'


@app.route('/error')
def error():
    return 'Not Found', 404


@app.route('/fault')
def fault():
    # Deliberately raises KeyError so fault handling can be exercised.
    return {}['key']


@app.route('/fault_no_exception')
def fault_no_exception():
    # 500 status without raising — fault classification, empty cause.
    return "SomeException", 500


@app.route('/template')
def template():
    return render_template_string('hello template')


# add X-Ray middleware to flask app
# NOTE: the middleware must be attached before the test client is created.
recorder = get_new_stubbed_recorder()
recorder.configure(service='test', sampling=False, context=Context())
XRayMiddleware(app, recorder)

# We don't need to enable testing mode by doing app.config['TESTING'] = True
# because what it does is disable error catching during request handling,
# so that you get better error reports when performing test requests against the application.
# But this also results in `after_request` method not getting invoked during unhandled exception which we want
# since it is the actual application behavior in our use case.
# `app` is rebound to the test client; tests below call app.get(...).
app = app.test_client()

BASE_URL = 'http://localhost{}'
@pytest.fixture(autouse=True)
def cleanup():
    """
    Clean up context storage before and after each test run
    """
    recorder.clear_trace_entities()
    yield
    recorder.clear_trace_entities()
    # Re-enable the SDK in case a test (e.g. test_disabled_sdk) turned it off.
    global_sdk_config.set_sdk_enabled(True)
def test_ok():
    """A 200 response yields a closed segment with full http metadata."""
    path = '/ok'
    app.get(path)
    seg = recorder.emitter.pop()
    assert not seg.in_progress
    req, resp = seg.http['request'], seg.http['response']
    assert req['method'] == 'GET'
    assert req['client_ip'] == '127.0.0.1'
    assert req['url'] == BASE_URL.format(path)
    assert resp['status'] == 200
    assert resp['content_length'] == 2
def test_error():
    """A 404 response marks the segment as error."""
    path = '/error'
    app.get(path)
    seg = recorder.emitter.pop()
    assert not seg.in_progress
    assert seg.error
    req, resp = seg.http['request'], seg.http['response']
    assert req['method'] == 'GET'
    assert req['client_ip'] == '127.0.0.1'
    assert req['url'] == BASE_URL.format(path)
    assert resp['status'] == 404
def test_fault():
    """An uncaught view exception produces a fault segment with the cause."""
    path = '/fault'
    try:
        app.get(path)
    except Exception:
        # The route raises KeyError by design.
        pass
    seg = recorder.emitter.pop()
    assert not seg.in_progress
    assert seg.fault
    assert seg.http['response']['status'] == 500
    assert seg.cause['exceptions'][0].type == 'KeyError'
def test_fault_no_exception():
    """A handled 500 (no exception raised) is still a fault, with empty cause."""
    app.get('/fault_no_exception')
    seg = recorder.emitter.pop()
    assert not seg.in_progress
    assert seg.fault
    assert seg.http['response']['status'] == 500
    assert seg.cause == {}
def test_render_template():
    """Template rendering is traced as a completed local subsegment."""
    app.get('/template')
    seg = recorder.emitter.pop()
    assert not seg.in_progress
    # segment should contain a template render subsegment
    assert seg.subsegments
    sub = seg.subsegments[0]
    assert sub.namespace == 'local'
    assert sub.name
    assert not sub.in_progress
def test_incoming_sampling_decision_respected():
    """'Sampled=0' suppresses emission, but a trace header is still returned."""
    resp = app.get('/ok', headers={http.XRAY_HEADER: 'Sampled=0'})
    echoed = resp.headers[http.XRAY_HEADER]
    assert not recorder.emitter.pop()
    # The SDK should still send the headers back regardless of sampling decision
    assert 'Root' in echoed
def test_trace_header_data_perservation():
    """Arbitrary key/value data on the incoming trace header is preserved."""
    app.get('/ok', headers={http.XRAY_HEADER: 'k1=v1'})
    seg = recorder.emitter.pop()
    assert seg.get_origin_trace_header().data['k1'] == 'v1'
def test_sampled_response_header():
    """'Sampled=?' is answered with the decision plus the trace id."""
    resp = app.get('/ok', headers={http.XRAY_HEADER: 'Sampled=?;k1=v1'})
    seg = recorder.emitter.pop()
    echoed = resp.headers[http.XRAY_HEADER]
    assert 'Sampled=1' in echoed
    assert seg.trace_id in echoed
def test_disabled_sdk():
    """No segment is emitted while the SDK is globally disabled."""
    global_sdk_config.set_sdk_enabled(False)
    app.get('/ok')
    assert not recorder.emitter.pop()
def test_lambda_serverless():
    """In a Lambda (serverless) context the middleware opens subsegments under
    a facade segment instead of creating real segments.

    The Lambda trace header env var is removed afterwards so that later
    tests do not accidentally run in serverless mode.
    """
    TRACE_ID = '1-5759e988-bd862e3fe1be46a994272793'
    PARENT_ID = '53995c3f42cd8ad8'
    HEADER_VAR = "Root=%s;Parent=%s;Sampled=1" % (TRACE_ID, PARENT_ID)
    os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY] = HEADER_VAR
    try:
        lambda_context = lambda_launcher.LambdaContext()
        new_recorder = get_new_stubbed_recorder()
        new_recorder.configure(service='test', sampling=False, context=lambda_context)
        new_app = Flask(__name__)

        @new_app.route('/subsegment')
        def subsegment():
            # Test in between request and make sure Serverless creates a subsegment instead of a segment.
            # Ensure that the parent segment is a facade segment.
            assert new_recorder.current_subsegment()
            assert type(new_recorder.current_segment()) == facade_segment.FacadeSegment
            return 'ok'

        @new_app.route('/trace_header')
        def trace_header():
            # Ensure trace header is preserved.
            subsegment = new_recorder.current_subsegment()
            header = subsegment.get_origin_trace_header()
            assert header.data['k1'] == 'v1'
            return 'ok'

        middleware = XRayMiddleware(new_app, new_recorder)
        middleware.in_lambda_ctx = True
        app_client = new_app.test_client()
        app_client.get('/subsegment')
        # Segment should be none because it's created and ended by the middleware
        assert not recorder.emitter.pop()
        app_client.get('/trace_header', headers={http.XRAY_HEADER: 'k1=v1'})
    finally:
        # Clean up: the original left this set, polluting subsequent tests.
        os.environ.pop(lambda_launcher.LAMBDA_TRACE_HEADER_KEY, None)
def test_lambda_default_ctx():
    """A recorder with the default context keeps generating real segments."""
    new_recorder = get_new_stubbed_recorder()
    new_recorder.configure(service='test', sampling=False)
    new_app = Flask(__name__)

    @new_app.route('/segment')
    def handler():
        # Inside the request a genuine Segment (not a facade) must be active.
        assert new_recorder.current_segment()
        assert type(new_recorder.current_segment()) == segment.Segment
        return 'ok'

    XRayMiddleware(new_app, new_recorder)
    new_app.test_client().get('/segment')
    # The middleware created and ended the segment on new_recorder, so the
    # module-level stub emitter stays empty.
    assert not recorder.emitter.pop()
================================================
FILE: tests/ext/flask_sqlalchemy/__init__.py
================================================
================================================
FILE: tests/ext/flask_sqlalchemy/test_query.py
================================================
import pytest
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.context import Context
from aws_xray_sdk.ext.flask_sqlalchemy.query import XRayFlaskSqlAlchemy
from flask import Flask
from ...util import find_subsegment_by_annotation
app = Flask(__name__)
# In-memory SQLite keeps the tests self-contained; modification tracking is
# disabled because these tests don't rely on SQLAlchemy's event signalling.
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"
db = XRayFlaskSqlAlchemy(app)
class User(db.Model):
    # Minimal user model used to exercise traced ORM operations.
    __tablename__ = "users"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255), nullable=False, unique=True)
    fullname = db.Column(db.String(255), nullable=False)
    password = db.Column(db.String(255), nullable=False)
@pytest.fixture(
    params=[
        False,
        True,
    ],
)
def session(request):
    """Test Fixture to Create DataBase Tables and start a trace segment.

    Parametrized over stream_sql so every test runs once with SQL statement
    capture disabled and once with it enabled.
    """
    xray_recorder.configure(service='test', sampling=False, context=Context(), stream_sql=request.param)
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment('SQLAlchemyTest')
    db.create_all()
    yield
    xray_recorder.end_segment()
    xray_recorder.clear_trace_entities()
def test_all(capsys, session):
    """query.all() is traced; sanitized SQL appears only when stream_sql is on."""
    User.query.all()
    sub = find_subsegment_by_annotation(
        xray_recorder.current_segment(), 'sqlalchemy', 'sqlalchemy.orm.query.all')
    assert sub['annotations']['sqlalchemy'] == 'sqlalchemy.orm.query.all'
    assert sub['sql']['url']
    # The sanitized query is captured iff the recorder streams SQL.
    assert bool(sub['sql'].get('sanitized_query', None)) is xray_recorder.stream_sql
def test_add(capsys, session):
    """session.add() is traced with its annotation and connection URL."""
    new_user = User(name='John', fullname="John Doe", password="password")
    db.session.add(new_user)
    sub = find_subsegment_by_annotation(
        xray_recorder.current_segment(), 'sqlalchemy', 'sqlalchemy.orm.session.add')
    assert sub['annotations']['sqlalchemy'] == 'sqlalchemy.orm.session.add'
    assert sub['sql']['url']
================================================
FILE: tests/ext/httplib/__init__.py
================================================
================================================
FILE: tests/ext/httplib/test_httplib.py
================================================
import http.client as httplib
from urllib.parse import urlparse
import pytest
from aws_xray_sdk.core import patch, xray_recorder
from aws_xray_sdk.core.context import Context
from aws_xray_sdk.ext.util import get_hostname, strip_url
# httpbin.org is created by the same author of requests to make testing http easy.
BASE_URL = 'httpbin.org'  # live host contacted by every request below
@pytest.fixture(autouse=True)
def construct_ctx():
    """
    Clean up context storage on each test run and begin a segment
    so that later subsegment can be attached. After each test run
    it cleans up context storage again.
    """
    from aws_xray_sdk.ext.httplib import reset_ignored, unpatch
    # Patch before configuring so every request in the test body is traced.
    patch(('httplib',))
    xray_recorder.configure(service='test', sampling=False, context=Context())
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment('name')
    yield
    xray_recorder.clear_trace_entities()
    # Undo the patch and any add_ignored() registrations a test installed.
    unpatch()
    reset_ignored()
def _do_req(url, method='GET', use_https=True):
    """Issue a real HTTP(S) request to ``url`` and return the response.

    :param url: full URL, optionally with a query string and explicit port.
    :param method: HTTP verb to use.
    :param use_https: choose HTTPSConnection vs HTTPConnection.
    :return: the ``http.client.HTTPResponse`` (previously discarded).
    """
    parts = urlparse(url)
    host, _, port = parts.netloc.partition(':')
    # An empty port means "use the scheme default"; otherwise it must be an
    # int. The original passed the full netloc (host:port) together with the
    # raw string port, which only worked because the test URLs carry no port.
    port = int(port) if port else None
    conn_cls = httplib.HTTPSConnection if use_https else httplib.HTTPConnection
    conn = conn_cls(host, port)
    path = '{}?{}'.format(parts.path, parts.query) if parts.query else parts.path
    conn.request(method, path)
    return conn.getresponse()
def test_ok():
    """A 200 response is recorded with url, method and status."""
    status_code = 200
    url = 'https://{}/status/{}?foo=bar&baz=foo'.format(BASE_URL, status_code)
    _do_req(url)
    sub = xray_recorder.current_segment().subsegments[1]
    assert sub.name == get_hostname(url)
    meta = sub.http
    assert meta['request']['method'].upper() == 'GET'
    assert meta['request']['url'] == strip_url(url)
    assert meta['response']['status'] == status_code
def test_error():
    """A 400 response marks the subsegment as error."""
    status_code = 400
    url = 'https://{}/status/{}'.format(BASE_URL, status_code)
    _do_req(url, 'POST')
    sub = xray_recorder.current_segment().subsegments[1]
    assert sub.name == get_hostname(url)
    assert sub.error
    meta = sub.http
    assert meta['request']['method'].upper() == 'POST'
    assert meta['request']['url'] == strip_url(url)
    assert meta['response']['status'] == status_code
def test_throttle():
    """A 429 response is flagged both as error and as throttled."""
    status_code = 429
    url = 'https://{}/status/{}'.format(BASE_URL, status_code)
    _do_req(url, 'HEAD')
    sub = xray_recorder.current_segment().subsegments[1]
    assert sub.name == get_hostname(url)
    assert sub.error
    assert sub.throttle
    meta = sub.http
    assert meta['request']['method'].upper() == 'HEAD'
    assert meta['request']['url'] == strip_url(url)
    assert meta['response']['status'] == status_code
def test_fault():
    """A 500 response marks the subsegment as fault."""
    status_code = 500
    url = 'https://{}/status/{}'.format(BASE_URL, status_code)
    _do_req(url, 'PUT')
    sub = xray_recorder.current_segment().subsegments[1]
    assert sub.name == get_hostname(url)
    assert sub.fault
    meta = sub.http
    assert meta['request']['method'].upper() == 'PUT'
    assert meta['request']['url'] == strip_url(url)
    assert meta['response']['status'] == status_code
def test_invalid_url():
    """A DNS failure surfaces as a fault subsegment carrying gaierror."""
    try:
        _do_req('http://doesnt.exist')
    except Exception:
        # prevent uncatch exception from breaking test run
        pass
    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.fault
    assert sub.cause['exceptions'][0].type == 'gaierror'
def test_correct_identify_http():
    """Plain-HTTP requests are recorded with an http:// URL."""
    status_code = 200
    url = 'http://{}/status/{}?foo=bar&baz=foo'.format(BASE_URL, status_code)
    _do_req(url, use_https=False)
    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.name == get_hostname(url)
    scheme = sub.http['request']['url'].split(":")[0]
    assert scheme == 'http'
def test_correct_identify_https():
    """HTTPS requests are recorded with an https:// URL."""
    status_code = 200
    url = 'https://{}/status/{}?foo=bar&baz=foo'.format(BASE_URL, status_code)
    _do_req(url, use_https=True)
    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.name == get_hostname(url)
    scheme = sub.http['request']['url'].split(":")[0]
    assert scheme == 'https'
def test_ignore_url():
    """Requests whose path is registered as ignored create no subsegment."""
    from aws_xray_sdk.ext.httplib import add_ignored
    path = '/status/200'
    add_ignored(urls=[path])
    _do_req('https://{}{}'.format(BASE_URL, path), use_https=True)
    assert len(xray_recorder.current_segment().subsegments) == 0
def test_ignore_hostname():
    """An exact hostname ignore rule suppresses tracing entirely."""
    from aws_xray_sdk.ext.httplib import add_ignored
    add_ignored(hostname=BASE_URL)
    _do_req('https://{}{}'.format(BASE_URL, '/status/200'), use_https=True)
    assert len(xray_recorder.current_segment().subsegments) == 0
def test_ignore_hostname_glob():
    """A wildcard hostname ignore rule also suppresses tracing."""
    from aws_xray_sdk.ext.httplib import add_ignored
    add_ignored(hostname='http*.org')
    _do_req('https://{}{}'.format(BASE_URL, '/status/200'), use_https=True)
    assert len(xray_recorder.current_segment().subsegments) == 0
class CustomHttpsConnection(httplib.HTTPSConnection):
    # Subclass used to verify that add_ignored(subclass=...) matches on the
    # concrete connection class.
    pass
def test_ignore_subclass():
    """An ignore rule keyed on a connection subclass suppresses tracing."""
    from aws_xray_sdk.ext.httplib import add_ignored
    add_ignored(subclass='tests.ext.httplib.test_httplib.CustomHttpsConnection')
    conn = CustomHttpsConnection(BASE_URL)
    conn.request('GET', '/status/200')
    conn.getresponse()
    assert len(xray_recorder.current_segment().subsegments) == 0
def test_ignore_multiple_match():
    """When every criterion of a combined rule matches, tracing is suppressed."""
    from aws_xray_sdk.ext.httplib import add_ignored
    add_ignored(
        subclass='tests.ext.httplib.test_httplib.CustomHttpsConnection',
        hostname=BASE_URL,
    )
    conn = CustomHttpsConnection(BASE_URL)
    conn.request('GET', '/status/200')
    conn.getresponse()
    assert len(xray_recorder.current_segment().subsegments) == 0
def test_ignore_multiple_no_match():
    """If any criterion of a combined rule fails, the request is still traced."""
    from aws_xray_sdk.ext.httplib import add_ignored
    add_ignored(
        subclass='tests.ext.httplib.test_httplib.CustomHttpsConnection',
        hostname='fake.host',
    )
    conn = CustomHttpsConnection(BASE_URL)
    conn.request('GET', '/status/200')
    conn.getresponse()
    assert len(xray_recorder.current_segment().subsegments) > 0
================================================
FILE: tests/ext/httpx/__init__.py
================================================
================================================
FILE: tests/ext/httpx/test_httpx.py
================================================
import pytest
import httpx
from aws_xray_sdk.core import patch
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.context import Context
from aws_xray_sdk.ext.util import strip_url, get_hostname
patch(("httpx",))  # instrument httpx at import time, before any client exists
# httpbin.org is created by the same author of requests to make testing http easy.
BASE_URL = "httpbin.org"
@pytest.fixture(autouse=True)
def construct_ctx():
    """
    Clean up context storage on each test run and begin a segment
    so that later subsegment can be attached. After each test run
    it cleans up context storage again.
    """
    xray_recorder.configure(service="test", sampling=False, context=Context())
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment("name")
    yield
    xray_recorder.clear_trace_entities()
@pytest.mark.parametrize("use_client", (True, False))
def test_ok(use_client):
    """A 200 response yields a clean remote subsegment with http metadata."""
    expected_status = 200
    url = f"http://{BASE_URL}/status/{expected_status}?foo=bar"
    if use_client:
        with httpx.Client() as client:
            resp = client.get(url)
    else:
        resp = httpx.get(url)
    # The SDK must inject its trace header into the outgoing request.
    assert "x-amzn-trace-id" in resp._request.headers
    sub = xray_recorder.current_segment().subsegments[0]
    assert get_hostname(url) == BASE_URL
    assert sub.namespace == "remote"
    assert sub.name == get_hostname(url)
    meta = sub.http
    assert meta["request"]["method"].upper() == "GET"
    assert meta["request"]["url"] == strip_url(url)
    assert meta["response"]["status"] == expected_status
@pytest.mark.parametrize("use_client", (True, False))
def test_error(use_client):
    """A 400 response marks the subsegment as error."""
    expected_status = 400
    url = f"http://{BASE_URL}/status/{expected_status}"
    if use_client:
        with httpx.Client() as client:
            resp = client.post(url)
    else:
        resp = httpx.post(url)
    assert "x-amzn-trace-id" in resp._request.headers
    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.namespace == "remote"
    assert sub.name == get_hostname(url)
    assert sub.error
    meta = sub.http
    assert meta["request"]["method"].upper() == "POST"
    assert meta["request"]["url"] == strip_url(url)
    assert meta["response"]["status"] == expected_status
@pytest.mark.parametrize("use_client", (True, False))
def test_throttle(use_client):
    """A 429 response is flagged both as error and as throttled."""
    expected_status = 429
    url = f"http://{BASE_URL}/status/{expected_status}"
    if use_client:
        with httpx.Client() as client:
            resp = client.head(url)
    else:
        resp = httpx.head(url)
    assert "x-amzn-trace-id" in resp._request.headers
    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.namespace == "remote"
    assert sub.name == get_hostname(url)
    assert sub.error
    assert sub.throttle
    meta = sub.http
    assert meta["request"]["method"].upper() == "HEAD"
    assert meta["request"]["url"] == strip_url(url)
    assert meta["response"]["status"] == expected_status
@pytest.mark.parametrize("use_client", (True, False))
def test_fault(use_client):
    """A 500 response marks the subsegment as fault."""
    expected_status = 500
    url = f"http://{BASE_URL}/status/{expected_status}"
    if use_client:
        with httpx.Client() as client:
            resp = client.put(url)
    else:
        resp = httpx.put(url)
    assert "x-amzn-trace-id" in resp._request.headers
    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.namespace == "remote"
    assert sub.name == get_hostname(url)
    assert sub.fault
    meta = sub.http
    assert meta["request"]["method"].upper() == "PUT"
    assert meta["request"]["url"] == strip_url(url)
    assert meta["response"]["status"] == expected_status
@pytest.mark.parametrize("use_client", (True, False))
def test_nonexistent_domain(use_client):
    """A DNS failure is recorded as a fault with the ConnectError cause."""
    with pytest.raises(httpx.ConnectError):
        if use_client:
            with httpx.Client() as client:
                client.get("http://doesnt.exist")
        else:
            httpx.get("http://doesnt.exist")
    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.namespace == "remote"
    assert sub.fault
    assert sub.cause["exceptions"][0].type == "ConnectError"
@pytest.mark.parametrize("use_client", (True, False))
def test_invalid_url(use_client):
    """A scheme-less URL faults with UnsupportedProtocol but is still traced."""
    bad_url = "KLSDFJKLSDFJKLSDJF"
    with pytest.raises(httpx.UnsupportedProtocol):
        if use_client:
            with httpx.Client() as client:
                client.get(bad_url)
        else:
            httpx.get(bad_url)
    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.namespace == "remote"
    assert sub.name == get_hostname(bad_url)
    assert sub.fault
    # The whole string ends up recorded as a path, hence the leading slash.
    assert sub.http["request"]["url"] == "/{}".format(strip_url(bad_url))
    assert sub.cause["exceptions"][0].type == "UnsupportedProtocol"
@pytest.mark.parametrize("use_client", (True, False))
def test_name_uses_hostname(use_client):
    """Subsegment names come from the hostname alone, never path or query."""
    client = httpx.Client() if use_client else httpx
    try:
        url1 = f"http://{BASE_URL}/fakepath/stuff/koo/lai/ahh"
        client.get(url1)
        sub = xray_recorder.current_segment().subsegments[-1]
        assert sub.namespace == "remote"
        assert sub.name == BASE_URL
        assert sub.http["request"]["url"] == strip_url(url1)
        assert sub.http["request"]["method"].upper() == "GET"
        url2 = f"http://{BASE_URL}/"
        client.get(url2, params={"some": "payload", "not": "toBeIncluded"})
        sub = xray_recorder.current_segment().subsegments[-1]
        assert sub.namespace == "remote"
        assert sub.name == BASE_URL
        assert sub.http["request"]["url"] == strip_url(url2)
        assert sub.http["request"]["method"].upper() == "GET"
        url3 = f"http://subdomain.{BASE_URL}/fakepath/stuff/koo/lai/ahh"
        try:
            client.get(url3)
        except httpx.ConnectError:
            # The subdomain doesn't resolve; the subsegment still exists.
            pass
        sub = xray_recorder.current_segment().subsegments[-1]
        assert sub.namespace == "remote"
        assert sub.name == "subdomain." + BASE_URL
        assert sub.http["request"]["url"] == strip_url(url3)
        assert sub.http["request"]["method"].upper() == "GET"
    finally:
        if use_client:
            client.close()
@pytest.mark.parametrize("use_client", (True, False))
def test_strip_http_url(use_client):
    """Query strings are stripped from the recorded request URL."""
    expected_status = 200
    url = f"http://{BASE_URL}/get?foo=bar"
    if use_client:
        with httpx.Client() as client:
            resp = client.get(url)
    else:
        resp = httpx.get(url)
    assert "x-amzn-trace-id" in resp._request.headers
    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.namespace == "remote"
    assert sub.name == get_hostname(url)
    meta = sub.http
    assert meta["request"]["method"].upper() == "GET"
    assert meta["request"]["url"] == strip_url(url)
    assert meta["response"]["status"] == expected_status
================================================
FILE: tests/ext/httpx/test_httpx_async.py
================================================
import pytest
import httpx
from aws_xray_sdk.core import patch
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.context import Context
from aws_xray_sdk.ext.util import strip_url, get_hostname
patch(("httpx",))  # instrument httpx at import time, before any client exists
# httpbin.org is created by the same author of requests to make testing http easy.
BASE_URL = "httpbin.org"
@pytest.fixture(autouse=True)
def construct_ctx():
    """
    Clean up context storage on each test run and begin a segment
    so that later subsegment can be attached. After each test run
    it cleans up context storage again.
    """
    xray_recorder.configure(service="test", sampling=False, context=Context())
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment("name")
    yield
    xray_recorder.clear_trace_entities()
@pytest.mark.asyncio
async def test_ok_async():
    """A 200 async response yields a clean remote subsegment."""
    expected_status = 200
    url = f"http://{BASE_URL}/status/{expected_status}?foo=bar"
    async with httpx.AsyncClient() as client:
        resp = await client.get(url)
    # The SDK must inject its trace header into the outgoing request.
    assert "x-amzn-trace-id" in resp._request.headers
    sub = xray_recorder.current_segment().subsegments[0]
    assert get_hostname(url) == BASE_URL
    assert sub.namespace == "remote"
    assert sub.name == get_hostname(url)
    meta = sub.http
    assert meta["request"]["method"].upper() == "GET"
    assert meta["request"]["url"] == strip_url(url)
    assert meta["response"]["status"] == expected_status
@pytest.mark.asyncio
async def test_error_async():
    """A 400 async response marks the subsegment as error."""
    expected_status = 400
    url = f"http://{BASE_URL}/status/{expected_status}"
    async with httpx.AsyncClient() as client:
        resp = await client.post(url)
    assert "x-amzn-trace-id" in resp._request.headers
    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.namespace == "remote"
    assert sub.name == get_hostname(url)
    assert sub.error
    meta = sub.http
    assert meta["request"]["method"].upper() == "POST"
    assert meta["request"]["url"] == strip_url(url)
    assert meta["response"]["status"] == expected_status
@pytest.mark.asyncio
async def test_throttle_async():
    """A 429 async response is flagged both as error and as throttled."""
    expected_status = 429
    url = f"http://{BASE_URL}/status/{expected_status}"
    async with httpx.AsyncClient() as client:
        resp = await client.head(url)
    assert "x-amzn-trace-id" in resp._request.headers
    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.namespace == "remote"
    assert sub.name == get_hostname(url)
    assert sub.error
    assert sub.throttle
    meta = sub.http
    assert meta["request"]["method"].upper() == "HEAD"
    assert meta["request"]["url"] == strip_url(url)
    assert meta["response"]["status"] == expected_status
@pytest.mark.asyncio
async def test_fault_async():
    """A 500 async response marks the subsegment as fault."""
    expected_status = 500
    url = f"http://{BASE_URL}/status/{expected_status}"
    async with httpx.AsyncClient() as client:
        resp = await client.put(url)
    assert "x-amzn-trace-id" in resp._request.headers
    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.namespace == "remote"
    assert sub.name == get_hostname(url)
    assert sub.fault
    meta = sub.http
    assert meta["request"]["method"].upper() == "PUT"
    assert meta["request"]["url"] == strip_url(url)
    assert meta["response"]["status"] == expected_status
@pytest.mark.asyncio
async def test_nonexistent_domain_async():
    """An async DNS failure is recorded as a fault with the ConnectError cause."""
    with pytest.raises(httpx.ConnectError):
        async with httpx.AsyncClient() as client:
            await client.get("http://doesnt.exist")
    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.namespace == "remote"
    assert sub.fault
    assert sub.cause["exceptions"][0].type == "ConnectError"
@pytest.mark.asyncio
async def test_invalid_url_async():
    """A scheme-less URL faults with UnsupportedProtocol but is still traced."""
    bad_url = "KLSDFJKLSDFJKLSDJF"
    with pytest.raises(httpx.UnsupportedProtocol):
        async with httpx.AsyncClient() as client:
            await client.get(bad_url)
    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.namespace == "remote"
    assert sub.name == get_hostname(bad_url)
    assert sub.fault
    # The whole string ends up recorded as a path, hence the leading slash.
    assert sub.http["request"]["url"] == "/{}".format(strip_url(bad_url))
    assert sub.cause["exceptions"][0].type == "UnsupportedProtocol"
@pytest.mark.asyncio
async def test_name_uses_hostname_async():
    """Async subsegment names come from the hostname alone, never the path."""
    async with httpx.AsyncClient() as client:
        url1 = f"http://{BASE_URL}/fakepath/stuff/koo/lai/ahh"
        await client.get(url1)
        sub = xray_recorder.current_segment().subsegments[-1]
        assert sub.namespace == "remote"
        assert sub.name == BASE_URL
        assert sub.http["request"]["url"] == strip_url(url1)
        assert sub.http["request"]["method"].upper() == "GET"
        url2 = f"http://{BASE_URL}/"
        await client.get(url2, params={"some": "payload", "not": "toBeIncluded"})
        sub = xray_recorder.current_segment().subsegments[-1]
        assert sub.namespace == "remote"
        assert sub.name == BASE_URL
        assert sub.http["request"]["url"] == strip_url(url2)
        assert sub.http["request"]["method"].upper() == "GET"
        url3 = f"http://subdomain.{BASE_URL}/fakepath/stuff/koo/lai/ahh"
        try:
            await client.get(url3)
        except Exception:
            # This is an invalid url so we dont want to break the test
            pass
        sub = xray_recorder.current_segment().subsegments[-1]
        assert sub.namespace == "remote"
        assert sub.name == "subdomain." + BASE_URL
        assert sub.http["request"]["url"] == strip_url(url3)
        assert sub.http["request"]["method"].upper() == "GET"
@pytest.mark.asyncio
async def test_strip_http_url_async():
    """Query strings are stripped from the recorded request URL (async)."""
    expected_status = 200
    url = f"http://{BASE_URL}/get?foo=bar"
    async with httpx.AsyncClient() as client:
        resp = await client.get(url)
    assert "x-amzn-trace-id" in resp._request.headers
    sub = xray_recorder.current_segment().subsegments[0]
    assert sub.namespace == "remote"
    assert sub.name == get_hostname(url)
    meta = sub.http
    assert meta["request"]["method"].upper() == "GET"
    assert meta["request"]["url"] == strip_url(url)
    assert meta["response"]["status"] == expected_status
================================================
FILE: tests/ext/pg8000/__init__.py
================================================
================================================
FILE: tests/ext/pg8000/test_pg8000.py
================================================
import pg8000
import pytest
import testing.postgresql
from aws_xray_sdk.core import patch
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.context import Context
from aws_xray_sdk.ext.pg8000 import unpatch
@pytest.fixture(scope='module', autouse=True)
def patch_module():
    """Patch pg8000 once for the whole module and undo it at the end."""
    patch(('pg8000',))
    yield
    unpatch()
@pytest.fixture(autouse=True)
def construct_ctx():
    """
    Clean up context storage on each test run and begin a segment
    so that later subsegment can be attached. After each test run
    it cleans up context storage again.
    """
    xray_recorder.configure(service='test', sampling=False, context=Context())
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment('name')
    yield
    xray_recorder.clear_trace_entities()
def test_execute_dsn_kwargs():
    """A successful query yields an 'execute' subsegment with SQL metadata."""
    query = 'SELECT 1'
    with testing.postgresql.Postgresql() as postgresql:
        dsn = postgresql.dsn()
        conn = pg8000.connect(
            database=dsn['database'],
            user=dsn['user'],
            password='',
            host=dsn['host'],
            port=dsn['port'],
        )
        conn.cursor().execute(query)
        sub = xray_recorder.current_segment().subsegments[-1]
        assert sub.name == 'execute'
        meta = sub.sql
        assert meta['database_type'] == 'PostgreSQL'
        assert meta['user'] == dsn['user']
        assert meta['database_version']
def test_execute_bad_query():
    """A failing query still records a subsegment, with the raised
    ProgrammingError captured in its cause."""
    query = 'SELECT blarg'
    with testing.postgresql.Postgresql() as postgresql:
        dsn = postgresql.dsn()
        connection = pg8000.connect(
            database=dsn['database'],
            user=dsn['user'],
            password='',
            host=dsn['host'],
            port=dsn['port'],
        )
        cursor = connection.cursor()
        try:
            cursor.execute(query)
        except Exception:
            # The bad column name is expected to raise; swallow it so the
            # assertions below can inspect the recorded subsegment.
            pass

        recorded = xray_recorder.current_segment().subsegments[-1]
        assert recorded.name == 'execute'
        sql_meta = recorded.sql
        assert sql_meta['database_type'] == 'PostgreSQL'
        assert sql_meta['user'] == dsn['user']
        assert sql_meta['database_version']
        assert recorded.cause['exceptions'][0].type == 'ProgrammingError'
================================================
FILE: tests/ext/psycopg/__init__.py
================================================
================================================
FILE: tests/ext/psycopg/test_psycopg.py
================================================
import psycopg
import psycopg.sql
import psycopg_pool
import pytest
import testing.postgresql
from aws_xray_sdk.core import patch
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.context import Context
patch(('psycopg',))
@pytest.fixture(autouse=True)
def construct_ctx():
    """
    Clean up context storage on each test run and begin a segment
    so that later subsegment can be attached. After each test run
    it cleans up context storage again.
    """
    # sampling=False traces every call; local Context avoids the daemon.
    xray_recorder.configure(service='test', sampling=False, context=Context())
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment('name')
    yield
    # Drop all entities so no state leaks into the next test.
    xray_recorder.clear_trace_entities()
def test_execute_dsn_kwargs():
    """Connecting with keyword arguments records a subsegment with the
    sanitized connection url and SQL metadata."""
    query = 'SELECT 1'
    with testing.postgresql.Postgresql() as postgresql:
        url = postgresql.url()
        dsn = postgresql.dsn()
        conn_params = dict(
            dbname=dsn['database'],
            user=dsn['user'],
            password='',
            host=dsn['host'],
            port=dsn['port'],
        )
        connection = psycopg.connect(**conn_params)
        connection.cursor().execute(query)

        recorded = xray_recorder.current_segment().subsegments[0]
        assert recorded.name == 'execute'
        sql_meta = recorded.sql
        assert sql_meta['database_type'] == 'PostgreSQL'
        assert sql_meta['user'] == dsn['user']
        assert sql_meta['url'] == url
        assert sql_meta['database_version']
def test_execute_dsn_string():
    """Connecting with a DSN string works identically to kwargs; the
    recorded url must not include the password."""
    query = 'SELECT 1'
    with testing.postgresql.Postgresql() as postgresql:
        url = postgresql.url()
        dsn = postgresql.dsn()
        dsn_string = (
            f"dbname={dsn['database']} password=mypassword"
            f" host={dsn['host']} port={dsn['port']} user={dsn['user']}"
        )
        connection = psycopg.connect(dsn_string)
        connection.cursor().execute(query)

        recorded = xray_recorder.current_segment().subsegments[0]
        assert recorded.name == 'execute'
        sql_meta = recorded.sql
        assert sql_meta['database_type'] == 'PostgreSQL'
        assert sql_meta['user'] == dsn['user']
        assert sql_meta['url'] == url
        assert sql_meta['database_version']
def test_execute_in_pool():
    """Connections obtained from psycopg_pool are traced like direct ones."""
    query = 'SELECT 1'
    with testing.postgresql.Postgresql() as postgresql:
        url = postgresql.url()
        dsn = postgresql.dsn()
        dsn_string = (
            f"dbname={dsn['database']} password=mypassword"
            f" host={dsn['host']} port={dsn['port']} user={dsn['user']}"
        )
        pool = psycopg_pool.ConnectionPool(dsn_string, min_size=1, max_size=1)
        with pool.connection() as pooled_conn:
            pooled_conn.cursor().execute(query)

            recorded = xray_recorder.current_segment().subsegments[0]
            assert recorded.name == 'execute'
            sql_meta = recorded.sql
            assert sql_meta['database_type'] == 'PostgreSQL'
            assert sql_meta['user'] == dsn['user']
            assert sql_meta['url'] == url
            assert sql_meta['database_version']
def test_execute_bad_query():
    """A failing query still records full SQL metadata plus the
    UndefinedColumn exception in the subsegment's cause."""
    query = 'SELECT blarg'
    with testing.postgresql.Postgresql() as postgresql:
        url = postgresql.url()
        dsn = postgresql.dsn()
        connection = psycopg.connect(
            dbname=dsn['database'],
            user=dsn['user'],
            password='',
            host=dsn['host'],
            port=dsn['port'],
        )
        cursor = connection.cursor()
        try:
            cursor.execute(query)
        except Exception:
            # Expected failure: 'blarg' is not a column.
            pass

        recorded = xray_recorder.current_segment().subsegments[0]
        assert recorded.name == 'execute'
        sql_meta = recorded.sql
        assert sql_meta['database_type'] == 'PostgreSQL'
        assert sql_meta['user'] == dsn['user']
        assert sql_meta['url'] == url
        assert sql_meta['database_version']
        assert recorded.cause['exceptions'][0].type == 'UndefinedColumn'
def test_query_as_string():
    """psycopg.sql.Identifier.as_string keeps working against patched
    connection and cursor objects (the patch wraps them in proxies).

    Fix: dropped the unused ``url`` local the original assigned.
    """
    with testing.postgresql.Postgresql() as postgresql:
        dsn = postgresql.dsn()
        conn = psycopg.connect('dbname=' + dsn['database'] +
                               ' password=mypassword' +
                               ' host=' + dsn['host'] +
                               ' port=' + str(dsn['port']) +
                               ' user=' + dsn['user'])
        test_sql = psycopg.sql.Identifier('test')
        # as_string accepts either a connection or a cursor.
        assert test_sql.as_string(conn)
        assert test_sql.as_string(conn.cursor())
================================================
FILE: tests/ext/psycopg2/__init__.py
================================================
================================================
FILE: tests/ext/psycopg2/test_psycopg2.py
================================================
import psycopg2
import psycopg2.extras
import psycopg2.pool
import psycopg2.sql
import pytest
import testing.postgresql
from aws_xray_sdk.core import patch
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.context import Context
patch(('psycopg2',))
@pytest.fixture(autouse=True)
def construct_ctx():
    """
    Clean up context storage on each test run and begin a segment
    so that later subsegment can be attached. After each test run
    it cleans up context storage again.
    """
    # sampling=False traces every call; local Context avoids the daemon.
    xray_recorder.configure(service='test', sampling=False, context=Context())
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment('name')
    yield
    # Drop all entities so no state leaks into the next test.
    xray_recorder.clear_trace_entities()
def test_execute_dsn_kwargs():
    """Keyword-argument connect records a subsegment with sanitized url
    and SQL metadata."""
    query = 'SELECT 1'
    with testing.postgresql.Postgresql() as postgresql:
        url = postgresql.url()
        dsn = postgresql.dsn()
        connection = psycopg2.connect(
            dbname=dsn['database'],
            user=dsn['user'],
            password='',
            host=dsn['host'],
            port=dsn['port'],
        )
        connection.cursor().execute(query)

        recorded = xray_recorder.current_segment().subsegments[0]
        assert recorded.name == 'execute'
        sql_meta = recorded.sql
        assert sql_meta['database_type'] == 'PostgreSQL'
        assert sql_meta['user'] == dsn['user']
        assert sql_meta['url'] == url
        assert sql_meta['database_version']
def test_execute_dsn_kwargs_alt_dbname():
    """
    Psycopg supports database to be passed as `database` or `dbname`;
    the patch must handle the alternate spelling too.
    """
    query = 'SELECT 1'
    with testing.postgresql.Postgresql() as postgresql:
        url = postgresql.url()
        dsn = postgresql.dsn()
        connection = psycopg2.connect(
            database=dsn['database'],  # alternate keyword, not dbname=
            user=dsn['user'],
            password='',
            host=dsn['host'],
            port=dsn['port'],
        )
        connection.cursor().execute(query)

        recorded = xray_recorder.current_segment().subsegments[0]
        assert recorded.name == 'execute'
        sql_meta = recorded.sql
        assert sql_meta['database_type'] == 'PostgreSQL'
        assert sql_meta['user'] == dsn['user']
        assert sql_meta['url'] == url
        assert sql_meta['database_version']
def test_execute_dsn_string():
    """DSN-string connect is traced like kwargs connect; the recorded url
    must not include the password."""
    query = 'SELECT 1'
    with testing.postgresql.Postgresql() as postgresql:
        url = postgresql.url()
        dsn = postgresql.dsn()
        dsn_string = (
            f"dbname={dsn['database']} password=mypassword"
            f" host={dsn['host']} port={dsn['port']} user={dsn['user']}"
        )
        connection = psycopg2.connect(dsn_string)
        connection.cursor().execute(query)

        recorded = xray_recorder.current_segment().subsegments[0]
        assert recorded.name == 'execute'
        sql_meta = recorded.sql
        assert sql_meta['database_type'] == 'PostgreSQL'
        assert sql_meta['user'] == dsn['user']
        assert sql_meta['url'] == url
        assert sql_meta['database_version']
def test_execute_in_pool():
    """Connections handed out by SimpleConnectionPool are traced too."""
    query = 'SELECT 1'
    with testing.postgresql.Postgresql() as postgresql:
        url = postgresql.url()
        dsn = postgresql.dsn()
        pool = psycopg2.pool.SimpleConnectionPool(
            1, 1,
            dbname=dsn['database'],
            user=dsn['user'],
            password='',
            host=dsn['host'],
            port=dsn['port'],
        )
        cursor = pool.getconn(key=dsn['user']).cursor()
        cursor.execute(query)

        recorded = xray_recorder.current_segment().subsegments[0]
        assert recorded.name == 'execute'
        sql_meta = recorded.sql
        assert sql_meta['database_type'] == 'PostgreSQL'
        assert sql_meta['user'] == dsn['user']
        assert sql_meta['url'] == url
        assert sql_meta['database_version']
def test_execute_bad_query():
    """A failing query still records SQL metadata plus the UndefinedColumn
    exception in the subsegment's cause."""
    query = 'SELECT blarg'
    with testing.postgresql.Postgresql() as postgresql:
        url = postgresql.url()
        dsn = postgresql.dsn()
        connection = psycopg2.connect(
            dbname=dsn['database'],
            user=dsn['user'],
            password='',
            host=dsn['host'],
            port=dsn['port'],
        )
        cursor = connection.cursor()
        try:
            cursor.execute(query)
        except Exception:
            # Expected failure: 'blarg' is not a column.
            pass

        recorded = xray_recorder.current_segment().subsegments[0]
        assert recorded.name == 'execute'
        sql_meta = recorded.sql
        assert sql_meta['database_type'] == 'PostgreSQL'
        assert sql_meta['user'] == dsn['user']
        assert sql_meta['url'] == url
        assert sql_meta['database_version']
        assert recorded.cause['exceptions'][0].type == 'UndefinedColumn'
def test_register_extensions():
    """register_uuid must accept patched connections and cursors (the
    patch must not hide the attributes psycopg2.extras inspects).

    Fix: dropped the unused ``url`` local the original assigned.
    """
    with testing.postgresql.Postgresql() as postgresql:
        dsn = postgresql.dsn()
        conn = psycopg2.connect('dbname=' + dsn['database'] +
                                ' password=mypassword' +
                                ' host=' + dsn['host'] +
                                ' port=' + str(dsn['port']) +
                                ' user=' + dsn['user'])
        assert psycopg2.extras.register_uuid(None, conn)
        assert psycopg2.extras.register_uuid(None, conn.cursor())
def test_query_as_string():
    """psycopg2.sql.Identifier.as_string keeps working against patched
    connection and cursor objects.

    Fix: dropped the unused ``url`` local the original assigned.
    """
    with testing.postgresql.Postgresql() as postgresql:
        dsn = postgresql.dsn()
        conn = psycopg2.connect('dbname=' + dsn['database'] +
                                ' password=mypassword' +
                                ' host=' + dsn['host'] +
                                ' port=' + str(dsn['port']) +
                                ' user=' + dsn['user'])
        test_sql = psycopg2.sql.Identifier('test')
        # as_string accepts either a connection or a cursor.
        assert test_sql.as_string(conn)
        assert test_sql.as_string(conn.cursor())
def test_register_default_jsonb():
    """register_default_jsonb must accept patched connections and cursors.

    Fix: dropped the unused ``url`` local the original assigned.
    """
    with testing.postgresql.Postgresql() as postgresql:
        dsn = postgresql.dsn()
        conn = psycopg2.connect('dbname=' + dsn['database'] +
                                ' password=mypassword' +
                                ' host=' + dsn['host'] +
                                ' port=' + str(dsn['port']) +
                                ' user=' + dsn['user'])
        assert psycopg2.extras.register_default_jsonb(conn_or_curs=conn, loads=lambda x: x)
        assert psycopg2.extras.register_default_jsonb(conn_or_curs=conn.cursor(), loads=lambda x: x)
================================================
FILE: tests/ext/pymysql/__init__.py
================================================
================================================
FILE: tests/ext/pymysql/test_pymysql.py
================================================
import pymysql
import pytest
from aws_xray_sdk.core import patch
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.context import Context
from aws_xray_sdk.ext.pymysql import unpatch
# Location/credentials of the MySQL server these tests connect to;
# a server must be running locally for them to pass.
MYSQL_USER = "root"
MYSQL_PASSWORD = "root"
MYSQL_HOST = "localhost"
MYSQL_PORT = 3306
MYSQL_DB_NAME = "test_db"
@pytest.fixture(scope='module', autouse=True)
def patch_module():
    # Patch pymysql once for the whole module and undo it afterwards.
    patch(('pymysql',))
    yield
    unpatch()
@pytest.fixture(autouse=True)
def construct_ctx():
    """
    Clean up context storage on each test run and begin a segment
    so that later subsegment can be attached. After each test run
    it cleans up context storage again.
    """
    # sampling=False traces every call; local Context avoids the daemon.
    xray_recorder.configure(service='test', sampling=False, context=Context())
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment('name')
    yield
    # Drop all entities so no state leaks into the next test.
    xray_recorder.clear_trace_entities()
def test_execute_dsn_kwargs():
    """A query through a patched pymysql connection records a subsegment
    with MySQL metadata and the PyMySQL driver version string."""
    query = 'SELECT 1'
    connection = pymysql.connect(
        database=MYSQL_DB_NAME,
        user=MYSQL_USER,
        password=MYSQL_PASSWORD,
        host=MYSQL_HOST,
        port=MYSQL_PORT,
    )
    connection.cursor().execute(query)

    recorded = xray_recorder.current_segment().subsegments[-1]
    assert recorded.name == 'execute'
    sql_meta = recorded.sql
    assert sql_meta['database_type'] == 'MySQL'
    assert sql_meta['user'] == MYSQL_USER
    assert sql_meta['driver_version'] == 'PyMySQL'
    assert sql_meta['database_version']
def test_execute_bad_query():
    """A failing query still records SQL metadata and captures the raised
    exception in the subsegment's cause."""
    query = "SELECT blarg"
    connection = pymysql.connect(
        database=MYSQL_DB_NAME,
        user=MYSQL_USER,
        password=MYSQL_PASSWORD,
        host=MYSQL_HOST,
        port=MYSQL_PORT,
    )
    cursor = connection.cursor()
    try:
        cursor.execute(query)
    except Exception:
        # Expected failure: 'blarg' is not a column.
        pass

    recorded = xray_recorder.current_segment().subsegments[-1]
    assert recorded.name == "execute"
    sql_meta = recorded.sql
    assert sql_meta['database_type'] == 'MySQL'
    assert sql_meta['user'] == MYSQL_USER
    assert sql_meta['driver_version'] == 'PyMySQL'
    assert sql_meta['database_version']
    assert recorded.cause['exceptions'][0].type is not None
================================================
FILE: tests/ext/pynamodb/__init__.py
================================================
================================================
FILE: tests/ext/pynamodb/test_pynamodb.py
================================================
import pytest
import botocore.session
from botocore import UNSIGNED
from botocore.client import Config
from botocore.exceptions import ClientError
from pynamodb.attributes import UnicodeAttribute
from pynamodb.models import Model
from aws_xray_sdk.core import patch
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.context import Context
patch(('pynamodb',))
@pytest.fixture(autouse=True)
def construct_ctx():
    """
    Clean up context storage on each test run and begin a segment
    so that later subsegment can be attached. After each test run
    it cleans up context storage again.
    """
    # sampling=False traces every call; local Context avoids the daemon.
    xray_recorder.configure(service='test', sampling=False, context=Context())
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment('name')
    yield
    # Drop all entities so no state leaks into the next test.
    xray_recorder.clear_trace_entities()
def test_exception():
    """DescribeTable against a nonexistent table records exactly one
    errored 'dynamodb' subsegment annotated with region, operation and
    table name, and no nested subsegments."""
    class SampleModel(Model):
        class Meta:
            region = 'us-west-2'
            table_name = 'mytable'
        sample_attribute = UnicodeAttribute(hash_key=True)

    try:
        SampleModel.describe_table()
    except Exception:
        # The table does not exist; the failure itself is what we trace.
        pass

    subsegments = xray_recorder.current_segment().subsegments
    assert len(subsegments) == 1
    recorded = subsegments[0]
    assert recorded.name == 'dynamodb'
    assert len(recorded.subsegments) == 0
    assert recorded.error
    meta = recorded.aws
    assert meta['region'] == 'us-west-2'
    assert meta['operation'] == 'DescribeTable'
    assert meta['table_name'] == 'mytable'
def test_empty_response():
    """pynamodb_meta_processor must cope with a call that produced no
    response object, still extracting region and operation."""
    from aws_xray_sdk.ext.pynamodb.patch import pynamodb_meta_processor

    subsegment = xray_recorder.begin_subsegment('test')

    class TempReq:
        # Minimal stand-in for botocore's prepared request object.
        def __init__(self):
            self.headers = {'X-Amz-Target': 'ddb.ListTables'.encode('utf-8')}
            self.url = 'ddb.us-west-2'
            self.body = '{}'.encode('utf-8')

    pynamodb_meta_processor(wrapped=None, instance=None, args=[TempReq()],
                            kwargs=None, return_value=None, exception=None,
                            subsegment=subsegment, stack=None)

    aws_meta = subsegment.aws
    assert aws_meta['region'] == 'us-west-2'
    assert aws_meta['operation'] == 'ListTables'
def test_only_dynamodb_calls_are_traced():
    """Test only a single subsegment is created for other AWS services.
    As the pynamodb patch applies the botocore patch as well, we need
    to ensure that only one subsegment is created for all calls not
    made by PynamoDB. As PynamoDB calls botocore differently than the
    botocore patch expects we also just get a single subsegment per
    PynamoDB call.
    """
    s3_client = botocore.session.get_session().create_client(
        's3', region_name='us-west-2',
        config=Config(signature_version=UNSIGNED))
    try:
        s3_client.get_bucket_location(Bucket='mybucket')
    except ClientError:
        # Unsigned call against a bucket we don't own is expected to fail.
        pass

    subsegments = xray_recorder.current_segment().subsegments
    assert len(subsegments) == 1
    assert subsegments[0].name == 's3'
    assert len(subsegments[0].subsegments) == 0
================================================
FILE: tests/ext/requests/__init__.py
================================================
================================================
FILE: tests/ext/requests/test_requests.py
================================================
import pytest
import requests
from aws_xray_sdk.core import patch
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.context import Context
from aws_xray_sdk.ext.util import strip_url, get_hostname
# Patch requests at import time so every HTTP call below is traced.
patch(('requests',))
# httpbin.org is created by the same author of requests to make testing http easy.
BASE_URL = 'httpbin.org'
@pytest.fixture(autouse=True)
def construct_ctx():
    """
    Clean up context storage on each test run and begin a segment
    so that later subsegment can be attached. After each test run
    it cleans up context storage again.
    """
    # sampling=False traces every call; local Context avoids the daemon.
    xray_recorder.configure(service='test', sampling=False, context=Context())
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment('name')
    yield
    # Drop all entities so no state leaks into the next test.
    xray_recorder.clear_trace_entities()
def test_ok():
    """A successful GET names the subsegment after the host and records
    sanitized url, method and status code."""
    status_code = 200
    url = f'http://{BASE_URL}/status/{status_code}?foo=bar'
    requests.get(url)

    recorded = xray_recorder.current_segment().subsegments[0]
    assert get_hostname(url) == BASE_URL
    assert recorded.name == get_hostname(url)
    request_meta = recorded.http['request']
    assert request_meta['url'] == strip_url(url)
    assert request_meta['method'].upper() == 'GET'
    assert recorded.http['response']['status'] == status_code
def test_error():
    """A 4xx response marks the subsegment as an error."""
    status_code = 400
    url = f'http://{BASE_URL}/status/{status_code}'
    requests.post(url)

    recorded = xray_recorder.current_segment().subsegments[0]
    assert recorded.name == get_hostname(url)
    assert recorded.error
    request_meta = recorded.http['request']
    assert request_meta['url'] == strip_url(url)
    assert request_meta['method'].upper() == 'POST'
    assert recorded.http['response']['status'] == status_code
def test_throttle():
    """A 429 response sets both the error and throttle flags."""
    status_code = 429
    url = f'http://{BASE_URL}/status/{status_code}'
    requests.head(url)

    recorded = xray_recorder.current_segment().subsegments[0]
    assert recorded.name == get_hostname(url)
    assert recorded.error
    assert recorded.throttle
    request_meta = recorded.http['request']
    assert request_meta['url'] == strip_url(url)
    assert request_meta['method'].upper() == 'HEAD'
    assert recorded.http['response']['status'] == status_code
def test_fault():
    """A 5xx response marks the subsegment as a fault."""
    status_code = 500
    url = f'http://{BASE_URL}/status/{status_code}'
    requests.put(url)

    recorded = xray_recorder.current_segment().subsegments[0]
    assert recorded.name == get_hostname(url)
    assert recorded.fault
    request_meta = recorded.http['request']
    assert request_meta['url'] == strip_url(url)
    assert request_meta['method'].upper() == 'PUT'
    assert recorded.http['response']['status'] == status_code
def test_nonexistent_domain():
    """An unresolvable host records a faulted subsegment whose cause is
    a ConnectionError."""
    try:
        requests.get('http://doesnt.exist')
    except Exception:
        # prevent the uncaught exception from breaking the test run
        pass

    recorded = xray_recorder.current_segment().subsegments[0]
    assert recorded.fault
    assert recorded.cause['exceptions'][0].type == 'ConnectionError'
def test_invalid_url():
    """A schemeless string records a faulted subsegment whose cause is
    a MissingSchema error."""
    url = 'KLSDFJKLSDFJKLSDJF'
    try:
        requests.get(url)
    except Exception:
        # prevent the uncaught exception from breaking the test run
        pass

    recorded = xray_recorder.current_segment().subsegments[0]
    assert recorded.name == get_hostname(url)
    assert recorded.fault
    assert recorded.http['request']['url'] == strip_url(url)
    assert recorded.cause['exceptions'][0].type == 'MissingSchema'
def test_name_uses_hostname():
    """Subsegment names come from the host (subdomains included), never
    from the path or the query string."""
    url1 = f'http://{BASE_URL}/fakepath/stuff/koo/lai/ahh'
    requests.get(url1)
    recorded = xray_recorder.current_segment().subsegments[-1]
    assert recorded.name == BASE_URL
    request_meta = recorded.http['request']
    assert request_meta['url'] == strip_url(url1)
    assert request_meta['method'].upper() == 'GET'

    url2 = f'http://{BASE_URL}/'
    requests.get(url2, params={"some": "payload", "not": "toBeIncluded"})
    recorded = xray_recorder.current_segment().subsegments[-1]
    assert recorded.name == BASE_URL
    request_meta = recorded.http['request']
    assert request_meta['url'] == strip_url(url2)
    assert request_meta['method'].upper() == 'GET'

    url3 = f'http://subdomain.{BASE_URL}/fakepath/stuff/koo/lai/ahh'
    try:
        requests.get(url3)
    except Exception:
        # This is an invalid url so we dont want to break the test
        pass
    recorded = xray_recorder.current_segment().subsegments[-1]
    assert recorded.name == "subdomain." + BASE_URL
    request_meta = recorded.http['request']
    assert request_meta['url'] == strip_url(url3)
    assert request_meta['method'].upper() == 'GET'
def test_strip_http_url():
    """The recorded request url has its query string stripped."""
    status_code = 200
    url = f'http://{BASE_URL}/get?foo=bar'
    requests.get(url)

    recorded = xray_recorder.current_segment().subsegments[0]
    assert recorded.name == get_hostname(url)
    request_meta = recorded.http['request']
    assert request_meta['url'] == strip_url(url)
    assert request_meta['method'].upper() == 'GET'
    assert recorded.http['response']['status'] == status_code
================================================
FILE: tests/ext/sqlalchemy/__init__.py
================================================
================================================
FILE: tests/ext/sqlalchemy/test_query.py
================================================
import pytest
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.context import Context
from aws_xray_sdk.ext.sqlalchemy.query import XRaySessionMaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine, Column, Integer, String
from ...util import find_subsegment_by_annotation
Base = declarative_base()
# Minimal ORM model all queries in this module run against.
class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    fullname = Column(String)
    password = Column(String)
@pytest.fixture()
def engine():
    # Fresh in-memory SQLite engine per test.
    return create_engine('sqlite:///:memory:')
@pytest.fixture()
def session(engine):
    """Create the schema, start a trace segment and yield an X-Ray
    instrumented Session bound to the ``engine`` fixture.

    Fix: the original immediately shadowed the injected ``engine`` with a
    second local ``create_engine('sqlite:///:memory:')``, making the fixture
    dependency dead code; the injected engine (same in-memory URL) is used
    instead.
    """
    xray_recorder.configure(service='test', sampling=False, context=Context())
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment('SQLAlchemyTest')
    Session = XRaySessionMaker(bind=engine)
    Base.metadata.create_all(engine)
    session = Session()
    yield session
    xray_recorder.end_segment()
    xray_recorder.clear_trace_entities()
@pytest.fixture()
def connection(engine):
    # Bind the session maker to a Connection (not an Engine) to exercise
    # that code path; schema is created before yielding.
    conn = engine.connect()
    xray_recorder.configure(service='test', sampling=False, context=Context())
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment('SQLAlchemyTest')
    Session = XRaySessionMaker(bind=conn)
    Base.metadata.create_all(engine)
    session = Session()
    yield session
    xray_recorder.end_segment()
    xray_recorder.clear_trace_entities()
def test_all(capsys, session):
    """Calling Query.all() records an annotated subsegment carrying the
    sanitized SQL and the database url as metadata."""
    session.query(User).all()

    recorded = find_subsegment_by_annotation(
        xray_recorder.current_segment(), 'sqlalchemy', 'sqlalchemy.orm.query.all')
    assert recorded['annotations']['sqlalchemy'] == 'sqlalchemy.orm.query.all'
    assert recorded['sql']['sanitized_query']
    assert recorded['sql']['url']
def test_supports_connection(capsys, connection):
    """XRaySessionMaker must accept a Connection as well as an Engine."""
    connection.query(User).all()

    recorded = find_subsegment_by_annotation(
        xray_recorder.current_segment(), 'sqlalchemy', 'sqlalchemy.orm.query.all')
    assert recorded['annotations']['sqlalchemy'] == 'sqlalchemy.orm.query.all'
def test_add(capsys, session):
    """Session.add() of a new row is traced with its own annotation."""
    new_user = User(name='John', fullname="John Doe", password="password")
    session.add(new_user)

    recorded = find_subsegment_by_annotation(
        xray_recorder.current_segment(), 'sqlalchemy', 'sqlalchemy.orm.session.add')
    assert recorded['annotations']['sqlalchemy'] == 'sqlalchemy.orm.session.add'
    assert recorded['sql']['url']
def test_filter_first(capsys, session):
    """filter().first() is traced, and literal bind values (here a
    password) are sanitized out of the recorded SQL."""
    session.query(User).filter(User.password=="mypassword!").first()

    recorded = find_subsegment_by_annotation(
        xray_recorder.current_segment(), 'sqlalchemy', 'sqlalchemy.orm.query.first')
    assert recorded['annotations']['sqlalchemy'] == 'sqlalchemy.orm.query.first'
    sanitized = recorded['sql']['sanitized_query']
    assert sanitized
    assert "mypassword!" not in sanitized
    assert "users.password = ?" in sanitized
    assert recorded['sql']['url']
================================================
FILE: tests/ext/sqlalchemy_core/__init__.py
================================================
================================================
FILE: tests/ext/sqlalchemy_core/test_base.py
================================================
import pytest
from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from aws_xray_sdk.core import xray_recorder, patch
from aws_xray_sdk.core.context import Context
Base = declarative_base()
# Minimal ORM model shared by the sqlalchemy_core test modules.
class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    fullname = Column(String)
    password = Column(String)
@pytest.fixture()
def db_url():
    # Default database; test_postgres.py overrides this fixture.
    return 'sqlite:///:memory:'
@pytest.fixture()
def engine(db_url):
    """
    Clean up context storage on each test run and begin a segment
    so that later subsegment can be attached. After each test run
    it cleans up context storage again.
    """
    from aws_xray_sdk.ext.sqlalchemy_core import unpatch
    patch(('sqlalchemy_core',))
    engine = create_engine(db_url)
    xray_recorder.configure(service='test', sampling=False, context=Context())
    xray_recorder.begin_segment('name')
    # create_all runs under this first, throwaway segment so the DDL
    # subsegments do not pollute the segment the tests assert against.
    Base.metadata.create_all(engine)
    xray_recorder.clear_trace_entities()
    # Start the clean segment the tests actually record into.
    xray_recorder.begin_segment('name')
    yield engine
    xray_recorder.clear_trace_entities()
    unpatch()
@pytest.fixture()
def connection(engine):
    # Raw DBAPI-level connection from the patched engine.
    return engine.connect()
@pytest.fixture()
def session(engine):
    # Plain (unwrapped) Session; tracing comes from the core patch.
    Session = sessionmaker(bind=engine)
    return Session()
================================================
FILE: tests/ext/sqlalchemy_core/test_dburl.py
================================================
from sqlalchemy import create_engine
import urllib
import pytest
from aws_xray_sdk.core import xray_recorder, patch
from aws_xray_sdk.ext.sqlalchemy_core import unpatch
from aws_xray_sdk.core.context import Context
# Connection constants; the password deliberately contains ']' to
# exercise URL quoting when the engine URL is built.
MYSQL_USER = "test_dburl_user"
MYSQL_PASSWORD = "test]password"
MYSQL_HOST = "localhost"
MYSQL_PORT = 3306
MYSQL_DB_NAME = "test_dburl"
# Patch sqlalchemy_core at import time so engine connections are traced.
patch(('sqlalchemy_core',))
@pytest.fixture(autouse=True)
def construct_ctx():
    """
    Clean up context storage on each test run and begin a segment
    so that later subsegment can be attached. After each test run
    it cleans up context storage again.
    """
    # sampling=False traces every call; local Context avoids the daemon.
    xray_recorder.configure(service='test', sampling=False, context=Context())
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment('name')
    yield
    # Drop all entities so no state leaks into the next test.
    xray_recorder.clear_trace_entities()
def test_db_url_with_special_char():
    """A password with URL special characters, quoted into the engine
    URL, must not break the SDK's metadata extraction."""
    quoted_password = urllib.parse.quote_plus(MYSQL_PASSWORD)
    db_url = f"mysql+pymysql://{MYSQL_USER}:{quoted_password}@{MYSQL_HOST}:{MYSQL_PORT}/{MYSQL_DB_NAME}"
    connection = create_engine(db_url).connect()
    connection.execute("select 1")

    recorded = xray_recorder.current_segment().subsegments[-1]
    assert recorded.name == f"{MYSQL_HOST}:{MYSQL_PORT}"
    sql_meta = recorded.sql
    assert sql_meta['database_type'] == 'mysql'
    assert sql_meta['user'] == MYSQL_USER
    assert sql_meta['driver_version'] == 'pymysql'
    assert sql_meta['database_version']
================================================
FILE: tests/ext/sqlalchemy_core/test_postgres.py
================================================
import pytest
from .test_base import connection, engine, session, User
from sqlalchemy import create_engine
from sqlalchemy.dialects.postgresql import insert as pg_insert
from aws_xray_sdk.core import xray_recorder, patch
from aws_xray_sdk.core.context import Context
import testing.postgresql
@pytest.fixture()
def postgres_db():
    # Temporary throwaway PostgreSQL instance, torn down after the test.
    with testing.postgresql.Postgresql() as postgresql:
        yield postgresql
@pytest.fixture()
def db_url(postgres_db):
    # Overrides test_base.db_url so the shared engine fixture targets
    # the temporary PostgreSQL instance instead of SQLite.
    return postgres_db.url()
@pytest.fixture()
def sanitized_db_url(postgres_db):
    """URL as the SDK records it: scheme, user, host, port and database,
    with no password component."""
    dsn = postgres_db.dsn()
    return f"postgresql://{dsn['user']}@{dsn['host']}:{dsn['port']}/{dsn['database']}"
def test_all(session, sanitized_db_url):
    """Query.all() against PostgreSQL emits exactly one subsegment whose
    url is sanitized and whose query is the SELECT."""
    session.query(User).all()

    subsegments = xray_recorder.current_segment().subsegments
    assert len(subsegments) == 1
    sql_meta = subsegments[0].sql
    assert sql_meta['url'] == sanitized_db_url
    assert sql_meta['sanitized_query'].startswith('SELECT')
    assert sql_meta['sanitized_query'].endswith('FROM users')
def test_insert_on_conflict_renders(connection):
    """Dialect-specific constructs (ON CONFLICT) must render into the
    recorded sanitized query."""
    statement = pg_insert(User).values(
        name='John', fullname="John Doe", password='123456'
    ).on_conflict_do_nothing()
    connection.execute(statement)

    subsegments = xray_recorder.current_segment().subsegments
    assert len(subsegments) == 1
    sql_meta = subsegments[0].sql
    assert sql_meta['sanitized_query'].startswith('INSERT INTO users')
    assert 'ON CONFLICT DO NOTHING' in sql_meta['sanitized_query']
================================================
FILE: tests/ext/sqlalchemy_core/test_sqlalchemy_core.py
================================================
from .test_base import User, session, db_url, engine, connection
from sqlalchemy.sql.expression import Insert, Delete
from aws_xray_sdk.core import xray_recorder
def test_all(session):
    """Query.all() emits exactly one subsegment with the database url
    and the sanitized SELECT."""
    session.query(User).all()

    subsegments = xray_recorder.current_segment().subsegments
    assert len(subsegments) == 1
    sql_meta = subsegments[0].sql
    assert sql_meta['url'] == 'sqlite:///:memory:'
    assert sql_meta['sanitized_query'].startswith('SELECT')
    assert sql_meta['sanitized_query'].endswith('FROM users')
def test_filter_first(session):
    """filter().first() is traced and literal bind values are removed
    from the recorded SQL."""
    session.query(User).filter(User.password=="mypassword!").first()

    subsegments = xray_recorder.current_segment().subsegments
    assert len(subsegments) == 1
    sanitized = subsegments[0].sql['sanitized_query']
    assert sanitized.startswith('SELECT')
    assert 'FROM users' in sanitized
    assert "mypassword!" not in sanitized
def test_connection_add(connection):
    """Core-level INSERT through a Connection is traced and its bound
    values (the password) are sanitized out."""
    password = "123456"
    insert_stmt = Insert(User).values(name='John', fullname="John Doe", password=password)
    connection.execute(insert_stmt)

    subsegments = xray_recorder.current_segment().subsegments
    assert len(subsegments) == 1
    sql_meta = subsegments[0].sql
    assert sql_meta['sanitized_query'].startswith('INSERT INTO users')
    assert sql_meta['url'] == 'sqlite:///:memory:'
    assert password not in sql_meta['sanitized_query']
def test_connection_query(connection):
    """Core-level DELETE through a Connection is traced and its bound
    values (the password) are sanitized out."""
    password = "123456"
    delete_stmt = Delete(User).where(User.name == 'John').where(User.password == password)
    connection.execute(delete_stmt)

    subsegments = xray_recorder.current_segment().subsegments
    assert len(subsegments) == 1
    sql_meta = subsegments[0].sql
    assert sql_meta['sanitized_query'].startswith('DELETE FROM users')
    assert sql_meta['url'] == 'sqlite:///:memory:'
    assert password not in sql_meta['sanitized_query']
================================================
FILE: tests/ext/sqlalchemy_core/test_sqlalchemy_core_2.py
================================================
from .test_base import User, session, db_url, engine, connection
from sqlalchemy.sql.expression import select
from aws_xray_sdk.core import xray_recorder
# 2.0 style execution test. see https://docs.sqlalchemy.org/en/14/changelog/migration_14.html#orm-query-is-internally
# -unified-with-select-update-delete-2-0-style-execution-available
def test_orm_style_select_execution(session):
    """SQLAlchemy 2.0-style session.execute(select(...)) is traced like
    the legacy Query API."""
    select_stmt = select(User).where(User.name == 'John')
    session.execute(select_stmt)

    subsegments = xray_recorder.current_segment().subsegments
    assert len(subsegments) == 1
    sanitized = subsegments[0].sql['sanitized_query']
    assert sanitized.startswith('SELECT')
    assert 'FROM users' in sanitized
================================================
FILE: tests/ext/sqlite3/__init__.py
================================================
================================================
FILE: tests/ext/sqlite3/test_sqlite3.py
================================================
import sqlite3
import pytest
from aws_xray_sdk.core import patch
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.context import Context
@pytest.fixture(scope="module")
def db():
    # Patch sqlite3 once, then share one in-memory connection module-wide.
    patch(('sqlite3',))
    return sqlite3.connect(":memory:")
@pytest.fixture(autouse=True)
def construct_ctx():
    """
    Clean up context storage on each test run and begin a segment
    so that later subsegment can be attached. After each test run
    it cleans up context storage again.
    """
    # sampling=False traces every call; local Context avoids the daemon.
    xray_recorder.configure(service='test', sampling=False, context=Context())
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment('name')
    yield
    # Drop all entities so no state leaks into the next test.
    xray_recorder.clear_trace_entities()
def test_execute(db):
    """A successful query produces a subsegment named after the database file."""
    db.execute('SELECT name FROM sqlite_master')
    subsegment = xray_recorder.current_segment().subsegments[0]
    assert subsegment.name == ':memory:'
    meta = subsegment.sql
    assert meta['database_type'] == 'sqlite3'
    assert meta['database_version']
def test_invalid_syntax(db):
    """A failing statement is still traced and its exception recorded on the subsegment."""
    q = 'some_query'
    try:
        db.execute(q)
    except sqlite3.OperationalError:
        # Expected: 'some_query' is not valid SQL. Catch only the error the
        # test asserts on below, instead of swallowing any Exception.
        pass
    subsegment = xray_recorder.current_segment().subsegments[0]
    assert subsegment.name == ':memory:'
    sql = subsegment.sql
    assert sql['database_type'] == 'sqlite3'
    assert sql['database_version']
    # The patched driver records the raised exception on the subsegment cause.
    exception = subsegment.cause['exceptions'][0]
    assert exception.type == 'OperationalError'
================================================
FILE: tests/mock_module/__init__.py
================================================
def mock_init():
    """Package-level no-op used to verify patching of package __init__ functions."""
================================================
FILE: tests/mock_module/mock_file.py
================================================
def mock_func():
    """Module-level no-op used to verify patching of plain module functions."""
================================================
FILE: tests/mock_module/mock_submodule/__init__.py
================================================
def mock_subinit():
    """Subpackage-level no-op used to verify patching of nested __init__ functions."""
================================================
FILE: tests/mock_module/mock_submodule/mock_subfile.py
================================================
from aws_xray_sdk.core import xray_recorder
def mock_subfunc():
    """No-op in the deepest submodule, used to verify recursive patching."""
# Already wrapped by the SDK's capture decorator; the patcher must detect
# this and not wrap it a second time (no duplicate subsegments).
@xray_recorder.capture()
def mock_no_doublepatch():
    pass
class MockClass:
    """Plain class whose members exercise the patcher's method handling."""

    def __init__(self):
        """No-op constructor; expected to be patched as 'MockClass.__init__'."""

    def mock_method(self):
        """No-op instance method; expected to be patched."""

    @classmethod
    def mock_classmethod(cls):
        """No-op classmethod.

        Should not be automatically patched.
        """

    @staticmethod
    def mock_staticmethod():
        """No-op staticmethod; expected to be patched."""
class MockSubclass(MockClass):
    """Subclass used to verify patching of inherited and subclass-only methods."""

    def __init__(self):
        """Delegate to the base constructor; patched as 'MockSubclass.__init__'."""
        super().__init__()

    def mock_submethod(self):
        """No-op method defined only on the subclass."""
================================================
FILE: tests/mock_sampling_rule.json
================================================
{
"version": 2,
"default": {
"fixed_target": 1,
"rate": 0.05
},
"rules": [
]
}
================================================
FILE: tests/test_async_local_storage.py
================================================
import asyncio
import random
import sys
from aws_xray_sdk.core.async_context import TaskLocalStorage
def test_localstorage_isolation(event_loop):
    """Each asyncio task must see only its own value in TaskLocalStorage.

    100 concurrent tasks each store a random number, yield control so other
    tasks run in between, then read the value back; isolation holds only if
    every task reads exactly what it wrote.
    """
    local_storage = TaskLocalStorage(loop=event_loop)

    async def _test():
        """
        Compute a random number
        Store it in task local storage
        Suspend task so another can run
        Retrieve random number from task local storage
        Compare that to the local variable
        """
        try:
            random_int = random.random()
            local_storage.randint = random_int
            if sys.version_info >= (3, 8):
                await asyncio.sleep(0.0)
            else:
                # asyncio.sleep's loop argument was deprecated in 3.8.
                await asyncio.sleep(0.0, loop=event_loop)
            current_random_int = local_storage.randint
            assert random_int == current_random_int
            return True
        except Exception:
            # Narrowed from a bare `except:`; AssertionError is still caught
            # here and surfaces as a failed task via the False return value.
            return False

    # Run loads of concurrent tasks
    if sys.version_info >= (3, 8):
        results = event_loop.run_until_complete(
            asyncio.wait([event_loop.create_task(_test()) for _ in range(0, 100)])
        )
    else:
        results = event_loop.run_until_complete(
            asyncio.wait(
                [event_loop.create_task(_test()) for _ in range(0, 100)],
                loop=event_loop,
            )
        )
    results = [item.result() for item in results[0]]
    # Double check all is good
    assert all(results)
================================================
FILE: tests/test_async_recorder.py
================================================
import platform
from .util import get_new_stubbed_recorder
from aws_xray_sdk.version import VERSION
from aws_xray_sdk.core.async_context import AsyncContext
import asyncio
# Module-wide stubbed recorder; each test reconfigures its context/event loop.
xray_recorder = get_new_stubbed_recorder()
@xray_recorder.capture_async('test_2')
async def async_method2():
    """Leaf coroutine; capture_async records it as subsegment 'test_2'."""
@xray_recorder.capture_async('test_1')
async def async_method():
    """Coroutine recorded as 'test_1'; awaits async_method2 to create nesting."""
    await async_method2()
async def test_capture(event_loop):
    """Nested captured coroutines produce nested subsegments plus runtime metadata."""
    xray_recorder.configure(
        service='test', sampling=False, context=AsyncContext(loop=event_loop)
    )
    segment = xray_recorder.begin_segment('name')
    await async_method()

    # async_method created exactly one child subsegment...
    assert len(segment.subsegments) == 1
    outer = segment.subsegments[0]
    assert outer.name == 'test_1'
    # ...and async_method2 nested exactly one beneath it.
    assert len(outer.subsegments) == 1
    assert outer.subsegments[0].name == 'test_2'

    # SDK runtime context is attached to the segment.
    xray_meta = segment.aws.get('xray')
    assert xray_meta.get('sdk') == 'X-Ray for Python'
    assert xray_meta.get('sdk_version') == VERSION
    runtime_info = segment.service
    assert runtime_info.get('runtime') == platform.python_implementation()
    assert runtime_info.get('runtime_version') == platform.python_version()
async def test_concurrent_calls(event_loop):
    """All concurrently open subsegments must attach directly to the segment."""
    xray_recorder.configure(
        service='test', sampling=False, context=AsyncContext(loop=event_loop)
    )
    async with xray_recorder.in_segment_async('segment') as segment:
        global counter
        counter = 0
        total_tasks = 10
        # Event used as a barrier: no task closes its subsegment until all
        # tasks have opened theirs.
        flag = asyncio.Event()
        async def assert_task():
            async with xray_recorder.in_subsegment_async('segment') as subsegment:
                global counter
                counter += 1
                # Begin all subsegments before closing any to ensure they overlap
                if counter < total_tasks:
                    await flag.wait()
                else:
                    # Last task to arrive releases everyone.
                    flag.set()
                return subsegment.parent_id
        tasks = [assert_task() for task in range(total_tasks)]
        subsegs_parent_ids = await asyncio.gather(*tasks)
        # Overlapping subsegments must all be direct children of the segment,
        # not accidentally nested under each other.
        for subseg_parent_id in subsegs_parent_ids:
            assert subseg_parent_id == segment.id
async def test_async_context_managers(event_loop):
    """capture_async and in_subsegment_async both work as async context managers."""
    xray_recorder.configure(
        service='test', sampling=False, context=AsyncContext(loop=event_loop)
    )
    async with xray_recorder.in_segment_async('segment') as segment:
        async with xray_recorder.capture_async('aio_capture') as subsegment:
            assert segment.subsegments[0].name == 'aio_capture'
        # Leaving the context closes the subsegment.
        assert subsegment.in_progress is False
        async with xray_recorder.in_subsegment_async('in_sub') as subsegment:
            assert segment.subsegments[1].name == 'in_sub'
            assert subsegment.in_progress is True
        assert subsegment.in_progress is False
================================================
FILE: tests/test_daemon_config.py
================================================
import pytest
from aws_xray_sdk.core.daemon_config import DaemonConfig
from aws_xray_sdk.core.exceptions.exceptions import InvalidDaemonAddressException
DEFAULT_IP = '127.0.0.1'
DEFAULT_PORT = 2000
def test_default_config():
    """With no address string the daemon defaults to 127.0.0.1:2000 for both protocols."""
    config = DaemonConfig()
    assert config.udp_ip == DEFAULT_IP
    assert config.tcp_ip == DEFAULT_IP
    # Use the module constant, consistent with the IP assertions above
    # (previously the literal 2000 was repeated while DEFAULT_PORT sat unused).
    assert config.udp_port == DEFAULT_PORT
    assert config.tcp_port == DEFAULT_PORT
def test_single_address():
    """A single 'ip:port' string configures both the UDP and TCP endpoints."""
    config = DaemonConfig('192.168.0.1:3000')
    expected_ip, expected_port = '192.168.0.1', 3000
    assert config.udp_ip == expected_ip
    assert config.tcp_ip == expected_ip
    assert config.udp_port == expected_port
    assert config.tcp_port == expected_port
def test_set_tcp_udp_separately():
    """'tcp:ip:port' and 'udp:ip:port' entries configure each protocol independently."""
    def check(config):
        assert config.udp_ip == '127.0.0.2'
        assert config.tcp_ip == '192.168.0.1'
        assert config.udp_port == 8080
        assert config.tcp_port == 3000

    check(DaemonConfig('tcp:192.168.0.1:3000 udp:127.0.0.2:8080'))
    # order can be reversed
    check(DaemonConfig('udp:127.0.0.2:8080 tcp:192.168.0.1:3000'))
def test_invalid_address():
    """Malformed daemon address strings must raise InvalidDaemonAddressException."""
    bad_addresses = (
        '192.168.0.1',                          # no port at all
        'tcp:192.168.0.1:3000',                 # tcp entry without a udp entry
        '127.0.0.2:8080 192.168.0.1:3000',      # two entries, neither prefixed
        'udp:127.0.0.2:8080 192.168.0.1:3000',  # second entry missing its prefix
    )
    for address in bad_addresses:
        with pytest.raises(InvalidDaemonAddressException):
            DaemonConfig(address)
================================================
FILE: tests/test_dummy_entites.py
================================================
from aws_xray_sdk.core.models.dummy_entities import DummySegment, DummySubsegment
from aws_xray_sdk.core.models import http
def test_not_sampled():
    """Dummy entities always report themselves as unsampled."""
    segment = DummySegment()
    subsegment = DummySubsegment(segment)
    assert segment.sampled is False or not segment.sampled
    assert not segment.sampled
    assert not subsegment.sampled
def test_no_ops():
    """Mutators on dummy entities are no-ops and serialization yields nothing."""
    segment = DummySegment()
    segment.put_metadata('key', 'value')
    segment.put_annotation('key', 'value')
    segment.put_http_meta(http.URL, 'url')
    segment.set_user('user')
    for attr in ('metadata', 'annotations', 'http', 'user'):
        assert not getattr(segment, attr)

    subsegment = DummySubsegment(segment)
    subsegment.put_metadata('key', 'value')
    subsegment.put_annotation('key', 'value')
    subsegment.put_http_meta(http.URL, 'url')
    subsegment.set_aws({'key': 'value'})
    subsegment.set_sql({'key': 'value'})
    for attr in ('metadata', 'annotations', 'http', 'aws', 'sql'):
        assert not getattr(subsegment, attr)

    # Dummy entities never serialize to a payload.
    assert not segment.serialize()
    assert not subsegment.serialize()
def test_structure_intact():
    """Dummy entities still maintain the parent/child tree and close cleanly."""
    segment = DummySegment()
    child = DummySubsegment(segment)
    grandchild = DummySubsegment(segment)
    child.add_subsegment(grandchild)
    segment.add_subsegment(child)
    assert segment.subsegments[0] is child
    assert child.subsegments[0] is grandchild
    # Close bottom-up; the segment must then be flushable.
    grandchild.close()
    child.close()
    segment.close()
    assert segment.ready_to_send()
def test_invalid_entity_name():
    """Characters not allowed in entity names are stripped on construction."""
    segment = DummySegment('DummySegment() Test?')
    subsegment = DummySubsegment(segment, 'Dummy*Sub!segment$')
    assert segment.name == 'DummySegment Test'
    assert subsegment.name == 'DummySubsegment'
def test_dummy_segment_trace_id():
    """A dummy segment carries a real, well-formed trace id, not a placeholder."""
    trace_id = DummySegment().trace_id
    assert trace_id != 'dummy'
    assert '-' in trace_id
    # checking version of trace id
    assert trace_id.startswith('1')
================================================
FILE: tests/test_facade_segment.py
================================================
import pytest
from aws_xray_sdk.core.models.facade_segment import FacadeSegment
from aws_xray_sdk.core.models.subsegment import Subsegment
from aws_xray_sdk.core.exceptions.exceptions import FacadeSegmentMutationException
from aws_xray_sdk.core.models import http
def test_not_ready():
    """A facade segment that is no longer in progress must not be sent."""
    segment = FacadeSegment('name', 'id', 'id', True)
    segment.in_progress = False
    assert not segment.ready_to_send()
def test_initializing():
    """A facade segment is 'initializing' only when created without an entity id."""
    assert not FacadeSegment('name', 'id', 'id', False).initializing
    assert FacadeSegment('name', None, 'id', True).initializing
def test_unsupported_operations():
    """Every mutating operation on a facade segment must raise."""
    segment = FacadeSegment('name', 'id', 'id', False)
    mutations = (
        lambda: segment.put_annotation('key', 'value'),
        lambda: segment.put_metadata('key', 'value'),
        lambda: segment.set_user('user'),
        lambda: segment.close(),
        lambda: segment.serialize(),
        lambda: segment.put_http_meta(http.URL, 'value'),
    )
    for mutate in mutations:
        with pytest.raises(FacadeSegmentMutationException):
            mutate()
def test_structure_intact():
    """Subsegments still attach correctly beneath a facade segment."""
    segment = FacadeSegment('name', 'id', 'id', True)
    child = Subsegment('name', 'local', segment)
    grandchild = Subsegment('name', 'local', segment)
    segment.add_subsegment(child)
    child.add_subsegment(grandchild)
    assert segment.subsegments[0] is child
    assert child.subsegments[0] is grandchild
def test_adding_unsampled_subsegment():
    """An unsampled subsegment can still be attached under a facade segment."""
    segment = FacadeSegment('name', 'id', 'id', True)
    sampled_sub = Subsegment('sampled', 'local', segment)
    unsampled_sub = Subsegment('unsampled', 'local', segment)
    unsampled_sub.sampled = False
    segment.add_subsegment(sampled_sub)
    sampled_sub.add_subsegment(unsampled_sub)
    assert segment.subsegments[0] is sampled_sub
    assert sampled_sub.subsegments[0] is unsampled_sub
    # PEP 8 / E712: compare to False with `is`, not `==`.
    assert unsampled_sub.sampled is False
================================================
FILE: tests/test_lambda_context.py
================================================
import os
from aws_xray_sdk import global_sdk_config
import pytest
from aws_xray_sdk.core import lambda_launcher
from aws_xray_sdk.core.models.dummy_entities import DummySegment
from aws_xray_sdk.core.models.subsegment import Subsegment
TRACE_ID = '1-5759e988-bd862e3fe1be46a994272793'
PARENT_ID = '53995c3f42cd8ad8'
DATA = 'Foo=Bar'
HEADER_VAR = "Root=%s;Parent=%s;Sampled=1;%s" % (TRACE_ID, PARENT_ID, DATA)
# The trace-header env var must be in place before LambdaContext() reads it.
os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY] = HEADER_VAR
context = lambda_launcher.LambdaContext()
@pytest.fixture(autouse=True)
def setup():
    """Re-enable the SDK after each test in case a test disabled it."""
    yield
    global_sdk_config.set_sdk_enabled(True)
def test_facade_segment_generation():
    """The facade segment mirrors the ids and extra data from the trace header."""
    facade = context.get_trace_entity()
    assert facade.id == PARENT_ID
    assert facade.trace_id == TRACE_ID
    assert facade.sampled
    assert DATA in facade.get_origin_trace_header().to_header_str()
def test_put_subsegment():
    """Subsegments stack under the facade segment and unwind on end_subsegment."""
    facade = context.get_trace_entity()
    first = Subsegment('name', 'local', facade)
    context.put_subsegment(first)
    assert context.get_trace_entity().id == first.id
    second = Subsegment('name', 'local', facade)
    context.put_subsegment(second)
    assert context.get_trace_entity().id == second.id
    # Parent/child wiring: facade -> first -> second.
    assert first.subsegments[0] is second
    assert second.parent_id == first.id
    assert first.parent_id == facade.id
    assert second.parent_segment is facade
    # The original trace header data is preserved on the parent segment.
    assert DATA in second.parent_segment.get_origin_trace_header().to_header_str()
    context.end_subsegment()
    assert context.get_trace_entity().id == first.id
    context.end_subsegment()
    assert context.get_trace_entity().id == facade.id
def test_disable():
    """Disabling the SDK makes the facade segment unsampled but keeps header data."""
    context.clear_trace_entities()
    assert context.get_trace_entity().sampled
    context.clear_trace_entities()
    global_sdk_config.set_sdk_enabled(False)
    facade = context.get_trace_entity()
    assert not facade.sampled
    assert DATA in facade.get_origin_trace_header().to_header_str()
def test_non_initialized():
    # Context that hasn't been initialized by lambda container should not add subsegments to the dummy segment.
    temp_header_var = os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY]
    del os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY]
    # With no trace header present the context hands out a dummy segment.
    temp_context = lambda_launcher.LambdaContext()
    dummy_segment = temp_context.get_trace_entity()
    subsegment = Subsegment("TestSubsegment", "local", dummy_segment)
    temp_context.put_subsegment(subsegment)
    # The subsegment was rejected; the dummy segment is still current.
    assert temp_context.get_trace_entity() == dummy_segment
    # "Lambda" container added metadata now. Should see subsegment now.
    # The following put_segment call will overwrite the dummy segment in the context with an initialized facade segment that accepts a subsegment.
    os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY] = temp_header_var
    temp_context.put_subsegment(subsegment)
    assert temp_context.get_trace_entity() == subsegment
def test_lambda_passthrough():
    # Hold previous environment value
    temp_header_var = os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY]
    del os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY]
    # Set header to lambda passthrough style header (Root only, no Parent/Sampled)
    os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY] = "Root=%s;Lineage=10:1234abcd:3" % TRACE_ID
    temp_context = lambda_launcher.LambdaContext()
    dummy_segment = temp_context.get_trace_entity()
    subsegment = Subsegment("TestSubsegment", "local", dummy_segment)
    temp_context.put_subsegment(subsegment)
    # Resulting entity is not the same dummy segment, so simply check that it is a dummy segment
    assert isinstance(temp_context.get_trace_entity(), DummySegment)
    # Reset header value and ensure behaviour returns to normal
    del os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY]
    os.environ[lambda_launcher.LAMBDA_TRACE_HEADER_KEY] = temp_header_var
    temp_context.put_subsegment(subsegment)
    assert temp_context.get_trace_entity() == subsegment
def test_set_trace_entity():
    """set_trace_entity with a subsegment also restores its parent segment."""
    segment = context.get_trace_entity()
    subsegment = Subsegment('name', 'local', segment)
    # Fixed stray space in the original `context. clear_trace_entities()` call.
    context.clear_trace_entities()
    # should set the parent segment in thread local
    context.set_trace_entity(subsegment)
    tl = context._local
    # Plain attribute access instead of an explicit __getattribute__ call.
    assert tl.segment == segment
    assert context.get_trace_entity() == subsegment
    context.clear_trace_entities()
    # should set the segment in thread local
    context.set_trace_entity(segment)
    tl = context._local
    assert tl.segment == segment
    assert context.get_trace_entity() == segment
================================================
FILE: tests/test_local_sampling.py
================================================
import copy
import pytest
from aws_xray_sdk.core.sampling.local.sampling_rule import SamplingRule
from aws_xray_sdk.core.sampling.local.sampler import LocalSampler
from aws_xray_sdk.core.exceptions.exceptions import InvalidSamplingManifestError
# Single local rule fragment: matches any host/method under /api/move/*
# and samples 5% of matches with no fixed target.
RULE = {"description": "Player moves.",
        "host": "*",
        "http_method": "*",
        "url_path": "/api/move/*",
        "fixed_target": 0,
        "rate": 0.05
        }
# Full v2 manifest: the /api/move/* rule never samples (rate 0); everything
# else falls through to the default (first request per second + 100% rate).
RULE_MANIFEST = {
    "version": 2,
    "rules": [{
        "description": "Player moves.",
        "host": "*",
        "http_method": "*",
        "url_path": "/api/move/*",
        "fixed_target": 0,
        "rate": 0
    }],
    "default": {
        "fixed_target": 1,
        "rate": 1
    }
}
def test_should_trace():
    """Requests matching the zero-rate rule are dropped; others use the default."""
    sampler = LocalSampler(RULE_MANIFEST)
    # '/view' only matches the default rule, which samples the first request.
    assert sampler.should_trace({'method': 'GET', 'path': '/view'})
    # '/api/move/left' matches the rate-0 rule and is never sampled.
    request = {'host': 'name', 'method': 'method', 'path': '/api/move/left'}
    assert not sampler.should_trace(request)
def test_missing_version_num():
    """A manifest without a version number is rejected."""
    manifest = copy.deepcopy(RULE_MANIFEST)
    manifest.pop('version')
    with pytest.raises(InvalidSamplingManifestError):
        LocalSampler(manifest)
def test_default_matching():
    """With no request info the sampler falls back to the default rule."""
    assert LocalSampler(RULE_MANIFEST).should_trace()
def test_path_matching():
    """Missing host/method/path fields act as wildcards when matching a rule."""
    rule = SamplingRule(RULE)
    assert rule.applies('name', 'GET', '/api/move/up')
    assert rule.applies(None, 'POST', '/api/move/up')
    assert rule.applies('name', None, '/api/move/up')
    assert rule.applies('name', 'PUT', None)
    # A non-matching path fails even though host and method are wildcards.
    assert not rule.applies(None, 'GET', '/root')
def test_negative_rate():
    """A negative sampling rate is invalid."""
    bad_rule = copy.deepcopy(RULE)
    bad_rule['rate'] = -1
    with pytest.raises(InvalidSamplingManifestError):
        SamplingRule(bad_rule)
def test_negative_fixed_target():
    """A negative fixed target is invalid."""
    bad_rule = copy.deepcopy(RULE)
    bad_rule['fixed_target'] = -1
    with pytest.raises(InvalidSamplingManifestError):
        SamplingRule(bad_rule)
def test_invalid_default():
    """Using a matcher-carrying rule as the default rule is rejected."""
    with pytest.raises(InvalidSamplingManifestError):
        SamplingRule(RULE, 2, default=True)
def test_incomplete_path_rule():
    """A non-default rule missing its url_path matcher is rejected."""
    bad_rule = copy.deepcopy(RULE)
    bad_rule.pop('url_path')
    with pytest.raises(InvalidSamplingManifestError):
        SamplingRule(bad_rule)
================================================
FILE: tests/test_local_sampling_benchmark.py
================================================
import json
import pkgutil
from pathlib import Path
# Faster
def test_pkgutil_static_read(benchmark):
    """Benchmark loading the bundled sampling rule via pkgutil.get_data."""
    def get_sampling_rule():
        raw = pkgutil.get_data(__name__, 'mock_sampling_rule.json')
        return json.loads(raw.decode('utf-8'))
    benchmark(get_sampling_rule)
# Slower
def test_pathlib_static_read(benchmark):
    """Benchmark loading the sampling rule via pathlib + open."""
    def get_sampling_rule():
        rule_path = Path(__file__).parent / 'mock_sampling_rule.json'
        with open(rule_path) as f:
            return json.load(f)
    benchmark(get_sampling_rule)
================================================
FILE: tests/test_patcher.py
================================================
import inspect
import pytest
import sys
import wrapt
try:
# Python versions >= 3.4
from importlib import reload
except ImportError:
# Python versions 3 <= x < 3.4 have reload in the imp module
try:
from imp import reload
except ImportError:
# Python versions < 3 have reload built-in
pass
from aws_xray_sdk import global_sdk_config
from aws_xray_sdk.core import patcher, xray_recorder
from aws_xray_sdk.core.context import Context
# Modules targeted by the patcher tests; also used to verify that patching
# alone does not import any of them.
TEST_MODULES = (
    'tests.mock_module',
    'tests.mock_module.mock_file',
    'tests.mock_module.mock_submodule',
    'tests.mock_module.mock_submodule.mock_subfile',
)
@pytest.fixture(autouse=True)
def construct_ctx():
    """
    Clean up context storage on each test run and begin a segment
    so that later subsegment can be attached. After each test run
    it cleans up context storage again.
    """
    # Snapshot the imported modules so anything a test pulls in (the mock
    # modules and their patch hooks) can be unloaded during teardown.
    pre_run_modules = set(module for module in sys.modules.keys())
    xray_recorder.configure(service='test', sampling=False, context=Context())
    xray_recorder.clear_trace_entities()
    xray_recorder.begin_segment('name')
    yield
    xray_recorder.end_segment()
    xray_recorder.clear_trace_entities()
    global_sdk_config.set_sdk_enabled(True)
    # Reload wrapt.importer references to modules to start off clean
    reload(wrapt)
    reload(wrapt.importer)
    # Reload patcher references to already patched modules
    reload(patcher)
    # Cleanup the already imported module references in the system
    # (longest names first so submodules are handled before their parents).
    for module_name, module in sorted(sys.modules.items(), key=lambda m: len(m[0]), reverse=True):
        if module_name not in pre_run_modules and inspect.ismodule(module):
            reload(module)
    for module_name in sorted(sys.modules.keys(), key=lambda m: len(m), reverse=True):
        if module_name not in pre_run_modules:
            del sys.modules[module_name]
def _call_all_mock_functions():
    """Invoke every function/method the mock modules expose, in a fixed order.

    Tests assert on the resulting subsegment names in order, so the call
    order here defines the expected subsegment order.
    """
    from .mock_module import mock_file, mock_init
    from .mock_module.mock_submodule import mock_subfile, mock_subinit
    mock_init()
    mock_subinit()
    mock_file.mock_func()
    mock_subfile.mock_subfunc()
    mock_subfile.mock_no_doublepatch()
    mock_subfile.MockClass.mock_classmethod()
    mock_subfile.MockClass.mock_staticmethod()
    mock_subfile.MockClass().mock_method()
    mock_subfile.MockSubclass().mock_submethod()
@pytest.mark.parametrize('modules', [
    ('nonexisting.module',),
    ('psycopg2', 'nonexisting.module',),
    ('nonexisting.module', 'psycopg2',),
])
def test_incorrect_import_fails(modules):
    """patch() must fail loudly when any requested module is unsupported."""
    with pytest.raises(Exception) as exc_info:
        patcher.patch(modules)
    assert str(exc_info.value) == 'modules nonexisting.module are currently not supported for patching'
def test_external_file():
    """Patching a single file-module traces its functions exactly once each."""
    patcher.patch(['tests.mock_module.mock_file'])
    assert len(xray_recorder.current_segment().subsegments) == 0
    # We want to make sure patching does not load any of the patched modules
    assert not [m for m in TEST_MODULES if m in sys.modules]
    _call_all_mock_functions()
    names = [sub.name for sub in xray_recorder.current_segment().subsegments]
    # mock_no_doublepatch is traced via its own decorator, not the patcher.
    assert names == ['mock_func', 'mock_no_doublepatch']
def test_external_module():
    """Patching a package traces its __init__, functions and instance methods."""
    patcher.patch(['tests.mock_module.mock_submodule'])
    assert len(xray_recorder.current_segment().subsegments) == 0
    # We want to make sure patching does not load any of the patched modules
    assert not [m for m in TEST_MODULES if m in sys.modules]
    _call_all_mock_functions()
    names = [sub.name for sub in xray_recorder.current_segment().subsegments]
    assert names == [
        'mock_subinit',
        'mock_subfunc',
        'mock_no_doublepatch',  # already decorated; must appear only once
        'mock_staticmethod',
        'MockClass.__init__',
        'mock_method',
        'MockSubclass.__init__',
        'mock_submethod',
    ]
def test_external_submodules_full():
    """Patching the top-level package recursively patches all submodules."""
    patcher.patch(['tests.mock_module'])
    assert len(xray_recorder.current_segment().subsegments) == 0
    # We want to make sure patching does not load any of the patched modules
    assert not [m for m in TEST_MODULES if m in sys.modules]
    _call_all_mock_functions()
    names = [sub.name for sub in xray_recorder.current_segment().subsegments]
    assert names == [
        'mock_init',
        'mock_subinit',
        'mock_func',
        'mock_subfunc',
        'mock_no_doublepatch',
        'mock_staticmethod',
        'MockClass.__init__',
        'mock_method',
        'MockSubclass.__init__',
        'mock_submethod',
    ]
def test_external_submodules_ignores_file():
    """ignore_module_patterns excludes a single file-module from patching."""
    patcher.patch(['tests.mock_module'], ignore_module_patterns=['tests.mock_module.mock_file'])
    assert len(xray_recorder.current_segment().subsegments) == 0
    # We want to make sure patching does not load any of the patched modules
    assert not [m for m in TEST_MODULES if m in sys.modules]
    _call_all_mock_functions()
    names = [sub.name for sub in xray_recorder.current_segment().subsegments]
    # mock_func is absent because mock_file was excluded.
    assert names == [
        'mock_init',
        'mock_subinit',
        'mock_subfunc',
        'mock_no_doublepatch',
        'mock_staticmethod',
        'MockClass.__init__',
        'mock_method',
        'MockSubclass.__init__',
        'mock_submethod',
    ]
def test_external_submodules_ignores_module():
    """ignore_module_patterns excludes an entire subpackage from patching."""
    patcher.patch(['tests.mock_module'], ignore_module_patterns=['tests.mock_module.mock_submodule'])
    assert len(xray_recorder.current_segment().subsegments) == 0
    # We want to make sure patching does not load any of the patched modules
    assert not [m for m in TEST_MODULES if m in sys.modules]
    _call_all_mock_functions()
    names = [sub.name for sub in xray_recorder.current_segment().subsegments]
    # Only the non-ignored modules are traced; mock_no_doublepatch appears
    # because of its own decorator.
    assert names == ['mock_init', 'mock_func', 'mock_no_doublepatch']
def test_disable_sdk_disables_patching():
    """When the SDK is globally disabled, patch() must be a complete no-op."""
    global_sdk_config.set_sdk_enabled(False)
    patcher.patch(['tests.mock_module'])
    assert not [m for m in TEST_MODULES if m in sys.modules]
    assert len(xray_recorder.current_segment().subsegments) == 0
================================================
FILE: tests/test_plugins.py
================================================
from unittest.mock import patch
from aws_xray_sdk.core.plugins.utils import get_plugin_modules
# All plugin module names the SDK ships with.
supported_plugins = (
    'ec2_plugin',
    'ecs_plugin',
    'elasticbeanstalk_plugin',
)
def test_runtime_context_available():
    """Every bundled plugin exposes runtime_context after initialize()."""
    for plugin in get_plugin_modules(supported_plugins):
        plugin.initialize()
        assert hasattr(plugin, 'runtime_context')
@patch('aws_xray_sdk.core.plugins.ec2_plugin.do_request')
def test_ec2_plugin_imdsv2_success(mock_do_request):
    """IMDSv2 happy path: first call returns a token, second the metadata document."""
    metadata_json = "{\"availabilityZone\" : \"us-east-2a\", \"imageId\" : \"ami-03cca83dd001d4666\"," \
                    " \"instanceId\" : \"i-07a181803de94c666\", \"instanceType\" : \"t3.xlarge\"}"
    mock_do_request.side_effect = ['token', metadata_json]
    ec2_plugin = get_plugin_modules(('ec2_plugin',))[0]
    ec2_plugin.initialize()
    assert hasattr(ec2_plugin, 'runtime_context')
    runtime_context = getattr(ec2_plugin, 'runtime_context')
    assert runtime_context['instance_id'] == 'i-07a181803de94c666'
    assert runtime_context['availability_zone'] == 'us-east-2a'
    assert runtime_context['instance_type'] == 't3.xlarge'
    assert runtime_context['ami_id'] == 'ami-03cca83dd001d4666'
@patch('aws_xray_sdk.core.plugins.ec2_plugin.do_request')
def test_ec2_plugin_v2_fail_v1_success(mock_do_request):
    """If the first metadata request fails, the plugin falls back to IMDSv1."""
    metadata_json = "{\"availabilityZone\" : \"cn-north-1a\", \"imageId\" : \"ami-03cca83dd001d4111\"," \
                    " \"instanceId\" : \"i-07a181803de94c111\", \"instanceType\" : \"t2.xlarge\"}"
    # First do_request call blows up; the second returns the v1 document.
    mock_do_request.side_effect = [Exception("Boom!"), metadata_json]
    ec2_plugin = get_plugin_modules(('ec2_plugin',))[0]
    ec2_plugin.initialize()
    assert hasattr(ec2_plugin, 'runtime_context')
    runtime_context = getattr(ec2_plugin, 'runtime_context')
    assert runtime_context['instance_id'] == 'i-07a181803de94c111'
    assert runtime_context['availability_zone'] == 'cn-north-1a'
    assert runtime_context['instance_type'] == 't2.xlarge'
    assert runtime_context['ami_id'] == 'ami-03cca83dd001d4111'
@patch('aws_xray_sdk.core.plugins.ec2_plugin.do_request')
def test_ec2_plugin_v2_fail_v1_fail(mock_do_request):
    """If both metadata requests fail, runtime_context is left empty."""
    mock_do_request.side_effect = [Exception("Boom v2!"), Exception("Boom v1!")]
    ec2_plugin = get_plugin_modules(('ec2_plugin',))[0]
    ec2_plugin.initialize()
    assert hasattr(ec2_plugin, 'runtime_context')
    assert getattr(ec2_plugin, 'runtime_context') == {}
================================================
FILE: tests/test_recorder.py
================================================
import platform
import time
import pytest
from aws_xray_sdk.core.sampling.sampling_rule import SamplingRule
from aws_xray_sdk.core.sampling.rule_cache import RuleCache
from aws_xray_sdk.core.sampling.sampler import DefaultSampler
from aws_xray_sdk.version import VERSION
from .util import get_new_stubbed_recorder
from aws_xray_sdk import global_sdk_config
from aws_xray_sdk.core.models.segment import Segment
from aws_xray_sdk.core.models.subsegment import Subsegment
from aws_xray_sdk.core.models.dummy_entities import DummySegment, DummySubsegment
from aws_xray_sdk.core.exceptions.exceptions import SegmentNotFoundException
# Module-wide stubbed recorder shared by the tests below; some tests create
# their own fresh recorder instead to avoid shared state.
xray_recorder = get_new_stubbed_recorder()
@pytest.fixture(autouse=True)
def construct_ctx(monkeypatch):
    """
    Clean up context storage before and after each test run.
    """
    # Keep the sampler's botocore session from picking up real credentials.
    monkeypatch.delattr("botocore.session.Session.get_credentials")
    xray_recorder.configure(sampling=False)
    xray_recorder.clear_trace_entities()
    yield
    xray_recorder.clear_trace_entities()
    global_sdk_config.set_sdk_enabled(True)
def test_default_runtime_context():
    """begin_segment attaches SDK and Python runtime info to the segment."""
    segment = xray_recorder.begin_segment('name')
    xray_meta = segment.aws.get('xray')
    assert xray_meta.get('sdk') == 'X-Ray for Python'
    assert xray_meta.get('sdk_version') == VERSION
    runtime_info = segment.service
    assert runtime_info.get('runtime') == platform.python_implementation()
    assert runtime_info.get('runtime_version') == platform.python_version()
def test_subsegment_parenting():
    """Subsegments nest under the current entity and unwind in LIFO order."""
    segment = xray_recorder.begin_segment('name')
    subsegment = xray_recorder.begin_subsegment('name')
    xray_recorder.end_subsegment('name')
    # After closing the only subsegment, the segment is current again.
    assert xray_recorder.get_trace_entity() is segment
    subsegment1 = xray_recorder.begin_subsegment('name1')
    subsegment2 = xray_recorder.begin_subsegment('name2')
    assert subsegment2.parent_id == subsegment1.id
    assert subsegment1.parent_id == segment.id
    # The already-closed subsegment was a direct child of the segment.
    assert subsegment.parent_id == xray_recorder.current_segment().id
    xray_recorder.end_subsegment()
    assert not subsegment2.in_progress
    assert subsegment1.in_progress
    assert xray_recorder.current_subsegment().id == subsegment1.id
    xray_recorder.end_subsegment()
    assert not subsegment1.in_progress
    assert xray_recorder.get_trace_entity() is segment
def test_subsegments_streaming():
    """Closing a subsegment past the streaming threshold streams it out."""
    xray_recorder.configure(streaming_threshold=10)
    segment = xray_recorder.begin_segment('name')
    # Open 11 nested subsegments named '0' through '10'.
    for i in range(0, 11):
        xray_recorder.begin_subsegment(name=str(i))
    # Closing the innermost subsegment ('10') exceeds the threshold, so it
    # is streamed out upon close. A single call replaces the original
    # one-iteration `for i in range(0, 1)` loop.
    xray_recorder.end_subsegment()
    assert segment.get_total_subsegments_size() == 10
    assert xray_recorder.current_subsegment().name == '9'
def test_subsegment_streaming_set_zero():
    """A zero threshold streams every subsegment out as soon as it closes."""
    xray_recorder.configure(streaming_threshold=0)
    segment = xray_recorder.begin_segment('name')
    xray_recorder.begin_subsegment(name='sub')
    xray_recorder.end_subsegment()
    assert xray_recorder.streaming.streaming_threshold == 0
    # The closed subsegment was streamed immediately, leaving none attached.
    assert segment.get_total_subsegments_size() == 0
def test_put_annotation_metadata():
    """Annotations/metadata go to the current entity only, not its relatives."""
    segment = xray_recorder.begin_segment('name')
    xray_recorder.put_annotation('key1', 'value1')
    subsegment = xray_recorder.begin_subsegment('name')
    xray_recorder.put_metadata('key2', 'value2')
    # The annotation landed on the segment, the metadata on the subsegment.
    assert segment.annotations['key1'] == 'value1'
    assert not segment.annotations.get('key2')
    assert subsegment.metadata['default']['key2'] == 'value2'
    assert not subsegment.metadata['default'].get('key1')
def test_default_pass_through_with_missing_context():
    """With the default LOG_ERROR strategy, missing context must not raise."""
    xray_recorder = get_new_stubbed_recorder()
    xray_recorder.configure(sampling=False)  # default context_missing = 'LOG_ERROR'
    assert not xray_recorder.is_sampled()
    # None of these calls should throw even though no segment exists.
    xray_recorder.put_annotation('key', 'value')
    xray_recorder.put_metadata('key', 'value')
    xray_recorder.end_segment()
def test_raise_runtime_error_with_missing_context():
    """The RUNTIME_ERROR strategy raises when no segment is present."""
    xray_recorder = get_new_stubbed_recorder()
    xray_recorder.configure(sampling=False, context_missing='RUNTIME_ERROR')
    with pytest.raises(SegmentNotFoundException):
        assert not xray_recorder.is_sampled()
        xray_recorder.end_segment()
def test_capture_not_suppress_exception():
    """The capture decorator must re-raise exceptions from the wrapped call."""
    recorder = get_new_stubbed_recorder()
    recorder.configure(sampling=False)

    @recorder.capture()
    def buggy_func():
        return 1 / 0

    with pytest.raises(ZeroDivisionError):
        buggy_func()
def test_capture_not_swallow_return():
    """The capture decorator must pass the wrapped return value through."""
    recorder = get_new_stubbed_recorder()
    recorder.configure(sampling=False)
    expected = 1

    @recorder.capture()
    def my_func():
        return expected

    assert my_func() == expected
def test_first_begin_segment_sampled():
    """With sampling enabled, the first segment created is sampled."""
    xray_recorder = get_new_stubbed_recorder()
    xray_recorder.configure(sampling=True)
    segment = xray_recorder.begin_segment('name')
    assert segment.sampled
def test_unsampled_subsegment_of_sampled_parent():
    """begin_subsegment_without_sampling overrides a sampled parent.

    Fix: replaced ``== True`` / ``== False`` comparisons with truthiness
    asserts (PEP 8 / flake8 E712).
    """
    xray_recorder = get_new_stubbed_recorder()
    xray_recorder.configure(sampling=True)
    segment = xray_recorder.begin_segment('name', sampling=True)
    subsegment = xray_recorder.begin_subsegment_without_sampling('unsampled')
    assert segment.sampled
    assert not subsegment.sampled
def test_begin_subsegment_unsampled():
    """An explicitly-unsampled subsegment of an unsampled segment stays unsampled.

    Fix: replaced ``== False`` comparisons with truthiness asserts
    (PEP 8 / flake8 E712).
    """
    xray_recorder = get_new_stubbed_recorder()
    xray_recorder.configure(sampling=False)
    segment = xray_recorder.begin_segment('name', sampling=False)
    subsegment = xray_recorder.begin_subsegment_without_sampling('unsampled')
    assert not segment.sampled
    assert not subsegment.sampled
def test_in_segment_closing():
    """The in_segment/in_subsegment/capture context managers close entities on exit."""
    xray_recorder = get_new_stubbed_recorder()
    xray_recorder.configure(sampling=False)
    with xray_recorder.in_segment('name') as segment:
        assert segment.in_progress is True
        segment.put_metadata('key1', 'value1')
        segment.put_annotation('key2', 'value2')
        with xray_recorder.in_subsegment('subsegment') as subsegment:
            assert subsegment.in_progress is True
        with xray_recorder.capture('capture') as subsegment:
            assert subsegment.in_progress is True
            assert subsegment.name == 'capture'
        # Both subsegment context managers closed their entity on exit.
        assert subsegment.in_progress is False
    assert segment.in_progress is False
    # Annotations/metadata set inside the context survive the close.
    assert segment.annotations['key2'] == 'value2'
    assert segment.metadata['default']['key1'] == 'value1'
def test_in_segment_exception():
    """Exceptions raised inside the context managers are recorded and re-raised."""
    xray_recorder = get_new_stubbed_recorder()
    xray_recorder.configure(sampling=False)
    with pytest.raises(Exception):
        with xray_recorder.in_segment('name') as segment:
            assert segment.in_progress is True
            assert 'exceptions' not in segment.cause
            raise Exception('test exception')
    # The context manager closed the segment, flagged fault and kept the cause.
    assert segment.in_progress is False
    assert segment.fault is True
    assert len(segment.cause['exceptions']) == 1
    with pytest.raises(Exception):
        with xray_recorder.in_segment('name') as segment:
            with xray_recorder.in_subsegment('name') as subsegment:
                assert subsegment.in_progress is True
                raise Exception('test exception')
    # The innermost entity records the exception first.
    assert len(subsegment.cause['exceptions']) == 1
def test_default_enabled():
    """When the SDK is enabled (default), real entity types are created."""
    assert global_sdk_config.sdk_enabled()
    segment = xray_recorder.begin_segment('name')
    subsegment = xray_recorder.begin_subsegment('name')
    # Exact type checks (not isinstance) to distinguish real entities
    # from their dummy counterparts.
    assert type(xray_recorder.current_segment()) is Segment
    assert type(xray_recorder.current_subsegment()) is Subsegment
def test_disable_is_dummy():
    """Disabling the SDK makes the recorder hand out dummy entities."""
    global_sdk_config.set_sdk_enabled(False)
    segment = xray_recorder.begin_segment('name')
    subsegment = xray_recorder.begin_subsegment('name')
    assert type(xray_recorder.current_segment()) is DummySegment
    assert type(xray_recorder.current_subsegment()) is DummySubsegment
def test_unsampled_subsegment_is_dummy():
    """begin_subsegment_without_sampling yields a dummy even when the SDK is enabled."""
    assert global_sdk_config.sdk_enabled()
    segment = xray_recorder.begin_segment('name')
    subsegment = xray_recorder.begin_subsegment_without_sampling('name')
    assert type(xray_recorder.current_subsegment()) is DummySubsegment
def test_subsegment_respects_parent_sampling_decision():
    """A child of an unsampled subsegment must itself be unsampled.

    Fix: replaced ``== False`` comparisons with truthiness asserts
    (PEP 8 / flake8 E712).
    """
    assert global_sdk_config.sdk_enabled()
    segment = xray_recorder.begin_segment('name')
    subsegment = xray_recorder.begin_subsegment_without_sampling('name2')
    subsegment2 = xray_recorder.begin_subsegment('unsampled-subsegment')
    assert type(xray_recorder.current_subsegment()) is DummySubsegment
    assert not subsegment.sampled
    assert not subsegment2.sampled
def test_disabled_empty_context_current_calls():
    """With the SDK disabled and nothing begun, current_* return dummies."""
    global_sdk_config.set_sdk_enabled(False)
    assert type(xray_recorder.current_segment()) is DummySegment
    assert type(xray_recorder.current_subsegment()) is DummySubsegment
def test_disabled_out_of_order_begins():
    """Out-of-order begin calls must not raise when the SDK is disabled."""
    global_sdk_config.set_sdk_enabled(False)
    # Subsegment before any segment exists: should be tolerated.
    xray_recorder.begin_subsegment("Test")
    xray_recorder.begin_segment("Test")
    xray_recorder.begin_subsegment("Test1")
    xray_recorder.begin_subsegment("Test2")
    assert type(xray_recorder.begin_subsegment("Test3")) is DummySubsegment
    assert type(xray_recorder.begin_segment("Test4")) is DummySegment
def test_disabled_put_methods():
    """put_annotation/put_metadata must be no-ops (not errors) when disabled."""
    global_sdk_config.set_sdk_enabled(False)
    xray_recorder.put_annotation("Test", "Value")
    xray_recorder.put_metadata("Test", "Value", "Namespace")
# Test for random end segments/subsegments without any entities in context.
# Should not throw any exceptions
def test_disabled_ends():
    """Arbitrary end calls with no open entities must not raise when disabled."""
    global_sdk_config.set_sdk_enabled(False)
    xray_recorder.end_segment()
    xray_recorder.end_subsegment()
    xray_recorder.end_segment()
    xray_recorder.end_segment()
    xray_recorder.end_subsegment()
    xray_recorder.end_subsegment()
# Begin subsegment should not fail on its own.
def test_disabled_begin_subsegment():
    """A lone begin_subsegment returns a dummy when the SDK is disabled."""
    global_sdk_config.set_sdk_enabled(False)
    subsegment_entity = xray_recorder.begin_subsegment("Test")
    assert type(subsegment_entity) is DummySubsegment
# When disabled, force sampling should still return dummy entities.
def test_disabled_force_sampling():
    """Enabling sampling cannot override a disabled SDK."""
    global_sdk_config.set_sdk_enabled(False)
    xray_recorder.configure(sampling=True)
    segment_entity = xray_recorder.begin_segment("Test1")
    subsegment_entity = xray_recorder.begin_subsegment("Test2")
    assert type(segment_entity) is DummySegment
    assert type(subsegment_entity) is DummySubsegment
# When disabled, get_trace_entity should return DummySegment if an entity is not present in the context
def test_disabled_get_context_entity():
    """get_trace_entity falls back to a DummySegment when disabled."""
    global_sdk_config.set_sdk_enabled(False)
    entity = xray_recorder.get_trace_entity()
    assert type(entity) is DummySegment
def test_max_stack_trace_zero():
    """max_trace_back limits recorded stack depth (1 frame vs 0 frames).

    Fix: the original asserted ``'exceptions' not in ....cause.__dict__``.
    ``cause`` is used as a plain dict elsewhere in this file (see
    test_in_segment_exception, which checks ``in segment.cause`` directly),
    so ``.__dict__`` raised AttributeError inside ``pytest.raises(Exception)``
    and satisfied it before the intended Exception was ever raised.
    """
    xray_recorder.configure(max_trace_back=1)
    with pytest.raises(Exception):
        with xray_recorder.in_segment('name') as segment_with_stack:
            assert segment_with_stack.in_progress is True
            assert 'exceptions' not in segment_with_stack.cause
            raise Exception('Test Exception')
    assert len(segment_with_stack.cause['exceptions']) == 1
    xray_recorder.configure(max_trace_back=0)
    with pytest.raises(Exception):
        with xray_recorder.in_segment('name') as segment_no_stack:
            assert segment_no_stack.in_progress is True
            assert 'exceptions' not in segment_no_stack.cause
            raise Exception('Test Exception')
    assert len(segment_no_stack.cause['exceptions']) == 1
    # max_trace_back=1 keeps a single frame; max_trace_back=0 keeps none.
    assert len(segment_with_stack.cause['exceptions'][0].stack) == 1
    assert len(segment_no_stack.cause['exceptions'][0].stack) == 0
# CustomSampler to mimic the DefaultSampler,
# but without the rule and target polling logic.
class CustomSampler(DefaultSampler):
    """DefaultSampler variant with polling disabled and a fixed two-rule cache."""

    def start(self):
        # No-op: skip starting the background rule/target pollers.
        pass

    def should_trace(self, sampling_req=None):
        """Match the request against two static rules ('app_a'/'app_b');
        fall back to the local sampler when nothing matches."""
        rule_cache = RuleCache()
        rule_cache.last_updated = int(time.time())
        sampling_rule_a = SamplingRule(name='rule_a',
                                       priority=2,
                                       rate=0.5,
                                       reservoir_size=1,
                                       service='app_a')
        sampling_rule_b = SamplingRule(name='rule_b',
                                       priority=2,
                                       rate=0.5,
                                       reservoir_size=1,
                                       service='app_b')
        rule_cache.load_rules([sampling_rule_a, sampling_rule_b])
        now = int(time.time())
        # Default the service_type to the recorder's origin, mirroring
        # what the request normalization does before matching.
        if sampling_req and not sampling_req.get('service_type', None):
            sampling_req['service_type'] = self._origin
        elif sampling_req is None:
            sampling_req = {'service_type': self._origin}
        matched_rule = rule_cache.get_matched_rule(sampling_req, now)
        if matched_rule:
            return self._process_matched_rule(matched_rule, now)
        else:
            return self._local_sampler.should_trace(sampling_req)
def test_begin_segment_matches_sampling_rule_on_name():
    """The segment name is matched against sampler rules and the winning
    rule name is recorded on the segment."""
    xray_recorder.configure(sampling=True, sampler=CustomSampler())
    segment = xray_recorder.begin_segment("app_b")
    assert segment.aws.get('xray').get('sampling_rule_name') == 'rule_b'
================================================
FILE: tests/test_sampling_rule_cache.py
================================================
import time
import pytest
from aws_xray_sdk.core.sampling.sampling_rule import SamplingRule
from aws_xray_sdk.core.sampling.rule_cache import RuleCache
from aws_xray_sdk.core.sampling.reservoir import Reservoir
# Shared fixture rules: rule_0 has the highest priority and specific
# host/method/path matchers; rule_1 and rule_2 share priority 2.
rule_0 = SamplingRule(name='a', priority=1, rate=0.1,
                      reservoir_size=1, host='*mydomain*',
                      method='GET', path='myop', service='random',
                      service_type='random')
rule_1 = SamplingRule(name='aa', priority=2, rate=0.1,
                      reservoir_size=1, host='*random*',
                      method='POST', path='random', service='proxy',
                      service_type='random')
rule_2 = SamplingRule(name='b', priority=2, rate=0.1,
                      reservoir_size=1, host='*', method='GET',
                      path='ping', service='myapp',
                      service_type='AWS::EC2::Instance')
# Catch-all rule; priority 1000 places it last in the sorted cache.
rule_default = SamplingRule(name='Default', priority=1000, rate=0.1,
                            reservoir_size=1)
@pytest.fixture(autouse=True)
def reset_rules():
    """
    Reset the shared rules' sampling statistics and reservoirs before each
    test run, since the rule objects are module-level and mutable.
    """
    rules = [rule_default, rule_2, rule_0, rule_1]
    for rule in rules:
        # snapshot_statistics drains the accumulated counters.
        rule.snapshot_statistics()
        rule.reservoir = Reservoir()
    yield
def test_rules_sorting():
    """Loaded rules are ordered by priority with the default rule last."""
    cache = RuleCache()
    cache.load_rules([rule_default, rule_2, rule_0, rule_1])
    expected_order = [rule_0, rule_1, rule_2, rule_default]
    for position, expected_rule in enumerate(expected_order):
        assert cache.rules[position] == expected_rule
def test_evict_deleted_rules():
    """Reloading evicts rules that are absent from the new rule set."""
    cache = RuleCache()
    cache.load_rules([rule_default, rule_1, rule_0])
    cache.load_rules([rule_default, rule_2])
    remaining = cache.rules
    assert len(remaining) == 2
    assert rule_1 not in remaining
    assert rule_0 not in remaining
def test_rule_matching():
    """Each request attribute matches its expected rule; no match -> default."""
    cache = RuleCache()
    now = int(time.time())
    cache.load_rules([rule_default, rule_1, rule_2, rule_0])
    cache.last_updated = now

    cases = [
        ({'host': 'mydomain.com'}, 'a'),
        ({'method': 'POST'}, 'aa'),
        ({'path': 'ping'}, 'b'),
        ({'service': 'proxy'}, 'aa'),
        ({'service_type': 'AWS::EC2::Instance'}, 'b'),
    ]
    for sampling_req, expected_name in cases:
        assert cache.get_matched_rule(sampling_req, now).name == expected_name

    # Default should be always returned when there is no match
    unmatched_req = {'host': 'unknown', 'path': 'unknown'}
    assert cache.get_matched_rule(unmatched_req, now).is_default()
def test_preserving_sampling_statistics():
    """Reloading a rule with the same name keeps its statistics and reservoir."""
    cache = RuleCache()
    cache.load_rules([rule_default, rule_0])
    rule_0.increment_request_count()
    rule_0.increment_sampled_count()
    rule_0.reservoir.load_quota(quota=3, TTL=15, interval=None)
    # Same name 'a' as rule_0, but a fresh object without matchers.
    new_rule_0 = SamplingRule(name='a', priority=1,
                              rate=0.1, reservoir_size=1)
    cache.load_rules([rule_default, new_rule_0])
    # The replacement rule inherited the old rule's counters and reservoir.
    statistics = cache.rules[0].snapshot_statistics()
    reservoir = cache.rules[0].reservoir
    assert statistics['request_count'] == 1
    assert statistics['sampled_count'] == 1
    assert reservoir.quota == 3
    assert reservoir.TTL == 15
def test_correct_target_mapping():
    """Targets are applied to cached rules by name; names not in the cache
    (here 'b') are ignored."""
    cache = RuleCache()
    cache.load_rules([rule_default, rule_0])
    targets = {
        'a': {'quota': 3, 'TTL': None, 'interval': None, 'rate': 0.1},
        'b': {'quota': 2, 'TTL': None, 'interval': None, 'rate': 0.1},
        'Default': {'quota': 5, 'TTL': None, 'interval': None, 'rate': 0.1},
    }
    cache.load_targets(targets)
    assert rule_0.reservoir.quota == 3
    assert rule_default.reservoir.quota == 5
def test_expired_cache():
    """An out-of-date cache matches nothing; a fresh one yields the default."""
    cache = RuleCache()
    now = int(time.time())
    cache.load_rules([rule_default, rule_1, rule_2, rule_0])
    cache.last_updated = now - 60 * 60 * 24  # makes rule cache one day before
    request = {'host': 'myhost.com', 'method': 'GET',
               'path': 'operation', 'service': 'app'}
    assert cache.get_matched_rule(request, now) is None
    cache.last_updated = now
    assert cache.get_matched_rule(request, now).is_default()
================================================
FILE: tests/test_sdk_config.py
================================================
from aws_xray_sdk import global_sdk_config
import os
import pytest
# Environment variable the SDK consults for the global enabled flag.
XRAY_ENABLED_KEY = "AWS_XRAY_SDK_ENABLED"
@pytest.fixture(autouse=True)
def cleanup():
    """
    Clean up Environmental Variable for enable before and after tests
    """
    if XRAY_ENABLED_KEY in os.environ:
        del os.environ[XRAY_ENABLED_KEY]
    yield
    if XRAY_ENABLED_KEY in os.environ:
        del os.environ[XRAY_ENABLED_KEY]
    # Restore the default enabled state for subsequent tests.
    global_sdk_config.set_sdk_enabled(True)
def test_enable_key():
    """The SDK's env-var name matches the one this test suite manipulates."""
    assert global_sdk_config.XRAY_ENABLED_KEY == XRAY_ENABLED_KEY
def test_default_enabled():
    """With no env var and no explicit call, the SDK defaults to enabled."""
    assert global_sdk_config.sdk_enabled() is True
def test_env_var_precedence():
    """The environment variable always wins over set_sdk_enabled()."""
    # (env value, value passed to set_sdk_enabled, expected sdk_enabled())
    cases = [
        ("true", False, True),
        ("false", False, False),
        ("false", True, False),
        ("true", True, True),
        ("true", None, True),
    ]
    for env_value, set_value, expected in cases:
        os.environ[XRAY_ENABLED_KEY] = env_value
        # Env Variable takes precedence. This is called to activate the internal check
        global_sdk_config.set_sdk_enabled(set_value)
        assert global_sdk_config.sdk_enabled() is expected
def test_env_enable_case():
    """Env values parse case-insensitively; several truthy spellings count."""
    truthy_values = ["TrUE", "true", "1", "y", "t"]
    falsy_values = ["False", "falSE", "0"]
    for env_value in truthy_values + falsy_values:
        os.environ[XRAY_ENABLED_KEY] = env_value
        # Env Variable takes precedence. This is called to activate the internal check
        global_sdk_config.set_sdk_enabled(True)
        assert global_sdk_config.sdk_enabled() is (env_value in truthy_values)
def test_invalid_env_string():
    """Unparseable env values fall back to enabled, whatever was set in code."""
    # (env value, value passed to set_sdk_enabled) -- all expect enabled.
    cases = [
        ("INVALID", True),
        ("1.0", True),
        ("1-.0", False),
        ("T RUE", True),
    ]
    for env_value, set_value in cases:
        os.environ[XRAY_ENABLED_KEY] = env_value
        # Env Variable takes precedence. This is called to activate the internal check
        global_sdk_config.set_sdk_enabled(set_value)
        assert global_sdk_config.sdk_enabled() is True
================================================
FILE: tests/test_serialize_entities.py
================================================
import ast
import datetime
import json
import platform
import pytest
from aws_xray_sdk.version import VERSION
from aws_xray_sdk.core.models import http
from aws_xray_sdk.core.models.segment import Segment
from aws_xray_sdk.core.models.subsegment import Subsegment
from .util import entity_to_dict
def test_serialize_segment():
    """A bare closed segment serializes only its required fields."""
    segment = Segment('test')
    segment.close()
    expected = {
        "id": segment.id,
        "name": "test",
        "trace_id": segment.trace_id,
        "start_time": segment.start_time,
        "end_time": segment.end_time,
        "in_progress": False,
    }
    assert entity_to_dict(segment) == expected
def test_serialize_segment_with_aws():
    """Content passed to set_aws appears verbatim under the 'aws' key."""
    segment = Segment('test')
    segment.set_aws({
        'xray': {
            'sdk': 'X-Ray for Python',
            'sdk_version': VERSION
        }
    })
    segment.close()
    expected = {
        "id": segment.id,
        "name": "test",
        "trace_id": segment.trace_id,
        "start_time": segment.start_time,
        "end_time": segment.end_time,
        "in_progress": False,
        "aws": {
            "xray": {
                "sdk": "X-Ray for Python",
                "sdk_version": VERSION
            }
        },
    }
    assert entity_to_dict(segment) == expected
def test_serialize_segment_with_services():
    """Content passed to set_service appears under the 'service' key."""
    segment = Segment('test')
    runtime_info = {
        'runtime': platform.python_implementation(),
        'runtime_version': platform.python_version()
    }
    segment.set_service(runtime_info)
    segment.close()
    expected = {
        "id": segment.id,
        "name": "test",
        "trace_id": segment.trace_id,
        "start_time": segment.start_time,
        "end_time": segment.end_time,
        "in_progress": False,
        "service": {
            "runtime": segment.service['runtime'],
            "runtime_version": segment.service['runtime_version']
        },
    }
    assert entity_to_dict(segment) == expected
def test_serialize_segment_with_annotation():
    """Values stored via put_annotation serialize under 'annotations'."""
    segment = Segment('test')
    segment.put_annotation('key', 'value')
    segment.close()
    expected = {
        "id": segment.id,
        "name": "test",
        "trace_id": segment.trace_id,
        "start_time": segment.start_time,
        "end_time": segment.end_time,
        "in_progress": False,
        "annotations": {"key": "value"},
    }
    assert entity_to_dict(segment) == expected
def test_serialize_segment_with_metadata():
    """Arbitrary objects in metadata serialize via their attributes.

    Sets and tuples serialize as lists, classes as their str(), and
    datetime.time values as ISO strings (per the expected payloads below).
    """
    class TestSubMetadata():
        def __init__(self, time):
            self.time = time

    class TestMetadata():
        def __init__(self, parameter_one, parameter_two):
            self.parameter_one = parameter_one
            self.parameter_two = parameter_two
            self.parameter_three = {'test'}  # set
            self.parameter_four = {'a': [1, 2, 3], 'b': True, 'c': (1.1, 2.2), 'd': list}  # dict
            self.parameter_five = [TestSubMetadata(datetime.time(9, 25, 31)),
                                   TestSubMetadata(datetime.time(23, 14, 6))]  # list

    def expected_payload(one, two):
        # Serialized form of TestMetadata(one, two).
        return {
            "parameter_one": one,
            "parameter_two": two,
            "parameter_three": ["test"],
            "parameter_four": {
                "a": [1, 2, 3],
                "b": True,
                "c": [1.1, 2.2],
                "d": str(list),
            },
            "parameter_five": [{"time": "09:25:31"}, {"time": "23:14:06"}],
        }

    segment = Segment('test')
    segment.put_metadata('key_one', TestMetadata(1, 2), 'namespace_one')
    segment.put_metadata('key_two', TestMetadata(3, 4), 'namespace_two')
    segment.close()

    expected = {
        "id": segment.id,
        "name": "test",
        "trace_id": segment.trace_id,
        "start_time": segment.start_time,
        "end_time": segment.end_time,
        "in_progress": False,
        "metadata": {
            "namespace_one": {"key_one": expected_payload(1, 2)},
            "namespace_two": {"key_two": expected_payload(3, 4)},
        },
    }
    assert entity_to_dict(segment) == expected
def test_serialize_segment_with_http():
    """put_http_meta routes keys into http.request / http.response sections."""
    segment = Segment('test')
    http_meta = [
        (http.URL, 'https://aws.amazon.com'),
        (http.METHOD, 'get'),
        (http.USER_AGENT, 'test'),
        (http.CLIENT_IP, '127.0.0.1'),
        (http.X_FORWARDED_FOR, True),
        (http.STATUS, 200),
        (http.CONTENT_LENGTH, 0),
    ]
    for key, value in http_meta:
        segment.put_http_meta(key, value)
    segment.close()
    expected = {
        "id": segment.id,
        "name": "test",
        "trace_id": segment.trace_id,
        "start_time": segment.start_time,
        "end_time": segment.end_time,
        "in_progress": False,
        "http": {
            "request": {
                "url": "https://aws.amazon.com",
                "method": "get",
                "user_agent": "test",
                "client_ip": "127.0.0.1",
                "x_forwarded_for": True
            },
            "response": {
                "status": 200,
                "content_length": 0
            }
        },
    }
    assert entity_to_dict(segment) == expected
def test_serialize_segment_with_exception():
    """Exceptions serialize as full descriptors; a repeated exception is
    recorded as a bare cause-id reference instead."""
    class TestException(Exception):
        def __init__(self, message):
            super().__init__(message)
    segment_one = Segment('test')
    # Pre-baked tuples in `traceback.extract_stack()` format:
    # (path, line, label, source).
    stack_one = [
        ('/path/to/test.py', 10, 'module', 'another_function()'),
        ('/path/to/test.py', 3, 'another_function', 'wrong syntax')
    ]
    stack_two = [
        ('/path/to/test.py', 11, 'module', 'another_function()'),
        ('/path/to/test.py', 4, 'another_function', 'wrong syntax')
    ]
    exception_one = TestException('test message one')
    exception_two = TestException('test message two')
    segment_one.add_exception(exception_one, stack_one, True)
    segment_one.add_exception(exception_two, stack_two, False)
    segment_one.close()
    expected_segment_one_dict = {
        "id": segment_one.id,
        "name": "test",
        "start_time": segment_one.start_time,
        "in_progress": False,
        "cause": {
            "working_directory": segment_one.cause['working_directory'],
            "exceptions": [
                {
                    "id": exception_one._cause_id,
                    "message": "test message one",
                    "type": "TestException",
                    "remote": True,
                    # Paths are reduced to their basename in the output.
                    "stack": [
                        {
                            "path": "test.py",
                            "line": 10,
                            "label": "module"
                        },
                        {
                            "path": "test.py",
                            "line": 3,
                            "label": "another_function"
                        }
                    ]
                },
                {
                    "id": exception_two._cause_id,
                    "message": "test message two",
                    "type": "TestException",
                    "remote": False,
                    "stack": [
                        {
                            "path": "test.py",
                            "line": 11,
                            "label": "module"
                        },
                        {
                            "path": "test.py",
                            "line": 4,
                            "label": "another_function"
                        }
                    ]
                }
            ]
        },
        "trace_id": segment_one.trace_id,
        "fault": True,
        "end_time": segment_one.end_time
    }
    segment_two = Segment('test')
    subsegment = Subsegment('test', 'local', segment_two)
    subsegment.add_exception(exception_one, stack_one, True)
    subsegment.add_exception(exception_two, stack_two, False)
    subsegment.close()
    # will record cause id instead as same exception already recorded in its subsegment
    segment_two.add_exception(exception_one, stack_one, True)
    segment_two.close()
    expected_segment_two_dict = {
        "id": segment_two.id,
        "name": "test",
        "start_time": segment_two.start_time,
        "in_progress": False,
        "cause": exception_one._cause_id,
        "trace_id": segment_two.trace_id,
        "fault": True,
        "end_time": segment_two.end_time
    }
    actual_segment_one_dict = entity_to_dict(segment_one)
    actual_segment_two_dict = entity_to_dict(segment_two)
    assert expected_segment_one_dict == actual_segment_one_dict
    assert expected_segment_two_dict == actual_segment_two_dict
def test_serialize_subsegment():
    """A bare closed subsegment includes type, namespace and parent trace id."""
    segment = Segment('test')
    subsegment = Subsegment('test', 'local', segment)
    subsegment.close()
    segment.close()
    expected = {
        "id": subsegment.id,
        "name": "test",
        "trace_id": subsegment.trace_id,
        "start_time": subsegment.start_time,
        "end_time": subsegment.end_time,
        "in_progress": False,
        "type": "subsegment",
        "namespace": "local",
    }
    assert entity_to_dict(subsegment) == expected
def test_serialize_subsegment_with_http():
    """http meta on a subsegment serializes into request/response sections."""
    segment = Segment('test')
    subsegment = Subsegment('test', 'remote', segment)
    for key, value in ((http.URL, 'https://aws.amazon.com'),
                       (http.METHOD, 'get'),
                       (http.STATUS, 200),
                       (http.CONTENT_LENGTH, 0)):
        subsegment.put_http_meta(key, value)
    subsegment.close()
    segment.close()
    expected = {
        "id": subsegment.id,
        "name": "test",
        "trace_id": subsegment.trace_id,
        "start_time": subsegment.start_time,
        "end_time": subsegment.end_time,
        "in_progress": False,
        "type": "subsegment",
        "namespace": "remote",
        "http": {
            "request": {
                "url": "https://aws.amazon.com",
                "method": "get"
            },
            "response": {
                "status": 200,
                "content_length": 0
            }
        },
    }
    assert entity_to_dict(subsegment) == expected
def test_serialize_subsegment_with_sql():
    """Content passed to set_sql appears verbatim under the 'sql' key."""
    segment = Segment('test')
    subsegment = Subsegment('test', 'remote', segment)
    sql = {
        "url": "jdbc:postgresql://aawijb5u25wdoy.cpamxznpdoq8.us-west-2.rds.amazonaws.com:5432/ebdb",
        "preparation": "statement",
        "database_type": "PostgreSQL",
        "database_version": "9.5.4",
        "driver_version": "PostgreSQL 9.4.1211.jre7",
        "user": "dbuser",
        "sanitized_query": "SELECT * FROM customers WHERE customer_id=?;"
    }
    # Pass a copy so the expected dict below cannot alias the stored one.
    subsegment.set_sql(dict(sql))
    subsegment.close()
    segment.close()
    expected = {
        "id": subsegment.id,
        "name": "test",
        "trace_id": subsegment.trace_id,
        "start_time": subsegment.start_time,
        "end_time": subsegment.end_time,
        "in_progress": False,
        "type": "subsegment",
        "namespace": "remote",
        "sql": sql,
    }
    assert entity_to_dict(subsegment) == expected
def test_serialize_subsegment_with_aws():
    """Content passed to set_aws appears verbatim under the 'aws' key."""
    segment = Segment('test')
    subsegment = Subsegment('test', 'aws', segment)
    aws_info = {
        "bucket_name": "testbucket",
        "region": "us-east-1",
        "operation": "GetObject",
        "request_id": "0000000000000000",
        "key": "123",
        "resource_names": [
            "testbucket"
        ]
    }
    # Pass a copy so the expected dict below cannot alias the stored one.
    subsegment.set_aws(dict(aws_info))
    subsegment.close()
    segment.close()
    expected = {
        "id": subsegment.id,
        "name": "test",
        "trace_id": subsegment.trace_id,
        "start_time": subsegment.start_time,
        "end_time": subsegment.end_time,
        "in_progress": False,
        "type": "subsegment",
        "namespace": "aws",
        "aws": aws_info,
    }
    assert entity_to_dict(subsegment) == expected
def test_serialize_with_ast_metadata():
    """An ast node stored as metadata serializes without raising."""
    class_source = """\
class A:
    def __init__(self, a):
        self.a = a
"""
    parsed = ast.parse(class_source)
    segment = Segment('test')
    segment.put_metadata('ast', parsed)
    segment.close()
    serialized = entity_to_dict(segment)
    assert 'ast' in serialized['metadata']['default']
================================================
FILE: tests/test_sqs_message_helper.py
================================================
from aws_xray_sdk.core.utils.sqs_message_helper import SqsMessageHelper
import pytest
# Lambda-style SQS event with three records that differ only in their
# AWSTraceHeader: Sampled=1, Sampled=0, and no Sampled flag at all.
sampleSqsMessageEvent = {
    "Records": [
        # Record 0: trace header carries Sampled=1.
        {
            "messageId": "059f36b4-87a3-44ab-83d2-661975830a7d",
            "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a...",
            "body": "Test message.",
            "attributes": {
                "ApproximateReceiveCount": "1",
                "SentTimestamp": "1545082649183",
                "SenderId": "AIDAIENQZJOLO23YVJ4VO",
                "ApproximateFirstReceiveTimestamp": "1545082649185",
                "AWSTraceHeader": "Root=1-632BB806-bd862e3fe1be46a994272793;Sampled=1"
            },
            "messageAttributes": {},
            "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3",
            "eventSource": "aws:sqs",
            "eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:my-queue",
            "awsRegion": "us-east-2"
        },
        # Record 1: trace header carries Sampled=0.
        {
            "messageId": "2e1424d4-f796-459a-8184-9c92662be6da",
            "receiptHandle": "AQEBzWwaftRI0KuVm4tP+/7q1rGgNqicHq...",
            "body": "Test message.",
            "attributes": {
                "ApproximateReceiveCount": "1",
                "SentTimestamp": "1545082650636",
                "SenderId": "AIDAIENQZJOLO23YVJ4VO",
                "ApproximateFirstReceiveTimestamp": "1545082650649",
                "AWSTraceHeader": "Root=1-5759e988-bd862e3fe1be46a994272793;Parent=53995c3f42cd8ad8;Sampled=0"
            },
            "messageAttributes": {},
            "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3",
            "eventSource": "aws:sqs",
            "eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:my-queue",
            "awsRegion": "us-east-2"
        },
        # Record 2: trace header has no Sampled flag.
        {
            "messageId": "2e1424d4-f796-459a-8184-9c92662be6da",
            "receiptHandle": "AQEBzWwaftRI0KuVm4tP+/7q1rGgNqicHq...",
            "body": "Test message.",
            "attributes": {
                "ApproximateReceiveCount": "1",
                "SentTimestamp": "1545082650636",
                "SenderId": "AIDAIENQZJOLO23YVJ4VO",
                "ApproximateFirstReceiveTimestamp": "1545082650649",
                "AWSTraceHeader": "Root=1-5759e988-bd862e3fe1be46a994272793;Parent=53995c3f42cd8ad8"
            },
            "messageAttributes": {},
            "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3",
            "eventSource": "aws:sqs",
            "eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:my-queue",
            "awsRegion": "us-east-2"
        }
    ]
}
def test_return_true_when_sampling_1():
    """A trace header with Sampled=1 is reported as sampled.

    Fix: replaced ``== True`` with a truthiness assert (PEP 8 / E712).
    """
    assert SqsMessageHelper.isSampled(sampleSqsMessageEvent['Records'][0])
def test_return_false_when_sampling_0():
    """A trace header with Sampled=0 is reported as not sampled.

    Fix: replaced ``== False`` with a truthiness assert (PEP 8 / E712).
    """
    assert not SqsMessageHelper.isSampled(sampleSqsMessageEvent['Records'][1])
def test_return_false_with_no_sampling_flag():
    """A trace header with no Sampled flag is reported as not sampled.

    Fix: replaced ``== False`` with a truthiness assert (PEP 8 / E712).
    """
    assert not SqsMessageHelper.isSampled(sampleSqsMessageEvent['Records'][2])
================================================
FILE: tests/test_throwable.py
================================================
from aws_xray_sdk.core.models.throwable import Throwable
def test_message_and_type():
    """Throwable captures the exception message, type name and remote flag."""
    error = TypeError('msg')
    throwable = Throwable(error, None, True)
    assert throwable.message == 'msg'
    assert throwable.type == type(error).__name__
    assert throwable.remote
def test_stack_trace_parsing():
    """Raw extract_stack tuples are parsed into path/line/label entries."""
    # sample output using `traceback.extract_stack()`
    stack = [
        ('/path/to/test.py', 10, 'module', 'another_function()'),
        ('/path/to/test.py', 3, 'another_function', 'wrong syntax'),
    ]
    throwable = Throwable(TypeError(), stack)
    expected_entries = [
        {'path': 'test.py', 'line': 10, 'label': 'module'},
        {'path': 'test.py', 'line': 3, 'label': 'another_function'},
    ]
    for entry, expected in zip(throwable.stack, expected_entries):
        for field, value in expected.items():
            assert entry[field] == value
================================================
FILE: tests/test_trace_entities.py
================================================
# -*- coding: iso-8859-15 -*-
import pytest
from aws_xray_sdk.core.models.segment import Segment
from aws_xray_sdk.core.models.subsegment import Subsegment
from aws_xray_sdk.core.models import http
from aws_xray_sdk.core.exceptions.exceptions import SegmentNameMissingException
from aws_xray_sdk.core.exceptions.exceptions import SegmentNotFoundException
from aws_xray_sdk.core.exceptions.exceptions import AlreadyEndedException
from .util import entity_to_dict
from .util import get_new_stubbed_recorder
xray_recorder = get_new_stubbed_recorder()
def test_unicode_entity_name():
    """Non-ASCII entity names are stored untouched."""
    segment_name = u'福'
    subsegment_name = u'セツナ'
    segment = Segment(segment_name)
    subsegment = Subsegment(subsegment_name, 'local', segment)
    assert segment.name == segment_name
    assert subsegment.name == subsegment_name
def test_segment_user():
    """set_user surfaces the user in the serialized document."""
    segment = Segment('seg')
    segment.set_user('whoami')
    serialized = entity_to_dict(segment)
    assert serialized['user'] == 'whoami'
def test_put_http_meta():
    """Supported http keys route to request/response; unknown keys are dropped.

    Improvement: additionally assert the dropped key is absent from both
    http sections — the original only checked the top level of the document.
    """
    segment = Segment('seg')
    segment.put_http_meta(http.URL, 'my url')
    segment.put_http_meta(http.STATUS, 200)
    # unsupported key should be dropped
    segment.put_http_meta('somekey', 'somevalue')
    doc = entity_to_dict(segment)
    assert doc['http']['request'][http.URL] == 'my url'
    assert doc['http']['response'][http.STATUS] == 200
    assert 'somekey' not in doc
    assert 'somekey' not in doc['http']['request']
    assert 'somekey' not in doc['http']['response']
def test_put_metadata():
    """Metadata defaults to the 'default' namespace; custom namespaces are kept."""
    meta = {
        'key1': 'value1',
        'key2': 'value2',
    }
    segment = Segment('seg')
    segment.put_metadata('key', meta)
    subsegment = Subsegment('sub', 'local', segment)
    segment.add_subsegment(subsegment)
    subsegment.put_metadata('key', meta, 'my namespace')
    doc = entity_to_dict(segment)
    assert doc['metadata']['default']['key'] == meta
    assert doc['subsegments'][0]['metadata']['my namespace']['key'] == meta
def test_put_annotation():
    """Annotations with non-scalar values or invalid keys are dropped.

    Fix: the original asserted ``'invalid-value' not in doc['annotations']``
    — a key that was never used, so the assert was vacuous. The dropped
    entries were stored under 'valid_key' (invalid value) and 'invalid-key'
    (invalid key); check both.
    """
    segment = Segment('seg')
    invalid = {
        'key1': 'value1',
        'key2': 'value2',
    }
    # invalid annotation key-value pair should be dropped
    segment.put_annotation('valid_key', invalid)
    segment.put_annotation('invalid-key', 'validvalue')
    segment.put_annotation('number', 1)
    subsegment = Subsegment('sub', 'local', segment)
    segment.add_subsegment(subsegment)
    subsegment.put_annotation('bool', False)
    doc = entity_to_dict(segment)
    assert doc['annotations']['number'] == 1
    assert 'valid_key' not in doc['annotations']
    assert 'invalid-key' not in doc['annotations']
    sub_doc = doc['subsegments'][0]
    assert not sub_doc['annotations']['bool']
def test_reference_counting():
    """A segment becomes ready to send only after its subsegment
    reference count reaches zero and the segment itself is closed."""
    segment = Segment('seg')
    subsegment = Subsegment('sub', 'local', segment)
    segment.add_subsegment(subsegment)
    # rebind to a fresh subsegment; NOTE(review): this one is never
    # attached to the segment via add_subsegment, yet closing it still
    # drives the counter 1 -> 0 below — confirm ref_counter semantics
    subsegment = Subsegment('sub', 'local', segment)
    subsubsegment = Subsegment('subsub', 'local', segment)
    subsegment.add_subsegment(subsubsegment)
    # two add_subsegment calls so far -> two outstanding references
    assert not segment.ready_to_send()
    assert segment.ref_counter.get_current() == 2
    subsubsegment.close()
    assert not segment.ready_to_send()
    assert segment.ref_counter.get_current() == 1
    subsegment.close()
    # all references released, but the segment itself is still open
    assert not segment.ready_to_send()
    assert segment.ref_counter.get_current() == 0
    segment.close()
    assert segment.ready_to_send()
    # both subsegments attached via add_subsegment count toward the total
    assert segment.get_total_subsegments_size() == 2
def test_flags_on_status_code():
    """429 sets throttle and error, 5xx sets fault, other 4xx sets error."""
    throttled = Segment('seg')
    throttled.apply_status_code(429)
    assert throttled.throttle
    assert throttled.error
    faulted = Segment('seg')
    faulted.apply_status_code(503)
    assert faulted.fault
    errored = Segment('seg')
    errored.apply_status_code(403)
    assert errored.error
def test_mutate_closed_entity():
    """Every mutating call on an already-closed segment must raise."""
    segment = Segment('seg')
    segment.close()
    mutations = (
        lambda: segment.put_annotation('key', 'value'),
        lambda: segment.put_metadata('key', 'value'),
        lambda: segment.put_http_meta('url', 'my url'),
        lambda: segment.close(),
    )
    for mutate in mutations:
        with pytest.raises(AlreadyEndedException):
            mutate()
def test_no_rule_name_pollution():
    """Setting the sampling rule name on one segment must not leak into
    another (guards against shared mutable 'aws' state)."""
    first = Segment('seg1')
    second = Segment('seg2')
    first.set_rule_name('rule1')
    second.set_rule_name('rule2')
    assert first.aws['xray']['sampling_rule_name'] == 'rule1'
    assert second.aws['xray']['sampling_rule_name'] == 'rule2'
def test_no_empty_properties():
    """Optional fields that were never populated are omitted entirely."""
    segment = Segment('seg')
    segment.close()
    doc = entity_to_dict(segment)
    for optional in ('http', 'aws', 'metadata', 'annotations',
                     'subsegments', 'cause'):
        assert optional not in doc
def test_required_properties():
    """A closed segment always serializes its mandatory fields."""
    segment = Segment('seg')
    segment.close()
    doc = entity_to_dict(segment)
    for required in ('trace_id', 'id', 'start_time', 'end_time'):
        assert required in doc
def test_missing_segment_name():
    """Creating a segment without a name is rejected."""
    with pytest.raises(SegmentNameMissingException):
        Segment(None)
def test_missing_parent_segment():
    """A subsegment cannot be created without a parent segment."""
    with pytest.raises(SegmentNotFoundException):
        Subsegment('name', 'local', None)
def test_add_exception():
    """add_exception records message, normalized stack and working directory."""
    segment = Segment('seg')
    err = Exception("testException")
    raw_stack = [['path', 'line', 'label']]
    segment.add_exception(exception=err, stack=raw_stack)
    segment.close()
    cause = segment.cause
    assert 'exceptions' in cause
    assert 'working_directory' in cause
    recorded = cause['exceptions']
    assert len(recorded) == 1
    first = recorded[0]
    assert first.message == 'testException'
    # stack frames are normalized into path/line/label dicts
    assert first.stack == [{'path': 'path', 'line': 'line', 'label': 'label'}]
def test_add_exception_referencing():
    """When the same exception was already added to a subsegment, the
    parent segment stores just the exception id instead of a copy."""
    segment = Segment('seg')
    subseg = Subsegment('subseg', 'remote', segment)
    err = Exception("testException")
    raw_stack = [['path', 'line', 'label']]
    subseg.add_exception(exception=err, stack=raw_stack)
    segment.add_exception(exception=err, stack=raw_stack)
    subseg.close()
    segment.close()
    # the subsegment keeps the full cause dict ...
    assert isinstance(subseg.cause, dict)
    # ... while the segment holds only a reference (the exception id)
    assert isinstance(segment.cause, str)
    assert segment.cause == subseg.cause['exceptions'][0].id
def test_add_exception_cause_resetting():
    """Adding a brand-new exception replaces a previously stored id
    reference with a full cause dict."""
    segment = Segment('seg')
    subseg = Subsegment('subseg', 'remote', segment)
    shared = Exception("testException")
    raw_stack = [['path', 'line', 'label']]
    subseg.add_exception(exception=shared, stack=raw_stack)
    segment.add_exception(exception=shared, stack=raw_stack)
    segment.add_exception(exception=Exception("newException"), stack=raw_stack)
    subseg.close()
    segment.close()
    cause = segment.cause
    assert isinstance(cause, dict)
    assert cause['exceptions'][0].message == 'newException'
def test_add_exception_appending_exceptions():
    """Distinct exceptions accumulate in the segment's cause list."""
    segment = Segment('seg')
    raw_stack = [['path', 'line', 'label']]
    for message in ("testException", "newException"):
        segment.add_exception(exception=Exception(message), stack=raw_stack)
    segment.close()
    assert isinstance(segment.cause, dict)
    assert len(segment.cause['exceptions']) == 2
def test_adding_subsegments_with_recorder():
    """A subsegment begun under an explicitly-unsampled subsegment
    inherits the unsampled decision even via plain begin_subsegment."""
    xray_recorder.configure(sampling=False)
    xray_recorder.clear_trace_entities()
    # stray trailing semicolon removed (non-idiomatic in Python)
    segment = xray_recorder.begin_segment('parent')
    subsegment = xray_recorder.begin_subsegment('sampled-child')
    unsampled_subsegment = xray_recorder.begin_subsegment_without_sampling('unsampled-child1')
    unsampled_child_subsegment = xray_recorder.begin_subsegment('unsampled-child2')
    # plain truthiness asserts instead of '== True'/'== False' (PEP 8 E712)
    assert segment.sampled
    assert subsegment.sampled
    assert not unsampled_subsegment.sampled
    assert not unsampled_child_subsegment.sampled
    xray_recorder.clear_trace_entities()
================================================
FILE: tests/test_trace_header.py
================================================
from aws_xray_sdk.core.models.trace_header import TraceHeader
TRACE_ID = '1-5759e988-bd862e3fe1be46a994272793'
PARENT_ID = '53995c3f42cd8ad8'
def test_no_sample():
    """The Sampled field is omitted when the decision is undefined."""
    header = TraceHeader(root=TRACE_ID, parent=PARENT_ID)
    assert header.sampled is None
    assert header.root == TRACE_ID
    assert header.parent == PARENT_ID
    expected = 'Root={};Parent={}'.format(TRACE_ID, PARENT_ID)
    assert header.to_header_str() == expected
def test_no_parent():
    """The Parent field is omitted when no parent id is set."""
    header = TraceHeader(root=TRACE_ID, sampled=1)
    assert header.parent is None
    assert header.to_header_str() == 'Root={};Sampled=1'.format(TRACE_ID)
def test_from_str():
    """Parsing tolerates any subset of Root/Parent/Sampled fields."""
    # a full header string that has all fields present
    full = TraceHeader.from_header_str(
        'Root=%s;Parent=%s;Sampled=1' % (TRACE_ID, PARENT_ID))
    assert full.root == TRACE_ID
    assert full.parent == PARENT_ID
    assert full.sampled == 1
    # parent id missing; a '?' sampled value is preserved verbatim
    no_parent = TraceHeader.from_header_str('Root=%s;Sampled=?' % TRACE_ID)
    assert no_parent.root == TRACE_ID
    assert no_parent.parent is None
    assert no_parent.sampled == '?'
    # sampled decision missing
    no_sampled = TraceHeader.from_header_str(
        'Root=%s;Parent=%s' % (TRACE_ID, PARENT_ID))
    assert no_sampled.root == TRACE_ID
    assert no_sampled.parent == PARENT_ID
    assert no_sampled.sampled is None
def test_arbitrary_fields():
    """Unknown key=value pairs survive a parse/serialize round trip."""
    parsed = TraceHeader.from_header_str('Root=%s;k1=v1;k2=v2' % TRACE_ID)
    round_tripped = parsed.to_header_str()
    assert 'k1=v1' in round_tripped
    assert 'k2=v2' in round_tripped
def test_invalid_str():
    """Unparseable input yields a header with every field unset."""
    header = TraceHeader.from_header_str('some invalid string')
    for field in (header.root, header.parent, header.sampled):
        assert field is None
================================================
FILE: tests/test_traceid.py
================================================
import os
import pytest
from aws_xray_sdk.core import xray_recorder
from aws_xray_sdk.core.models.traceid import TraceId
@pytest.fixture(autouse=True)
def cleanup():
    """
    Clean up Environmental Variable for enable before and after tests
    """
    # pop with a default is equivalent to the check-then-delete dance
    os.environ.pop('AWS_XRAY_NOOP_ID', None)
    yield
    os.environ.pop('AWS_XRAY_NOOP_ID', None)
def test_id_format():
    """Trace ids are 35 chars: version '1', hex timestamp, hex random."""
    trace_id = TraceId().to_id()
    assert len(trace_id) == 35
    parts = trace_id.split(TraceId.DELIMITER)
    assert parts[0] == '1'
    int(parts[1], 16)  # timestamp part must parse as hexadecimal
    int(parts[2], 16)  # random part must parse as hexadecimal
def test_id_generation_default_sampling_false():
    """By default, unsampled entities receive all-zero noop ids."""
    segment = xray_recorder.begin_segment('segment_name', sampling=False)
    subsegment = xray_recorder.begin_subsegment('subsegment_name')
    xray_recorder.end_subsegment()
    xray_recorder.end_segment()
    zero_id = '0' * 16
    zero_trace_id = '1-00000000-000000000000000000000000'
    assert segment.id == zero_id
    assert segment.trace_id == zero_trace_id
    assert subsegment.id == zero_id
    assert subsegment.trace_id == zero_trace_id
    assert subsegment.parent_id == zero_id
def test_id_generation_default_sampling_true():
    """Sampled entities receive real (non-zero) ids."""
    segment = xray_recorder.begin_segment('segment_name', sampling=True)
    subsegment = xray_recorder.begin_subsegment('subsegment_name')
    xray_recorder.end_subsegment()
    xray_recorder.end_segment()
    zero_id = '0' * 16
    zero_trace_id = '1-00000000-000000000000000000000000'
    assert segment.id != zero_id
    assert segment.trace_id != zero_trace_id
    assert subsegment.id != zero_id
    assert subsegment.trace_id != zero_trace_id
    assert subsegment.parent_id != zero_id
def test_id_generation_noop_true():
    """AWS_XRAY_NOOP_ID='True' keeps all-zero ids for unsampled traces."""
    os.environ['AWS_XRAY_NOOP_ID'] = 'True'
    segment = xray_recorder.begin_segment('segment_name', sampling=False)
    subsegment = xray_recorder.begin_subsegment('subsegment_name')
    xray_recorder.end_subsegment()
    xray_recorder.end_segment()
    zero_id = '0' * 16
    zero_trace_id = '1-00000000-000000000000000000000000'
    assert segment.id == zero_id
    assert segment.trace_id == zero_trace_id
    assert subsegment.id == zero_id
    assert subsegment.trace_id == zero_trace_id
    assert subsegment.parent_id == zero_id
def test_id_generation_noop_false():
    """AWS_XRAY_NOOP_ID='FALSE' forces real ids even when unsampled."""
    os.environ['AWS_XRAY_NOOP_ID'] = 'FALSE'
    segment = xray_recorder.begin_segment('segment_name', sampling=False)
    subsegment = xray_recorder.begin_subsegment('subsegment_name')
    xray_recorder.end_subsegment()
    xray_recorder.end_segment()
    zero_id = '0' * 16
    zero_trace_id = '1-00000000-000000000000000000000000'
    assert segment.id != zero_id
    assert segment.trace_id != zero_trace_id
    assert subsegment.id != zero_id
    assert subsegment.trace_id != zero_trace_id
    assert subsegment.parent_id != zero_id
================================================
FILE: tests/test_utils.py
================================================
from aws_xray_sdk.ext.util import to_snake_case, get_hostname, strip_url, inject_trace_header
from aws_xray_sdk.core.models.segment import Segment
from aws_xray_sdk.core.models.subsegment import Subsegment
from aws_xray_sdk.core.models.dummy_entities import DummySegment, DummySubsegment
from .util import get_new_stubbed_recorder
xray_recorder = get_new_stubbed_recorder()
UNKNOWN_HOST = "UNKNOWN HOST"
def test_to_snake_case():
    """CamelCase and acronym-bearing names convert to snake_case."""
    cases = {
        'Bucket': 'bucket',
        'TableName': 'table_name',
        'ACLName': 'acl_name',
        'getHTTPResponse': 'get_http_response',
    }
    for source, expected in cases.items():
        assert to_snake_case(source) == expected
def test_get_hostname():
    """get_hostname extracts the host; bad input yields UNKNOWN_HOST."""
    cases = {
        'https://amazon.com/': 'amazon.com',
        'https://amazon.com/avery_long/path/and/stuff': 'amazon.com',
        'http://aws.amazon.com/should_get/sub/domains': 'aws.amazon.com',
        'https://amazon.com/somestuff?get=request&data=chiem': 'amazon.com',
    }
    for url, host in cases.items():
        assert get_hostname(url) == host
    for bad_input in ('INVALID_URL', '', None):
        assert get_hostname(bad_input) == UNKNOWN_HOST
def test_strip_url():
    """strip_url drops the query string and leaves the rest untouched."""
    stripped = strip_url('https://amazon.com/page?getdata=response&stuff=morestuff')
    assert stripped == 'https://amazon.com/page'
    stripped = strip_url('aws.google.com/index.html?field=data&suchcool=data')
    assert stripped == 'aws.google.com/index.html'
    # inputs without a query string pass through unchanged
    assert strip_url('INVALID_URL') == 'INVALID_URL'
    assert strip_url('') == ''
    assert not strip_url(None)
def test_inject_trace_header_unsampled():
    """An explicitly-unsampled subsegment injects Sampled=0."""
    outgoing = {
        'host': 'test',
        'accept': '*/*',
        'connection': 'keep-alive',
        'X-Amzn-Trace-Id': 'Root=1-6369739a-7d8bb07e519b795eb24d382d;Parent=089e3de743fb9e79;Sampled=1',
    }
    recorder = get_new_stubbed_recorder()
    recorder.configure(sampling=True)
    recorder.begin_segment('name', sampling=True)
    unsampled = recorder.begin_subsegment_without_sampling('unsampled')
    inject_trace_header(outgoing, unsampled)
    assert 'Sampled=0' in outgoing['X-Amzn-Trace-Id']
def test_inject_trace_header_respects_parent_subsegment():
    """A child of an unsampled subsegment also injects Sampled=0."""
    outgoing = {
        'host': 'test',
        'accept': '*/*',
        'connection': 'keep-alive',
        'X-Amzn-Trace-Id': 'Root=1-6369739a-7d8bb07e519b795eb24d382d;Parent=089e3de743fb9e79;Sampled=1',
    }
    recorder = get_new_stubbed_recorder()
    recorder.configure(sampling=True)
    recorder.begin_segment('name', sampling=True)
    recorder.begin_subsegment_without_sampling('unsampled')
    nested = recorder.begin_subsegment('unsampled2')
    inject_trace_header(outgoing, nested)
    assert 'Sampled=0' in outgoing['X-Amzn-Trace-Id']
def test_inject_trace_header_sampled():
    """A sampled subsegment injects Sampled=1."""
    outgoing = {
        'host': 'test',
        'accept': '*/*',
        'connection': 'keep-alive',
        'X-Amzn-Trace-Id': 'Root=1-6369739a-7d8bb07e519b795eb24d382d;Parent=089e3de743fb9e79;Sampled=1',
    }
    recorder = get_new_stubbed_recorder()
    recorder.configure(sampling=True)
    recorder.begin_segment('name')
    sampled = recorder.begin_subsegment('unsampled')
    inject_trace_header(outgoing, sampled)
    assert 'Sampled=1' in outgoing['X-Amzn-Trace-Id']
================================================
FILE: tests/test_wildcard_match.py
================================================
from aws_xray_sdk.core.utils.search_pattern import wildcard_match
def test_match_exact_positive():
    """A wildcard-free pattern matches the identical string."""
    assert wildcard_match('foo', 'foo')


def test_match_exact_negative():
    """A wildcard-free pattern rejects any different string."""
    assert not wildcard_match('foo', 'cat')
def test_single_wildcard_positive():
    """'?' matches exactly one arbitrary character."""
    assert wildcard_match('fo?', 'foo')


def test_single_wildcard_negative():
    """'?' does not compensate for mismatched literal characters."""
    assert not wildcard_match('f?o', 'boo')


def test_multiple_wildcard_positive():
    """Several '?' wildcards may appear in one pattern."""
    assert wildcard_match('?o?', 'foo')


def test_multiple_wildcard_negative():
    """Literal characters must still match around multiple '?'."""
    assert not wildcard_match('f??', 'boo')
def test_glob_positive_zero_or_more():
    """'*' matches zero or more characters."""
    assert wildcard_match('foo*', 'foo')


def test_glob_negative_zero_or_more():
    """'*' cannot repair a mismatch in the literal prefix."""
    assert not wildcard_match('foo*', 'fo0')


def test_glob_negative():
    """The literal part preceding '*' must match."""
    assert not wildcard_match('fo*', 'boo')
def test_glob_and_single_positive():
    """'*' and '?' combine within a single pattern."""
    assert wildcard_match('*o?', 'foo')


def test_glob_and_single_negative():
    """Combined wildcards still require the literals to match."""
    assert not wildcard_match('f?*', 'boo')


def test_pure_wildcard():
    """A lone '*' matches any string."""
    assert wildcard_match('*', 'foo')


def test_exact_match():
    """Numeric strings compare literally like any other text."""
    assert wildcard_match('6573459', '6573459')
def test_misc():
    """Assorted realistic patterns mixing '?' and '*'."""
    positive = (
        ('?at', 'bat'),
        ('?at', 'cat'),
        ('?o?se', 'horse'),
        ('?o?se', 'mouse'),
        ('*s', 'dogs'),
        ('*s', 'horses'),
        ('J*', 'Jeep'),
        ('????', 'ford'),
        ('*', 'cAr'),
        ('*/foo', '/bar/foo'),
    )
    for pattern, text in positive:
        assert wildcard_match(pattern, text)
    # four '?' cannot match a five-letter word
    assert not wildcard_match('????', 'chevy')
def test_case_insensitivity():
    """The third argument toggles case-insensitive matching."""
    cases = (
        # (pattern, text, case_insensitive, expected)
        ('Foo', 'Foo', False, True),
        ('Foo', 'Foo', True, True),
        ('Foo', 'FOO', False, False),
        ('Foo', 'FOO', True, True),
        ('Fo*', 'Foo0', False, True),
        ('Fo*', 'Foo0', True, True),
        ('Fo*', 'FOo0', False, False),
        ('Fo*', 'FOo0', True, True),
        ('Fo?', 'Foo', False, True),
        ('Fo?', 'Foo', True, True),
        ('Fo?', 'FOo', False, False),
        ('Fo?', 'FoO', False, True),
        ('Fo?', 'FOO', True, True),
    )
    for pattern, text, insensitive, expected in cases:
        assert bool(wildcard_match(pattern, text, insensitive)) == expected
def test_no_globs():
    """Without wildcards, a prefix does not match a longer pattern."""
    assert not wildcard_match('abcd', 'abc')
def test_edge_case_globs():
    """Empty patterns, boundary '*' placement and repeated globs."""
    positive = (
        ('', ''),
        ('a', 'a'),
        ('*a', 'a'),
        ('*a', 'ba'),
        ('a*', 'a'),
        ('a*', 'ab'),
        ('a*a', 'aa'),
        ('a*a', 'aba'),
        ('a*a', 'aaa'),
        ('a*a*', 'aa'),
        ('a*a*', 'aba'),
        ('a*a*', 'aaa'),
        ('a*a*', 'aaaaaaaaaaaaaaaaaaaaaaaaaa'),
        ('a*b*a*b*a*b*a*b*a*',
         'akljd9gsdfbkjhaabajkhbbyiaahkjbjhbuykjakjhabkjhbabjhkaabbabbaaakljdfsjklababkjbsdabab'),
    )
    for pattern, text in positive:
        assert wildcard_match(pattern, text)
    # backtracking must not accept a string the pattern cannot cover
    assert not wildcard_match('a*na*ha', 'anananahahanahana')
def test_multi_globs():
    """Runs of consecutive '*' collapse; '*'/'?' interleavings count chars."""
    positive = (
        ('*a', 'a'),
        ('**a', 'a'),
        ('***a', 'a'),
        ('**a*', 'a'),
        ('**a**', 'a'),
        ('a**b', 'ab'),
        ('a**b', 'abb'),
        ('*?', 'a'),
        ('*?', 'aa'),
        ('*??', 'aa'),
        ('*?', 'aaa'),
        ('?', 'a'),
        ('?*', 'a'),
        ('*?', 'a'),
        ('?*?', 'aa'),
        ('*?*', 'a'),
        ('*?*a*', 'ba'),
    )
    negative = (
        ('*???', 'aa'),
        ('??', 'a'),
        ('?*?', 'a'),
        ('*?*a', 'a'),
    )
    for pattern, text in positive:
        assert wildcard_match(pattern, text)
    for pattern, text in negative:
        assert not wildcard_match(pattern, text)
================================================
FILE: tests/util.py
================================================
import json
import threading
from aws_xray_sdk.core.recorder import AWSXRayRecorder
from aws_xray_sdk.core.emitters.udp_emitter import UDPEmitter
from aws_xray_sdk.core.sampling.sampler import DefaultSampler
from aws_xray_sdk.core.utils.conversion import metadata_to_dict
class CircularReferenceClass:
    """Test class that can create circular references.

    ``ref`` starts as None and may later be pointed at any object —
    including the instance itself — to build reference cycles for
    serialization tests.
    """
    def __init__(self, name):
        # identifier used by tests to recognize the instance
        self.name = name
        # assigned later by tests to form a cycle (e.g. obj.ref = obj)
        self.ref = None
class StubbedEmitter(UDPEmitter):
    """Emitter that caches the last entity per-thread instead of
    sending UDP packets; ``pop`` retrieves and clears the cache."""

    def __init__(self, daemon_address='127.0.0.1:2000'):
        super().__init__(daemon_address)
        self._local = threading.local()

    def send_entity(self, entity):
        # stash on thread-local storage instead of emitting over UDP
        self._local.cache = entity

    def pop(self):
        """Return the last cached entity (or None) and reset the cache."""
        entity = getattr(self._local, 'cache', None)
        self._local.__dict__.clear()
        return entity
class StubbedSampler(DefaultSampler):
    """Sampler stub whose ``start`` is a no-op so unit tests never spin
    up background pollers or contact the X-Ray service."""
    def start(self):
        # intentionally empty: skip poller startup in tests
        pass
def get_new_stubbed_recorder():
    """
    Returns a new AWSXRayRecorder object with emitter stubbed
    """
    # imported lazily so merely importing this helper module does not
    # pull in the async recorder machinery
    from aws_xray_sdk.core.async_recorder import AsyncAWSXRayRecorder
    # NOTE(review): despite the docstring, this builds an
    # AsyncAWSXRayRecorder (a recorder variant) with both the emitter
    # and the sampler stubbed out
    recorder = AsyncAWSXRayRecorder()
    recorder.configure(emitter=StubbedEmitter(), sampler=StubbedSampler())
    return recorder
def entity_to_dict(trace_entity):
    """Round-trip an entity through its JSON serialization into a dict."""
    return json.loads(trace_entity.serialize())
def _search_entity(entity, name):
    """Recursively search a serialized entity tree.

    Returns the entity dict whose 'name' equals *name*, or None.

    Fix: the original only descended into subsegments from an entity
    that itself carried a (non-matching) 'name' key; an entity missing
    'name' silently hid all of its descendants. Now subsegments are
    searched whenever the current entity is not a match.
    """
    if 'name' in entity and entity['name'] == name:
        return entity
    for sub in entity.get('subsegments', []):
        result = _search_entity(sub, name)
        if result is not None:
            return result
    return None
def find_subsegment(segment, name):
    """Helper function to find a subsegment by name in the entity tree."""
    serialized = entity_to_dict(segment)
    for child in serialized['subsegments']:
        match = _search_entity(child, name)
        if match is not None:
            return match
    return None
def find_subsegment_by_annotation(segment, key, value):
    """Helper function to find a subsegment by annotation key & value
    in the entity tree."""
    serialized = entity_to_dict(segment)
    for child in serialized['subsegments']:
        match = _search_entity_by_annotation(child, key, value)
        if match is not None:
            return match
    return None
def _search_entity_by_annotation(entity, key, value):
    """Recursively search a serialized entity tree.

    Returns the entity dict whose annotation *key* equals *value*,
    or None.

    Fix: subsegments are now searched whenever the current entity is
    not itself a match, regardless of whether it carries annotations,
    so matches deeper in the tree are never missed.
    """
    annotations = entity.get('annotations', {})
    if key in annotations and annotations[key] == value:
        return entity
    for sub in entity.get('subsegments', []):
        result = _search_entity_by_annotation(sub, key, value)
        if result is not None:
            return result
    return None
def test_metadata_to_dict_self_reference():
    """metadata_to_dict must terminate on self-referencing objects."""
    node = CircularReferenceClass('self_ref')
    node.ref = node  # point the object at itself to form a cycle
    converted = metadata_to_dict(node)  # must not blow the stack
    assert isinstance(converted, dict)
================================================
FILE: tox-distributioncheck.ini
================================================
[tox]
skipsdist = true
[testenv:distribution-check]
deps =
pytest > 5.2.0
aws-xray-sdk
commands =
pytest tests/distributioncheck
================================================
FILE: tox.ini
================================================
[tox]
skip_missing_interpreters = True
envlist =
py{37,38,39,310,311,312}-core
py{37,38,39,310,311,312}-ext-aiobotocore
py{37,38,39,310,311,312}-ext-aiohttp
py{37,38,39,310,311,312}-ext-botocore
py{37,38,39,310,311,312}-ext-bottle
py{37,38,39}-ext-django-2
py{37,38,39,310}-ext-django-3
; Django4 is only for python 3.8+
py{38,39,310,311,312}-ext-django-4
py{37,38,39,310,311,312}-ext-flask
py{37,38,39,310,311,312}-ext-flask_sqlalchemy
py{37,38,39,310,311,312}-ext-httplib
py{37,38,39,310,311,312}-ext-httpx
py{37,38,39,310,311,312}-ext-pg8000
py{37,38,39,310,311,312}-ext-psycopg2
py{37,38,39,310,311}-ext-psycopg
py{37,38,39,310,311,312}-ext-pymysql
py{37,38,39,310,311,312}-ext-pynamodb
py{37,38,39,310,311,312}-ext-requests
py{37,38,39,310,311,312}-ext-sqlalchemy
py{37,38,39,310,311,312}-ext-sqlalchemy_core
py{37,38,39,310,311,312}-ext-sqlite3
[testenv]
passenv = TOXENV,CI,CODECOV_*
deps =
; Testing packages
pytest > 3.0.0, < 8.0.0
pytest-benchmark
coverage == 7.2.7
codecov
; Packages common to all test environments
wrapt
; Python 3.5+ only deps
py{37,38,39,310,311,312}: pytest-asyncio == 0.21.2
; For pkg_resources
py{37,38,39,310,311,312}: setuptools
ext-aiobotocore: aiobotocore >= 0.10.0
ext-aiobotocore: pytest-asyncio
ext-aiohttp: aiohttp >= 3.3.0
ext-aiohttp: pytest-aiohttp < 1.1.0
ext-httpx: httpx >= 0.20
ext-httpx: pytest-asyncio >= 0.19
ext-requests: requests
ext-bottle: bottle >= 0.10
ext-bottle: webtest
ext-flask: flask >= 0.10
ext-flask_sqlalchemy: flask >= 0.10,<3.0.0
ext-flask_sqlalchemy: Flask-SQLAlchemy <= 2.5.1
ext-flask_sqlalchemy: sqlalchemy >=1.0.0,<2.0.0
ext-sqlalchemy: sqlalchemy >=1.0.0,<2.0.0
ext-sqlalchemy_core: sqlalchemy >=1.0.0,<2.0.0
ext-sqlalchemy_core: testing.postgresql
ext-sqlalchemy_core: psycopg2
ext-sqlalchemy_core: pymysql >= 1.0.0
ext-sqlalchemy_core: cryptography
ext-django-2: Django >=2.0,<3.0
ext-django-3: Django >=3.0,<4.0
ext-django-4: Django >=4.0,<5.0
ext-django: django-fake-model
py{37,38,39,310,311,312}-ext-pynamodb: pynamodb >=3.3.1,<6.0.0
ext-psycopg2: psycopg2
ext-psycopg2: testing.postgresql
ext-psycopg: psycopg
ext-psycopg: psycopg[pool]
ext-psycopg: testing.postgresql
ext-pg8000: pg8000 <= 1.20.0
ext-pg8000: testing.postgresql
py{37,38,39,310,311,312}-ext-pymysql: pymysql >= 1.0.0
py{37,38,39,310,311,312}-ext-pymysql: cryptography
setenv =
DJANGO_SETTINGS_MODULE = tests.ext.django.app.settings
AWS_SECRET_ACCESS_KEY = fake_key
AWS_ACCESS_KEY_ID=fake_id
commands =
coverage erase
py{37,38,39,310,311,312}-core: coverage run --append --source aws_xray_sdk -m pytest --ignore tests/ext {posargs}
ext-aiobotocore: coverage run --append --source aws_xray_sdk -m pytest tests/ext/aiobotocore {posargs}
ext-aiohttp: coverage run --append --source aws_xray_sdk -m pytest tests/ext/aiohttp {posargs}
ext-botocore: coverage run --append --source aws_xray_sdk -m pytest tests/ext/botocore {posargs}
ext-bottle: coverage run --append --source aws_xray_sdk -m pytest tests/ext/bottle {posargs}
ext-django: coverage run --append --source aws_xray_sdk -m pytest tests/ext/django {posargs}
ext-flask: coverage run --append --source aws_xray_sdk -m pytest tests/ext/flask {posargs}
ext-flask_sqlalchemy: coverage run --append --source aws_xray_sdk -m pytest tests/ext/flask_sqlalchemy {posargs}
ext-httplib: coverage run --append --source aws_xray_sdk -m pytest tests/ext/httplib {posargs}
ext-httpx: coverage run --append --source aws_xray_sdk -m pytest tests/ext/httpx {posargs}
ext-pg8000: coverage run --append --source aws_xray_sdk -m pytest tests/ext/pg8000 {posargs}
ext-psycopg2: coverage run --append --source aws_xray_sdk -m pytest tests/ext/psycopg2 {posargs}
ext-psycopg: coverage run --append --source aws_xray_sdk -m pytest tests/ext/psycopg {posargs}
ext-pymysql: coverage run --append --source aws_xray_sdk -m pytest tests/ext/pymysql {posargs}
ext-pynamodb: coverage run --append --source aws_xray_sdk -m pytest tests/ext/pynamodb {posargs}
ext-requests: coverage run --append --source aws_xray_sdk -m pytest tests/ext/requests {posargs}
ext-sqlalchemy: coverage run --append --source aws_xray_sdk -m pytest tests/ext/sqlalchemy {posargs}
py{37,38,39,310,311,312}-ext-sqlalchemy_core: coverage run --append --source aws_xray_sdk -m pytest tests/ext/sqlalchemy_core {posargs}
ext-sqlite3: coverage run --append --source aws_xray_sdk -m pytest tests/ext/sqlite3 {posargs}
; TODO: add additional logic to combine coverage from "core" and "ext" test runs
; codecov