[
  {
    "path": ".coveragerc",
    "content": "# .coveragerc to control coverage.py\n\n[report]\n# Regexes for lines to exclude from consideration\nexclude_also =\n    # Don't complain if non-runnable code isn't run:\n    if __name__ == .__main__.\n    def main\n    def get_arg_parser\n"
  },
  {
    "path": ".github/dependabot.yml",
    "content": "version: 2\nupdates:\n  - package-ecosystem: \"github-actions\"\n    directory: \"/\"\n    schedule:\n      interval: monthly\n    groups:\n      actions:\n        patterns:\n          - \"*\"\n    cooldown:\n      default-days: 7\n\n  - package-ecosystem: pip\n    directory: \"/\"\n    schedule:\n      interval: monthly\n    assignees:\n      - \"ezio-melotti\"\n    groups:\n      pip:\n        patterns:\n          - \"*\"\n    cooldown:\n      default-days: 7\n"
  },
  {
    "path": ".github/workflows/build-release.yml",
    "content": "on:\n  push:\n    paths-ignore:\n      - \".github/dependabot.yml\"\n      - \".github/workflows/lint.yml\"\n      - \".github/workflows/test.yml\"\n      - \".pre-commit-config.yaml\"\n      - \".ruff.toml\"\n      - \"README.md\"\n      - \"tests/**\"\n  pull_request:\n    paths-ignore:\n      - \".github/dependabot.yml\"\n      - \".github/workflows/lint.yml\"\n      - \".github/workflows/test.yml\"\n      - \".pre-commit-config.yaml\"\n      - \".ruff.toml\"\n      - \"README.md\"\n      - \"tests/**\"\n  workflow_dispatch:\n    inputs:\n      git_remote:\n        type: choice\n        description: \"Git remote to checkout\"\n        options:\n          - python\n          - savannahostrowski\n          - hugovk\n          - Yhg1s\n          - pablogsal\n          - ambv\n      git_commit:\n        type: string\n        description: \"Git commit to target for the release. Must use the full commit SHA, not the short ID\"\n      cpython_release:\n        type: string\n        description: \"CPython release number (ie '3.11.5', note without the 'v' prefix)\"\n\nname: \"Build release artifacts\"\n\npermissions: {}\n\n# Set from inputs for workflow_dispatch, or set defaults to test push/PR events\nenv:\n  GIT_REMOTE: ${{ github.event.inputs.git_remote || 'python' }}\n  GIT_COMMIT: ${{ github.event.inputs.git_commit || '55ea59e7dc35e1363b203ae4dd9cfc3a0ac0a844' }}\n  CPYTHON_RELEASE: ${{ github.event.inputs.cpython_release || '3.15.0a8' }}\n\njobs:\n  verify-input:\n    runs-on: ubuntu-24.04\n    timeout-minutes: 5\n    outputs:\n      build-docs: ${{ steps.select-jobs.outputs.docs }}\n      build-android: ${{ steps.select-jobs.outputs.android }}\n      build-ios: ${{ steps.select-jobs.outputs.ios }}\n    steps:\n      - name: \"Workflow run information\"\n        run: |\n          echo \"git_remote: $GIT_REMOTE\"\n          echo \"git_commit: $GIT_COMMIT\"\n          echo \"cpython_release: $CPYTHON_RELEASE\"\n\n          {\n            echo \"| 
Variable        | Value                |\"\n            echo \"| --------------- | -------------------- |\"\n            echo \"| git_remote      | \\`$GIT_REMOTE\\`      |\"\n            echo \"| git_commit      | \\`$GIT_COMMIT\\`      |\"\n            echo \"| cpython_release | \\`$CPYTHON_RELEASE\\` |\"\n          } >> \"$GITHUB_STEP_SUMMARY\"\n\n      - name: \"Checkout python/release-tools\"\n        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2\n        with:\n          persist-credentials: false\n\n      - name: \"Checkout ${{ env.GIT_REMOTE }}/cpython\"\n        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2\n        with:\n          persist-credentials: false\n          repository: \"${{ env.GIT_REMOTE }}/cpython\"\n          ref: \"v${{ env.CPYTHON_RELEASE }}\"\n          path: \"cpython\"\n\n      - name: \"Verify CPython commit matches tag\"\n        run: |\n          if [[ \"$GIT_COMMIT\" != \"$(cd cpython && git rev-parse HEAD)\" ]]; then\n            echo \"expected git commit ('$GIT_COMMIT') didn't match tagged commit ('$(git rev-parse HEAD)')\"\n            exit 1\n          fi\n\n      - name: \"Setup Python\"\n        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0\n        with:\n          python-version: 3.12\n\n      - name: \"Select jobs\"\n        id: select-jobs\n        run: |\n          test_flag=\"\"\n          if [[ \"${{ github.event_name }}\" != \"workflow_dispatch\" ]]; then\n            test_flag=\"--test\"\n          fi\n          output=$(./select_jobs.py $test_flag \"$CPYTHON_RELEASE\")\n          echo \"$output\" | tee -a \"$GITHUB_OUTPUT\"\n\n          {\n            echo \"| Job     | Enabled |\"\n            echo \"| ------- | ------- |\"\n            echo \"$output\" | while IFS='=' read -r key value; do\n              echo \"| $key | $value |\"\n            done\n          } >> \"$GITHUB_STEP_SUMMARY\"\n\n  build-source:\n    runs-on: 
ubuntu-24.04\n    timeout-minutes: 15\n    needs:\n      - verify-input\n    steps:\n      - name: \"Checkout python/release-tools\"\n        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2\n        with:\n          persist-credentials: false\n\n      - name: \"Checkout ${{ env.GIT_REMOTE }}/cpython\"\n        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2\n        with:\n          persist-credentials: false\n          repository: \"${{ env.GIT_REMOTE }}/cpython\"\n          ref: \"v${{ env.CPYTHON_RELEASE }}\"\n          path: \"cpython\"\n\n      - name: \"Setup Python\"\n        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0\n        with:\n          python-version: 3.12\n\n      - name: \"Install source dependencies\"\n        run: |\n          python -m pip install --no-deps \\\n            -r requirements.txt\n\n      - name: \"Build Python release artifacts\"\n        run: |\n          cd cpython\n          python ../release.py --export \"$CPYTHON_RELEASE\" --skip-docs\n\n      - name: \"Upload the source artifacts\"\n        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1\n        with:\n          name: source\n          path: |\n            cpython/${{ env.CPYTHON_RELEASE }}/src\n\n  build-docs:\n    runs-on: ubuntu-24.04\n    timeout-minutes: 45\n    needs:\n      - verify-input\n    if: fromJSON(needs.verify-input.outputs.build-docs)\n    steps:\n      - name: \"Checkout ${{ env.GIT_REMOTE }}/cpython\"\n        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2\n        with:\n          persist-credentials: false\n          repository: \"${{ env.GIT_REMOTE }}/cpython\"\n          ref: \"v${{ env.CPYTHON_RELEASE }}\"\n\n      - name: \"Setup Python\"\n        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0\n        with:\n          python-version: 3.12\n\n      - name: \"Install docs 
dependencies\"\n        run: |\n          python -m pip install -r Doc/requirements.txt\n\n      - name: \"Build docs\"\n        env:\n          SPHINXOPTS: \"-j10\"\n        run: |\n          cd Doc\n          make dist-epub\n          make dist-html\n          make dist-texinfo\n          make dist-text\n\n      - name: \"Upload the docs artifacts\"\n        uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1\n        with:\n          name: docs\n          path: |\n            Doc/dist/\n\n  test-source:\n    runs-on: ubuntu-24.04\n    timeout-minutes: 60\n    needs:\n      - build-source\n    steps:\n      - name: \"Download the source artifacts\"\n        uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1\n        with:\n          name: source\n\n      - name: \"Test Python source tarballs\"\n        run: |\n          mkdir -p ./tmp/installation/\n          cp \"Python-$CPYTHON_RELEASE.tgz\" ./tmp/\n          cd tmp/\n          tar xvf \"Python-$CPYTHON_RELEASE.tgz\"\n          cd \"Python-$CPYTHON_RELEASE\"\n\n          ./configure \"--prefix=$(realpath '../installation/')\"\n          make -j\n          make install -j\n\n          cd ../installation\n          ./bin/python3 -m test -uall -j4\n\n  test-docs:\n    runs-on: ubuntu-24.04\n    timeout-minutes: 15\n    needs:\n      - build-docs\n    steps:\n      - name: \"Download the docs artifacts\"\n        uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1\n        with:\n          name: docs\n\n      - name: \"Set up Python\"\n        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0\n        with:\n          python-version: \"3.x\"\n\n      - name: \"Install epubcheck\"\n        run: python -m pip install epubcheck\n\n      - name: \"Run epubcheck\"\n        continue-on-error: true\n        run: |\n          ls -la\n          epubcheck \"python-$CPYTHON_RELEASE-docs.epub\" &> 
epubcheck.txt\n\n      - name: \"Show epubcheck output\"\n        run: cat epubcheck.txt\n\n      - name: \"Check for fatal errors in EPUB\"\n        run: |\n          if grep -q \"^FATAL\" epubcheck.txt; then\n            echo \"Fatal errors found in EPUB:\"\n            grep \"^FATAL\" epubcheck.txt\n            exit 1\n          fi\n          echo \"No fatal errors found in EPUB\"\n\n  build-android:\n    name: build-android (${{ matrix.arch }})\n    needs:\n      - verify-input\n    if: fromJSON(needs.verify-input.outputs.build-android)\n\n    strategy:\n      matrix:\n        include:\n          - arch: aarch64\n            runs-on: macos-15\n          - arch: x86_64\n            runs-on: ubuntu-24.04\n\n    runs-on: ${{ matrix.runs-on }}\n    timeout-minutes: 60\n    env:\n      triplet: ${{ matrix.arch }}-linux-android\n    steps:\n      - name: \"Checkout ${{ env.GIT_REMOTE }}/cpython\"\n        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2\n        with:\n          persist-credentials: false\n          repository: \"${{ env.GIT_REMOTE }}/cpython\"\n          ref: \"v${{ env.CPYTHON_RELEASE }}\"\n\n      # Python 3.15 moved the build tools to the Platforms directory. Add a\n      # compatibility shim to simplify execution. Can be removed when 3.14\n      # reaches EOL\n      - name: Set up compatibility symlink\n        run: |\n          if [ ! 
-e Platforms/Android ]; then\n            mkdir -p Platforms\n            ln -s ../Android Platforms/Android\n            ln -s ./android.py Android/__main__.py\n          fi\n\n      - name: Build and test\n        run: python3 Platforms/Android ci --fast-ci \"$triplet\"\n\n      - uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1\n        with:\n          name: ${{ env.triplet }}\n          path: cross-build/${{ env.triplet }}/dist/*\n          if-no-files-found: error\n\n  build-ios:\n    name: build-iOS\n    needs:\n      - verify-input\n    if: fromJSON(needs.verify-input.outputs.build-ios)\n    runs-on: macos-14\n    timeout-minutes: 60\n    steps:\n      - name: \"Checkout ${{ env.GIT_REMOTE }}/cpython\"\n        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2\n        with:\n          persist-credentials: false\n          repository: \"${{ env.GIT_REMOTE }}/cpython\"\n          ref: \"v${{ env.CPYTHON_RELEASE }}\"\n\n      - name: Build and test\n        run: python3 Platforms/Apple ci iOS --slow-ci\n\n      - uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1\n        with:\n          name: ios\n          path: cross-build/dist/*\n          if-no-files-found: error\n"
  },
  {
    "path": ".github/workflows/lint.yml",
    "content": "name: Lint\n\non: [push, pull_request, workflow_dispatch]\n\npermissions: {}\n\nenv:\n  FORCE_COLOR: 1\n  RUFF_OUTPUT_FORMAT: github\n\njobs:\n  lint:\n    runs-on: ubuntu-latest\n\n    steps:\n      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2\n        with:\n          persist-credentials: false\n      - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0\n        with:\n          python-version: \"3.x\"\n          cache: pip\n      - uses: j178/prek-action@cbc2f23eb5539cf20d82d1aabd0d0ecbcc56f4e3 # v2.0.2\n\n      - name: Install dependencies\n        run: |\n          python3 -m pip install -U pip\n          python3 -m pip install -U tox\n\n      - name: Mypy\n        run: tox -e mypy\n\n      - name: Run PSScriptAnalyzer on PowerShell scripts\n        shell: pwsh\n        run: |\n          Invoke-ScriptAnalyzer -Path . -Recurse -Severity ParseError,Error -EnableExit\n"
  },
  {
    "path": ".github/workflows/test.yml",
    "content": "name: Test\n\non: [push, pull_request, workflow_dispatch]\n\npermissions: {}\n\nenv:\n  FORCE_COLOR: 1\n\njobs:\n  tests:\n    name: \"Tests\"\n    runs-on: ${{ matrix.os }}\n    strategy:\n      fail-fast: false\n      matrix:\n        python-version: [\"3.12\", \"3.13\", \"3.14\"]\n        os: [macos-latest, ubuntu-latest]\n    steps:\n      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2\n        with:\n          persist-credentials: false\n      - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0\n        with:\n          python-version: ${{ matrix.python-version }}\n          cache: pip\n          cache-dependency-path: dev-requirements.txt\n      - run: |\n          python -m pip install tox\n      - run: |\n          tox -e py\n\n      - name: Upload coverage\n        uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0\n        with:\n          token: ${{ secrets.CODECOV_ORG_TOKEN }}\n"
  },
  {
    "path": ".gitignore",
    "content": "#\n.idea/\n# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n*$py.class\n\n# C extensions\n*.so\n\n# Distribution / packaging\n.Python\nbuild/\ndevelop-eggs/\ndist/\ndownloads/\neggs/\n.eggs/\nlib64/\nparts/\nsdist/\nvar/\nwheels/\nshare/python-wheels/\n*.egg-info/\n.installed.cfg\n*.egg\nMANIFEST\n\n# PyInstaller\n#  Usually these files are written by a python script from a template\n#  before PyInstaller builds the exe, so as to inject date/other infos into it.\n*.manifest\n*.spec\n\n# Installer logs\npip-log.txt\npip-delete-this-directory.txt\n\n# Unit test / coverage reports\nhtmlcov/\n.tox/\n.nox/\n.coverage\n.coverage.*\n.cache\nnosetests.xml\ncoverage.xml\n*.cover\n*.py,cover\n.hypothesis/\n.pytest_cache/\ncover/\n\n# Translations\n*.mo\n*.pot\n\n# Django stuff:\n*.log\nlocal_settings.py\ndb.sqlite3\ndb.sqlite3-journal\n\n# Flask stuff:\ninstance/\n.webassets-cache\n\n# Scrapy stuff:\n.scrapy\n\n# Sphinx documentation\ndocs/_build/\n\n# PyBuilder\n.pybuilder/\ntarget/\n\n# Jupyter Notebook\n.ipynb_checkpoints\n\n# IPython\nprofile_default/\nipython_config.py\n\n# pyenv\n#   For a library or package, you might want to ignore these files since the code is\n#   intended to run in multiple environments; otherwise, check them in:\n# .python-version\n\n# pipenv\n#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.\n#   However, in case of collaboration, if having platform-specific dependencies or dependencies\n#   having no cross-platform support, pipenv may install dependencies that don't work, or not\n#   install all needed dependencies.\n#Pipfile.lock\n\n# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow\n__pypackages__/\n\n# Celery stuff\ncelerybeat-schedule\ncelerybeat.pid\n\n# SageMath parsed files\n*.sage.py\n\n# Environments\n.env\n.venv\nenv/\nvenv/\nENV/\nenv.bak/\nvenv.bak/\n\n# Spyder project settings\n.spyderproject\n.spyproject\n\n# Rope project settings\n.ropeproject\n\n# mkdocs documentation\n/site\n\n# mypy\n.mypy_cache/\n.dmypy.json\ndmypy.json\n\n# Pyre type checker\n.pyre/\n\n# pytype static type analyzer\n.pytype/\n\n# Cython debug symbols\ncython_debug/\n"
  },
  {
    "path": ".pre-commit-config.yaml",
    "content": "repos:\n  - repo: https://github.com/astral-sh/ruff-pre-commit\n    rev: v0.14.10\n    hooks:\n      - id: ruff-check\n        args: [--exit-non-zero-on-fix]\n\n  - repo: https://github.com/psf/black-pre-commit-mirror\n    rev: 25.12.0\n    hooks:\n      - id: black\n\n  - repo: https://github.com/pre-commit/pre-commit-hooks\n    rev: v6.0.0\n    hooks:\n      - id: check-added-large-files\n      - id: check-case-conflict\n      - id: check-merge-conflict\n      - id: check-toml\n      - id: check-yaml\n        exclude: windows-release/(azure-pipelines|msi-steps).yml\n      - id: debug-statements\n      - id: end-of-file-fixer\n      - id: forbid-submodules\n      - id: trailing-whitespace\n\n  - repo: https://github.com/python-jsonschema/check-jsonschema\n    rev: 0.36.0\n    hooks:\n      - id: check-dependabot\n      - id: check-github-workflows\n\n  - repo: https://github.com/rhysd/actionlint\n    rev: v1.7.10\n    hooks:\n      - id: actionlint\n\n  - repo: https://github.com/woodruffw/zizmor-pre-commit\n    rev: v1.19.0\n    hooks:\n      - id: zizmor\n\n  - repo: https://github.com/tox-dev/pyproject-fmt\n    rev: v2.11.1\n    hooks:\n      - id: pyproject-fmt\n\n  - repo: https://github.com/abravalheri/validate-pyproject\n    rev: v0.24.1\n    hooks:\n      - id: validate-pyproject\n\n  - repo: https://github.com/tox-dev/tox-ini-fmt\n    rev: 1.7.1\n    hooks:\n      - id: tox-ini-fmt\n\n  - repo: meta\n    hooks:\n      - id: check-hooks-apply\n      - id: check-useless-excludes\n"
  },
  {
    "path": ".ruff.toml",
    "content": "fix = true\n\n[lint]\nselect = [\n  \"C4\",     # flake8-comprehensions\n  \"E\",      # pycodestyle errors\n  \"F\",      # pyflakes errors\n  \"I\",      # isort\n  \"ISC\",    # flake8-implicit-str-concat\n  \"LOG\",    # flake8-logging\n  \"PGH\",    # pygrep-hooks\n  \"RUF100\", # unused noqa (yesqa)\n  \"UP\",     # pyupgrade\n  \"W\",      # pycodestyle warnings\n  \"YTT\",    # flake8-2020\n]\nignore = [\n  \"E203\",   # Whitespace before ':'\n  \"E221\",   # Multiple spaces before operator\n  \"E226\",   # Missing whitespace around arithmetic operator\n  \"E241\",   # Multiple spaces after ','\n  \"E501\",   # Line too long\n]\n"
  },
  {
    "path": "LICENSE.txt",
    "content": "PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2\n--------------------------------------------\n\n1. This LICENSE AGREEMENT is between the Python Software Foundation\n(\"PSF\"), and the Individual or Organization (\"Licensee\") accessing and\notherwise using this software (\"Python\") in source or binary form and\nits associated documentation.\n\n2. Subject to the terms and conditions of this License Agreement, PSF hereby\ngrants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,\nanalyze, test, perform and/or display publicly, prepare derivative works,\ndistribute, and otherwise use Python alone or in any derivative version,\nprovided, however, that PSF's License Agreement and PSF's notice of copyright,\ni.e., \"Copyright (c) 2008 Python Software Foundation; All Rights Reserved\"\nare retained in Python alone or in any derivative version prepared by Licensee.\n\n3. In the event Licensee prepares a derivative work that is based on\nor incorporates Python or any part thereof, and wants to make\nthe derivative work available to others as provided herein, then\nLicensee hereby agrees to include in any such work a brief summary of\nthe changes made to Python.\n\n4. PSF is making Python available to Licensee on an \"AS IS\"\nbasis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR\nIMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND\nDISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS\nFOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT\nINFRINGE ANY THIRD PARTY RIGHTS.\n\n5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON\nFOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS\nA RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,\nOR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.\n\n6. This License Agreement will automatically terminate upon a material\nbreach of its terms and conditions.\n\n7. 
Nothing in this License Agreement shall be deemed to create any\nrelationship of agency, partnership, or joint venture between PSF and\nLicensee.  This License Agreement does not grant permission to use PSF\ntrademarks or trade name in a trademark sense to endorse or promote\nproducts or services of Licensee, or any third party.\n\n8. By copying, installing or otherwise using Python, Licensee\nagrees to be bound by the terms and conditions of this License\nAgreement.\n"
  },
  {
    "path": "README.md",
    "content": "# release-tools\n\n[![.github/workflows/test.yml](https://github.com/python/release-tools/actions/workflows/test.yml/badge.svg)](https://github.com/python/release-tools/actions/workflows/test.yml)\n\nScripts for making (C)Python releases.\n"
  },
  {
    "path": "add_to_pydotorg.py",
    "content": "#!/usr/bin/env python\n\"\"\"\nScript to add ReleaseFile objects for Python releases on the new pydotorg.\nTo use (RELEASE is the full Python version number):\n\n* Copy this script to dl-files (it needs access to all the release files).\n  You could also download all files, then you need to use the \"--ftp-root\"\n  argument.\n\n* Make sure all download files are in place in the correct FTP subdirectory.\n\n* Create a new Release object via the Django admin (adding via API is\n  currently broken), the name MUST be \"Python RELEASE\".\n\n* Put an AUTH_INFO variable containing \"username:api_key\" in your environment.\n\n* Call this script as \"python add_to_pydotorg.py RELEASE\".\n\n  Each call will remove all previous file objects, so you can call the script\n  multiple times.\n\nGeorg Brandl, March 2014.\n\"\"\"\n\nimport argparse\nimport hashlib\nimport json\nimport os\nimport re\nimport subprocess\nimport sys\nfrom collections.abc import Generator\nfrom os import path\nfrom typing import Any, NoReturn\n\nimport requests\n\n\n# Copied from release.py\ndef error(*msgs: Any) -> NoReturn:\n    print(\"**ERROR**\", file=sys.stderr)\n    for msg in msgs:\n        print(msg, file=sys.stderr)\n    sys.exit(1)\n\n\n# Copied from release.py\ndef run_cmd(\n    cmd: list[str] | str, silent: bool = False, shell: bool = False, **kwargs: Any\n) -> None:\n    if shell:\n        cmd = \" \".join(cmd)\n    if not silent:\n        print(f\"Executing {cmd}\")\n    try:\n        if silent:\n            subprocess.check_call(cmd, shell=shell, stdout=subprocess.PIPE, **kwargs)\n        else:\n            subprocess.check_call(cmd, shell=shell, **kwargs)\n    except subprocess.CalledProcessError:\n        error(f\"{cmd} failed\")\n\n\ntry:\n    auth_info = os.environ[\"AUTH_INFO\"]\nexcept KeyError:\n    print(\n        \"Please set an environment variable named AUTH_INFO \"\n        'containing \"username:api_key\".'\n    )\n    sys.exit()\n\ndownload_root = 
\"https://www.python.org/ftp/python/\"\n\ntag_cre = re.compile(r\"(\\d+)(?:\\.(\\d+)(?:\\.(\\d+))?)?(?:([ab]|rc)(\\d+))?$\")\n\nheaders = {\"Authorization\": f\"ApiKey {auth_info}\", \"Content-Type\": \"application/json\"}\n\ngithub_oidc_provider = \"https://github.com/login/oauth\"\ngoogle_oidc_provider = \"https://accounts.google.com\"\n\n# Update this list when new release managers are added.\nrelease_to_sigstore_identity_and_oidc_issuer = {\n    \"3.10\": (\"pablogsal@python.org\", google_oidc_provider),\n    \"3.11\": (\"pablogsal@python.org\", google_oidc_provider),\n    \"3.12\": (\"thomas@python.org\", google_oidc_provider),\n    \"3.13\": (\"thomas@python.org\", google_oidc_provider),\n    \"3.14\": (\"hugo@python.org\", github_oidc_provider),\n    \"3.15\": (\"hugo@python.org\", github_oidc_provider),\n    \"3.16\": (\"savannah@python.org\", github_oidc_provider),\n    \"3.17\": (\"savannah@python.org\", github_oidc_provider),\n}\n\n\ndef macos_description(version: tuple[int, int, int]) -> str:\n    if version >= (3, 14):\n        return \"for macOS 10.15 and later\"\n    else:\n        return \"for macOS 10.13 and later\"\n\n\ndef get_file_descriptions(\n    release: str,\n) -> list[tuple[re.Pattern[str], tuple[str, str, bool, str]]]:\n    v = base_version_tuple(release)\n    rx = re.compile\n    # value is (file \"name\", OS slug, download button, file \"description\").\n    # OS=None means no ReleaseFile object. 
Only one matching *file* (not regex)\n    # per OS can have download=True.\n    return [\n        (rx(r\"\\.tgz$\"), (\"Gzipped source tarball\", \"source\", False, \"\")),\n        (rx(r\"\\.tar\\.xz$\"), (\"XZ compressed source tarball\", \"source\", True, \"\")),\n        (\n            rx(r\"windows-.+\\.json\"),\n            (\n                \"Windows release manifest\",\n                \"windows\",\n                False,\n                f\"Install with 'py install {v[0]}.{v[1]}'\",\n            ),\n        ),\n        (\n            rx(r\"-embed-amd64\\.zip$\"),\n            (\"Windows embeddable package (64-bit)\", \"windows\", False, \"\"),\n        ),\n        (\n            rx(r\"-embed-arm64\\.zip$\"),\n            (\"Windows embeddable package (ARM64)\", \"windows\", False, \"\"),\n        ),\n        (\n            rx(r\"-arm64\\.exe$\"),\n            (\"Windows installer (ARM64)\", \"windows\", False, \"Experimental\"),\n        ),\n        (\n            rx(r\"-amd64\\.exe$\"),\n            (\"Windows installer (64-bit)\", \"windows\", True, \"Recommended\"),\n        ),\n        (\n            rx(r\"-embed-win32\\.zip$\"),\n            (\"Windows embeddable package (32-bit)\", \"windows\", False, \"\"),\n        ),\n        (rx(r\"\\.exe$\"), (\"Windows installer (32-bit)\", \"windows\", False, \"\")),\n        (\n            rx(r\"-macos(x)?1[1-9](\\.[0-9]*)?\\.pkg$\"),\n            (\n                \"macOS installer\",\n                \"macos\",\n                True,\n                macos_description(v),\n            ),\n        ),\n        (\n            rx(r\"-aarch64-linux-android.tar.gz$\"),\n            (\"Android embeddable package (aarch64)\", \"android\", False, \"\"),\n        ),\n        (\n            rx(r\"-x86_64-linux-android.tar.gz$\"),\n            (\"Android embeddable package (x86_64)\", \"android\", False, \"\"),\n        ),\n        (\n            rx(r\"-iOS-XCframework.tar.gz$\"),\n            (\"iOS XCframework\", 
\"ios\", False, \"\"),\n        ),\n    ]\n\n\ndef slug_for(release: str) -> str:\n    return base_version(release).replace(\".\", \"\") + (\n        \"-\" + release[len(base_version(release)) :]\n        if release[len(base_version(release)) :]\n        else \"\"\n    )\n\n\ndef sigfile_for(release: str, rfile: str) -> str:\n    return download_root + f\"{release}/{rfile}.asc\"\n\n\ndef sha256sum_for(filename: str) -> str:\n    \"\"\"Returns SHA-256 checksum for filename.\"\"\"\n    return hashlib.sha256(open(filename, \"rb\").read()).hexdigest()\n\n\ndef filesize_for(filename: str) -> int:\n    return path.getsize(filename)\n\n\ndef make_slug(text: str) -> str:\n    return re.sub(\"[^a-zA-Z0-9_-]\", \"\", text.replace(\" \", \"-\"))\n\n\ndef base_version(release: str) -> str:\n    m = tag_cre.match(release)\n    assert m is not None, f\"Invalid release: {release}\"\n    return \".\".join(m.groups()[:3])\n\n\ndef base_version_tuple(release: str) -> tuple[int, int, int]:\n    m = tag_cre.match(release)\n    assert m is not None, f\"Invalid release: {release}\"\n    return int(m.groups()[0]), int(m.groups()[1]), int(m.groups()[2])\n\n\ndef minor_version(release: str) -> str:\n    m = tag_cre.match(release)\n    assert m is not None, f\"Invalid release: {release}\"\n    return \".\".join(m.groups()[:2])\n\n\ndef minor_version_tuple(release: str) -> tuple[int, int]:\n    m = tag_cre.match(release)\n    assert m is not None, f\"Invalid release: {release}\"\n    return int(m.groups()[0]), int(m.groups()[1])\n\n\ndef build_file_dict(\n    ftp_root: str,\n    release: str,\n    rfile: str,\n    rel_pk: int,\n    file_desc: str,\n    os_pk: int,\n    add_download: bool,\n    add_desc: str,\n) -> dict[str, Any]:\n    \"\"\"Return a dictionary with all needed fields for a ReleaseFile object.\"\"\"\n    filename = path.join(ftp_root, base_version(release), rfile)\n    d = {\n        \"name\": file_desc,\n        \"slug\": slug_for(release) + \"-\" + 
make_slug(file_desc)[:40],\n        \"os\": f\"/api/v1/downloads/os/{os_pk}/\",\n        \"release\": f\"/api/v1/downloads/release/{rel_pk}/\",\n        \"description\": add_desc,\n        \"is_source\": os_pk == 3,\n        \"url\": download_root + f\"{base_version(release)}/{rfile}\",\n        \"sha256_sum\": sha256sum_for(filename),\n        \"filesize\": filesize_for(filename),\n        \"download_button\": add_download,\n    }\n    # Upload GPG signature\n    if os.path.exists(filename + \".asc\"):\n        d[\"gpg_signature_file\"] = sigfile_for(base_version(release), rfile)\n    # Upload Sigstore signature\n    if os.path.exists(filename + \".sig\"):\n        d[\"sigstore_signature_file\"] = (\n            download_root + f\"{base_version(release)}/{rfile}.sig\"\n        )\n    # Upload Sigstore certificate\n    if os.path.exists(filename + \".crt\"):\n        d[\"sigstore_cert_file\"] = download_root + f\"{base_version(release)}/{rfile}.crt\"\n    # Upload Sigstore bundle\n    if os.path.exists(filename + \".sigstore\"):\n        d[\"sigstore_bundle_file\"] = (\n            download_root + f\"{base_version(release)}/{rfile}.sigstore\"\n        )\n    # Upload SPDX SBOM file\n    if os.path.exists(filename + \".spdx.json\"):\n        d[\"sbom_spdx2_file\"] = (\n            download_root + f\"{base_version(release)}/{rfile}.spdx.json\"\n        )\n\n    return d\n\n\ndef list_files(\n    ftp_root: str, release: str\n) -> Generator[tuple[str, str, str, bool, str], None, None]:\n    \"\"\"List all of the release's download files.\"\"\"\n    reldir = base_version(release)\n    for rfile in sorted(os.listdir(path.join(ftp_root, reldir))):\n        if not path.isfile(path.join(ftp_root, reldir, rfile)):\n            continue\n\n        if rfile.endswith((\".asc\", \".sig\", \".crt\", \".sigstore\", \".spdx.json\")):\n            continue\n\n        prefix, _, rest = rfile.partition(\"-\")\n\n        if prefix.lower() not in (\"python\", \"windows\"):\n            
print(f\"    File {reldir}/{rfile} has wrong prefix\")\n            continue\n\n        if not rest.startswith((release + \"-\", release + \".\")):\n            print(f\"    File {reldir}/{rfile} has a different version\")\n            continue\n\n        for rx, info in get_file_descriptions(release):\n            if rx.search(rfile):\n                yield (rfile, *info)\n                break\n        else:\n            print(f\"    File {reldir}/{rfile} not recognized\")\n            continue\n\n\ndef query_object(base_url: str, objtype: str, **params: Any) -> int:\n    \"\"\"Find an API object by query parameters.\"\"\"\n    uri = base_url + f\"downloads/{objtype}/\"\n    uri += \"?\" + \"&\".join(f\"{k}={v}\" for k, v in params.items())\n    resp = requests.get(uri, headers=headers)\n    if resp.status_code != 200 or not json.loads(resp.text)[\"objects\"]:\n        raise RuntimeError(f\"no object for {objtype} params={params!r}\")\n    obj = json.loads(resp.text)[\"objects\"][0]\n    return int(obj[\"resource_uri\"].strip(\"/\").split(\"/\")[-1])\n\n\ndef post_object(base_url: str, objtype: str, datadict: dict[str, Any]) -> int:\n    \"\"\"Create a new API object.\"\"\"\n    resp = requests.post(\n        base_url + \"downloads/\" + objtype + \"/\",\n        data=json.dumps(datadict),\n        headers=headers,\n    )\n    if resp.status_code != 201:\n        try:\n            info = json.loads(resp.text)\n            print(info.get(\"error_message\", \"No error message.\"))\n            print(info.get(\"traceback\", \"\"))\n        except:  # noqa: E722\n            pass\n        print(f\"Creating {objtype} failed: {resp.status_code}\")\n        return -1\n    newloc = resp.headers[\"Location\"]\n    pk = int(newloc.strip(\"/\").split(\"/\")[-1])\n    return pk\n\n\ndef sign_release_files_with_sigstore(\n    ftp_root: str, release: str, release_files: list[tuple[str, str, str, bool, str]]\n) -> None:\n    filenames = [\n        ftp_root + 
f\"{base_version(release)}/{rfile}\" for rfile, *_ in release_files\n    ]\n\n    def has_sigstore_signature(filename: str) -> bool:\n        return os.path.exists(filename + \".sigstore\") or (\n            os.path.exists(filename + \".sig\") and os.path.exists(filename + \".crt\")\n        )\n\n    # Skip files that already have a signature (likely source distributions)\n    unsigned_files = [\n        filename for filename in filenames if not has_sigstore_signature(filename)\n    ]\n\n    if unsigned_files:\n        print(\"Signing release files with Sigstore\")\n        for filename in unsigned_files:\n            cert_file = filename + \".crt\"\n            sig_file = filename + \".sig\"\n            bundle_file = filename + \".sigstore\"\n\n            run_cmd(\n                [\n                    \"python3\",\n                    \"-m\",\n                    \"sigstore\",\n                    \"sign\",\n                    \"--oidc-disable-ambient-providers\",\n                    \"--signature\",\n                    sig_file,\n                    \"--certificate\",\n                    cert_file,\n                    \"--bundle\",\n                    bundle_file,\n                    filename,\n                ]\n            )\n\n            run_cmd([\"chmod\", \"644\", sig_file])\n            run_cmd([\"chmod\", \"644\", cert_file])\n            run_cmd([\"chmod\", \"644\", bundle_file])\n    else:\n        print(\"All release files already signed with Sigstore\")\n\n    # Verify all the files we expect to be signed with sigstore\n    # against the documented release manager identities and providers.\n    try:\n        sigstore_identity_and_oidc_issuer = (\n            release_to_sigstore_identity_and_oidc_issuer[minor_version(release)]\n        )\n    except KeyError:\n        error([\"No release manager defined for Python release \" + release])\n    sigstore_identity, sigstore_oidc_issuer = sigstore_identity_and_oidc_issuer\n\n    print(\"Verifying 
release files were signed correctly with Sigstore\")\n    sigstore_verify_argv = [\n        \"python3\",\n        \"-m\",\n        \"sigstore\",\n        \"verify\",\n        \"identity\",\n        \"--cert-identity\",\n        sigstore_identity,\n        \"--cert-oidc-issuer\",\n        sigstore_oidc_issuer,\n    ]\n    for filename in filenames:\n        filename_crt = filename + \".crt\"\n        filename_sig = filename + \".sig\"\n        filename_sigstore = filename + \".sigstore\"\n\n        if os.path.exists(filename_sigstore):\n            run_cmd(\n                sigstore_verify_argv + [\"--bundle\", filename_sigstore, filename],\n                stderr=subprocess.STDOUT,  # Sigstore sends stderr on success.\n            )\n\n        # We use an 'or' here to error out if one of the files is missing.\n        if os.path.exists(filename_sig) or os.path.exists(filename_crt):\n            run_cmd(\n                sigstore_verify_argv\n                + [\n                    \"--certificate\",\n                    filename_crt,\n                    \"--signature\",\n                    filename_sig,\n                    filename,\n                ],\n                stderr=subprocess.STDOUT,  # Sigstore sends stderr on success.\n            )\n\n\ndef parse_args() -> argparse.Namespace:\n    def ensure_trailing_slash(s: str) -> str:\n        if not s.endswith(\"/\"):\n            s += \"/\"\n        return s\n\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\n        \"--base-url\",\n        metavar=\"URL\",\n        type=ensure_trailing_slash,\n        default=\"https://www.python.org/api/v1/\",\n        help=\"API URL; defaults to %(default)s\",\n    )\n    parser.add_argument(\n        \"--ftp-root\",\n        metavar=\"DIR\",\n        type=ensure_trailing_slash,\n        default=\"/srv/www.python.org/ftp/python/\",\n        help=\"FTP root; defaults to %(default)s\",\n    )\n    parser.add_argument(\n        \"release\",\n        
help=\"Python version number, e.g. 3.14.0rc2\",\n    )\n    return parser.parse_args()\n\n\ndef main() -> None:\n    args = parse_args()\n    rel = args.release\n    print(\"Querying python.org for release\", rel)\n    rel_pk = query_object(args.base_url, \"release\", name=\"Python+\" + rel)\n    print(\"Found Release object: id =\", rel_pk)\n\n    release_files = list(list_files(args.ftp_root, rel))\n    sign_release_files_with_sigstore(args.ftp_root, rel, release_files)\n    n = 0\n    file_dicts = {}\n    for rfile, file_desc, os_slug, add_download, add_desc in release_files:\n        if not os_slug:\n            continue\n        os_pk = query_object(args.base_url, \"os\", slug=os_slug)\n        file_dict = build_file_dict(\n            args.ftp_root, rel, rfile, rel_pk, file_desc, os_pk, add_download, add_desc\n        )\n        key = file_dict[\"slug\"]\n        print(\"Creating ReleaseFile object for\", rfile, key)\n        if key in file_dicts:\n            raise RuntimeError(f\"duplicate slug generated: {key}\")\n        file_dicts[key] = file_dict\n    print(\"Deleting previous release files\")\n    resp = requests.delete(\n        args.base_url + f\"downloads/release_file/?release={rel_pk}\", headers=headers\n    )\n    if resp.status_code != 204:\n        raise RuntimeError(f\"deleting previous releases failed: {resp.status_code}\")\n    for file_dict in file_dicts.values():\n        file_pk = post_object(args.base_url, \"release_file\", file_dict)\n        if file_pk >= 0:\n            print(\"Created as id =\", file_pk)\n            n += 1\n    print(f\"Done - {n} files added\")\n\n\nif __name__ == \"__main__\" and not sys.flags.interactive:\n    main()\n"
  },
  {
    "path": "buildbotapi.py",
    "content": "import json\nfrom dataclasses import dataclass\nfrom typing import Any, cast\n\nfrom aiohttp.client import ClientSession\n\nJSON = dict[str, Any]\n\n\n@dataclass\nclass Builder:\n    builderid: int\n    description: str | None\n    name: str\n    tags: list[str]\n\n    def __init__(self, **kwargs: Any) -> None:\n        self.__dict__.update(**kwargs)\n\n    def __hash__(self) -> int:\n        return hash(self.builderid)\n\n\nclass BuildBotAPI:\n    def __init__(self, session: ClientSession) -> None:\n        self._session = session\n\n    async def authenticate(self, token: str) -> None:\n        await self._session.get(\n            \"https://buildbot.python.org/all/auth/login\", params={\"token\": token}\n        )\n\n    async def _fetch_text(self, url: str) -> str:\n        async with self._session.get(url) as resp:\n            return await resp.text()\n\n    async def _fetch_json(self, url: str) -> JSON:\n        return cast(JSON, json.loads(await self._fetch_text(url)))\n\n    async def stable_builders(self, branch: str | None = None) -> dict[int, Builder]:\n        stable_builders = {\n            id: builder\n            for (id, builder) in (await self.all_builders(branch=branch)).items()\n            if \"stable\" in builder.tags\n        }\n        return stable_builders\n\n    async def all_builders(self, branch: str | None = None) -> dict[int, Builder]:\n        url = \"https://buildbot.python.org/all/api/v2/builders\"\n        if branch is not None:\n            url = f\"{url}?tags__contains={branch}\"\n        _builders: dict[str, Any] = await self._fetch_json(url)\n        builders = _builders[\"builders\"]\n        all_builders = {\n            builder[\"builderid\"]: Builder(**builder) for builder in builders\n        }\n        return all_builders\n\n    async def is_builder_failing_currently(self, builder: Builder) -> bool:\n        builds_: dict[str, Any] = await self._fetch_json(\n            
f\"https://buildbot.python.org/all/api/v2/builds?complete__eq=true\"\n            f\"&&builderid__eq={builder.builderid}&&order=-complete_at\"\n            f\"&&limit=1\"\n        )\n        builds = builds_[\"builds\"]\n        if not builds:\n            return False\n        (build,) = builds\n        if build[\"results\"] == 2:\n            return True\n        return False\n"
  },
  {
    "path": "dev-requirements.in",
    "content": "pyfakefs\npytest\npytest-aiohttp\npytest-cov\npytest-mock\n"
  },
  {
    "path": "dev-requirements.txt",
    "content": "#\n# This file is autogenerated by pip-compile with Python 3.12\n# by the following command:\n#\n#    pip-compile --generate-hashes --output-file=dev-requirements.txt dev-requirements.in\n#\naiohappyeyeballs==2.6.1 \\\n    --hash=sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558 \\\n    --hash=sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8\n    # via aiohttp\naiohttp==3.13.3 \\\n    --hash=sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf \\\n    --hash=sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c \\\n    --hash=sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c \\\n    --hash=sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423 \\\n    --hash=sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f \\\n    --hash=sha256:0db318f7a6f065d84cb1e02662c526294450b314a02bd9e2a8e67f0d8564ce40 \\\n    --hash=sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2 \\\n    --hash=sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf \\\n    --hash=sha256:147e422fd1223005c22b4fe080f5d93ced44460f5f9c105406b753612b587821 \\\n    --hash=sha256:1cb93e166e6c28716c8c6aeb5f99dfb6d5ccf482d29fe9bf9a794110e6d0ab64 \\\n    --hash=sha256:215a685b6fbbfcf71dfe96e3eba7a6f58f10da1dfdf4889c7dd856abe430dca7 \\\n    --hash=sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998 \\\n    --hash=sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d \\\n    --hash=sha256:28e027cf2f6b641693a09f631759b4d9ce9165099d2b5d92af9bd4e197690eea \\\n    --hash=sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463 \\\n    --hash=sha256:2ba0eea45eb5cc3172dbfc497c066f19c41bac70963ea1a67d51fc92e4cf9a80 \\\n    --hash=sha256:2be0e9ccf23e8a94f6f0650ce06042cefc6ac703d0d7ab6c7a917289f2539ad4 \\\n    
--hash=sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767 \\\n    --hash=sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43 \\\n    --hash=sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592 \\\n    --hash=sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a \\\n    --hash=sha256:31a83ea4aead760dfcb6962efb1d861db48c34379f2ff72db9ddddd4cda9ea2e \\\n    --hash=sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687 \\\n    --hash=sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8 \\\n    --hash=sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261 \\\n    --hash=sha256:37da61e244d1749798c151421602884db5270faf479cf0ef03af0ff68954c9dd \\\n    --hash=sha256:3b61b7169ababd7802f9568ed96142616a9118dd2be0d1866e920e77ec8fa92a \\\n    --hash=sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4 \\\n    --hash=sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587 \\\n    --hash=sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91 \\\n    --hash=sha256:40c5e40ecc29ba010656c18052b877a1c28f84344825efa106705e835c28530f \\\n    --hash=sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3 \\\n    --hash=sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344 \\\n    --hash=sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6 \\\n    --hash=sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3 \\\n    --hash=sha256:4ae5b5a0e1926e504c81c5b84353e7a5516d8778fbbff00429fe7b05bb25cbce \\\n    --hash=sha256:4e239d501f73d6db1522599e14b9b321a7e3b1de66ce33d53a765d975e9f4808 \\\n    --hash=sha256:56339a36b9f1fc708260c76c87e593e2afb30d26de9ae1eb445b5e051b98a7a1 \\\n    --hash=sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29 \\\n    --hash=sha256:5b179331a481cb5529fca8b432d8d3c7001cb217513c94cd72d668d1248688a3 \\\n   
 --hash=sha256:5b6073099fb654e0a068ae678b10feff95c5cae95bbfcbfa7af669d361a8aa6b \\\n    --hash=sha256:5d2d94f1f5fcbe40838ac51a6ab5704a6f9ea42e72ceda48de5e6b898521da51 \\\n    --hash=sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c \\\n    --hash=sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926 \\\n    --hash=sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64 \\\n    --hash=sha256:642f752c3eb117b105acbd87e2c143de710987e09860d674e068c4c2c441034f \\\n    --hash=sha256:65d2ccb7eabee90ce0503c17716fc77226be026dcc3e65cce859a30db715025b \\\n    --hash=sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e \\\n    --hash=sha256:694976222c711d1d00ba131904beb60534f93966562f64440d0c9d41b8cdb440 \\\n    --hash=sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6 \\\n    --hash=sha256:69c56fbc1993fa17043e24a546959c0178fe2b5782405ad4559e6c13975c15e3 \\\n    --hash=sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d \\\n    --hash=sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415 \\\n    --hash=sha256:75ca857eba4e20ce9f546cd59c7007b33906a4cd48f2ff6ccf1ccfc3b646f279 \\\n    --hash=sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce \\\n    --hash=sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603 \\\n    --hash=sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0 \\\n    --hash=sha256:7e63f210bc1b57ef699035f2b4b6d9ce096b5914414a49b0997c839b2bd2223c \\\n    --hash=sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf \\\n    --hash=sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591 \\\n    --hash=sha256:80dd4c21b0f6237676449c6baaa1039abae86b91636b6c91a7f8e61c87f89540 \\\n    --hash=sha256:81e97251d9298386c2b7dbeb490d3d1badbdc69107fb8c9299dd04eb39bddc0e \\\n    --hash=sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26 \\\n  
  --hash=sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a \\\n    --hash=sha256:859bd3f2156e81dd01432f5849fc73e2243d4a487c4fd26609b1299534ee1845 \\\n    --hash=sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a \\\n    --hash=sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9 \\\n    --hash=sha256:8a60e60746623925eab7d25823329941aee7242d559baa119ca2b253c88a7bd6 \\\n    --hash=sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba \\\n    --hash=sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df \\\n    --hash=sha256:947c26539750deeaee933b000fb6517cc770bbd064bad6033f1cff4803881e43 \\\n    --hash=sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679 \\\n    --hash=sha256:988a8c5e317544fdf0d39871559e67b6341065b87fceac641108c2096d5506b7 \\\n    --hash=sha256:9a9dc347e5a3dc7dfdbc1f82da0ef29e388ddb2ed281bfce9dd8248a313e62b7 \\\n    --hash=sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc \\\n    --hash=sha256:9af5e68ee47d6534d36791bbe9b646d2a7c7deb6fc24d7943628edfbb3581f29 \\\n    --hash=sha256:9b174f267b5cfb9a7dba9ee6859cecd234e9a681841eb85068059bc867fb8f02 \\\n    --hash=sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984 \\\n    --hash=sha256:9d4c940f02f49483b18b079d1c27ab948721852b281f8b015c058100e9421dd1 \\\n    --hash=sha256:9ebf57d09e131f5323464bd347135a88622d1c0976e88ce15b670e7ad57e4bd6 \\\n    --hash=sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632 \\\n    --hash=sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56 \\\n    --hash=sha256:a2212ad43c0833a873d0fb3c63fa1bacedd4cf6af2fee62bf4b739ceec3ab239 \\\n    --hash=sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168 \\\n    --hash=sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88 \\\n    --hash=sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc \\\n 
   --hash=sha256:af71fff7bac6bb7508956696dce8f6eec2bbb045eceb40343944b1ae62b5ef11 \\\n    --hash=sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046 \\\n    --hash=sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0 \\\n    --hash=sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3 \\\n    --hash=sha256:b46020d11d23fe16551466c77823df9cc2f2c1e63cc965daf67fa5eec6ca1877 \\\n    --hash=sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1 \\\n    --hash=sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c \\\n    --hash=sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25 \\\n    --hash=sha256:b99281b0704c103d4e11e72a76f1b543d4946fea7dd10767e7e1b5f00d4e5704 \\\n    --hash=sha256:bae5c2ed2eae26cc382020edad80d01f36cb8e746da40b292e68fec40421dc6a \\\n    --hash=sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033 \\\n    --hash=sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1 \\\n    --hash=sha256:bfc1cc2fe31a6026a8a88e4ecfb98d7f6b1fec150cfd708adbfd1d2f42257c29 \\\n    --hash=sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d \\\n    --hash=sha256:c048058117fd649334d81b4b526e94bde3ccaddb20463a815ced6ecbb7d11160 \\\n    --hash=sha256:c0e2d366af265797506f0283487223146af57815b388623f0357ef7eac9b209d \\\n    --hash=sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f \\\n    --hash=sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f \\\n    --hash=sha256:c6b8568a3bb5819a0ad087f16d40e5a3fb6099f39ea1d5625a3edc1e923fc538 \\\n    --hash=sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29 \\\n    --hash=sha256:d5a372fd5afd301b3a89582817fdcdb6c34124787c70dbcc616f259013e7eef7 \\\n    --hash=sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72 \\\n    --hash=sha256:dca68018bf48c251ba17c72ed479f4dafe9dbd5a73707ad8d28a38d11f3d42af 
\\\n    --hash=sha256:de2c184bb1fe2cbd2cefba613e9db29a5ab559323f994b6737e370d3da0ac455 \\\n    --hash=sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57 \\\n    --hash=sha256:e50a2e1404f063427c9d027378472316201a2290959a295169bcf25992d04558 \\\n    --hash=sha256:e636b3c5f61da31a92bf0d91da83e58fdfa96f178ba682f11d24f31944cdd28c \\\n    --hash=sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808 \\\n    --hash=sha256:f33ed1a2bf1997a36661874b017f5c4b760f41266341af36febaf271d179f6d7 \\\n    --hash=sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0 \\\n    --hash=sha256:f9444f105664c4ce47a2a7171a2418bce5b7bae45fb610f4e2c36045d85911d3 \\\n    --hash=sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730 \\\n    --hash=sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa \\\n    --hash=sha256:fee0c6bc7db1de362252affec009707a17478a00ec69f797d23ca256e36d5940\n    # via pytest-aiohttp\naiosignal==1.4.0 \\\n    --hash=sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e \\\n    --hash=sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7\n    # via aiohttp\nattrs==24.3.0 \\\n    --hash=sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff \\\n    --hash=sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308\n    # via aiohttp\ncoverage[toml]==7.10.7 \\\n    --hash=sha256:03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9 \\\n    --hash=sha256:097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880 \\\n    --hash=sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999 \\\n    --hash=sha256:0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1 \\\n    --hash=sha256:10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13 \\\n    --hash=sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b \\\n    
--hash=sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82 \\\n    --hash=sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973 \\\n    --hash=sha256:1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f \\\n    --hash=sha256:1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681 \\\n    --hash=sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0 \\\n    --hash=sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541 \\\n    --hash=sha256:212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32 \\\n    --hash=sha256:240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17 \\\n    --hash=sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a \\\n    --hash=sha256:2af88deffcc8a4d5974cf2d502251bc3b2db8461f0b66d80a449c33757aa9f40 \\\n    --hash=sha256:2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd \\\n    --hash=sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6 \\\n    --hash=sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7 \\\n    --hash=sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb \\\n    --hash=sha256:3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f \\\n    --hash=sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d \\\n    --hash=sha256:39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe \\\n    --hash=sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c \\\n    --hash=sha256:3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807 \\\n    --hash=sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab \\\n    --hash=sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2 \\\n    --hash=sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546 \\\n    --hash=sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e \\\n   
 --hash=sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65 \\\n    --hash=sha256:567f5c155eda8df1d3d439d40a45a6a5f029b429b06648235f1e7e51b522b396 \\\n    --hash=sha256:5a02d5a850e2979b0a014c412573953995174743a3f7fa4ea5a6e9a3c5617431 \\\n    --hash=sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb \\\n    --hash=sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699 \\\n    --hash=sha256:606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0 \\\n    --hash=sha256:635adb9a4507c9fd2ed65f39693fa31c9a3ee3a8e6dc64df033e8fdf52a7003f \\\n    --hash=sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a \\\n    --hash=sha256:67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235 \\\n    --hash=sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911 \\\n    --hash=sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23 \\\n    --hash=sha256:6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87 \\\n    --hash=sha256:6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699 \\\n    --hash=sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a \\\n    --hash=sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b \\\n    --hash=sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256 \\\n    --hash=sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a \\\n    --hash=sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417 \\\n    --hash=sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0 \\\n    --hash=sha256:7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a \\\n    --hash=sha256:813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360 \\\n    --hash=sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0 \\\n    --hash=sha256:8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b \\\n  
  --hash=sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb \\\n    --hash=sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2 \\\n    --hash=sha256:8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d \\\n    --hash=sha256:912e6ebc7a6e4adfdbb1aec371ad04c68854cd3bf3608b3514e7ff9062931d8a \\\n    --hash=sha256:925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e \\\n    --hash=sha256:93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69 \\\n    --hash=sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14 \\\n    --hash=sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d \\\n    --hash=sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f \\\n    --hash=sha256:9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2 \\\n    --hash=sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c \\\n    --hash=sha256:a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0 \\\n    --hash=sha256:a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399 \\\n    --hash=sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59 \\\n    --hash=sha256:a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63 \\\n    --hash=sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b \\\n    --hash=sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2 \\\n    --hash=sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e \\\n    --hash=sha256:affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0 \\\n    --hash=sha256:b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520 \\\n    --hash=sha256:b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df \\\n    --hash=sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c \\\n    --hash=sha256:b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b \\\n 
   --hash=sha256:b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2 \\\n    --hash=sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f \\\n    --hash=sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61 \\\n    --hash=sha256:bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a \\\n    --hash=sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59 \\\n    --hash=sha256:bc91b314cef27742da486d6839b677b3f2793dfe52b51bbbb7cf736d5c29281c \\\n    --hash=sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf \\\n    --hash=sha256:c134869d5ffe34547d14e174c866fd8fe2254918cc0a95e99052903bc1543e07 \\\n    --hash=sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6 \\\n    --hash=sha256:c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e \\\n    --hash=sha256:c7315339eae3b24c2d2fa1ed7d7a38654cba34a13ef19fbcb9425da46d3dc594 \\\n    --hash=sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49 \\\n    --hash=sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843 \\\n    --hash=sha256:cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14 \\\n    --hash=sha256:cce2109b6219f22ece99db7644b9622f54a4e915dad65660ec435e89a3ea7cc3 \\\n    --hash=sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1 \\\n    --hash=sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698 \\\n    --hash=sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15 \\\n    --hash=sha256:e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d \\\n    --hash=sha256:e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5 \\\n    --hash=sha256:e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e \\\n    --hash=sha256:f3c887f96407cea3916294046fc7dab611c2552beadbed4ea901cbc6a40cc7a0 \\\n    --hash=sha256:f49a05acd3dfe1ce9715b657e28d138578bc40126760efb962322c56e9ca344b 
\\\n    --hash=sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239 \\\n    --hash=sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba \\\n    --hash=sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4 \\\n    --hash=sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260 \\\n    --hash=sha256:fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a \\\n    --hash=sha256:fff7b9c3f19957020cac546c70025331113d2e61537f6e2441bc7657913de7d3\n    # via pytest-cov\nfrozenlist==1.5.0 \\\n    --hash=sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e \\\n    --hash=sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf \\\n    --hash=sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6 \\\n    --hash=sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a \\\n    --hash=sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d \\\n    --hash=sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f \\\n    --hash=sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28 \\\n    --hash=sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b \\\n    --hash=sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9 \\\n    --hash=sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2 \\\n    --hash=sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec \\\n    --hash=sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2 \\\n    --hash=sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c \\\n    --hash=sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336 \\\n    --hash=sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4 \\\n    --hash=sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d \\\n    
--hash=sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b \\\n    --hash=sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c \\\n    --hash=sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10 \\\n    --hash=sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08 \\\n    --hash=sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942 \\\n    --hash=sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8 \\\n    --hash=sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f \\\n    --hash=sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10 \\\n    --hash=sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5 \\\n    --hash=sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6 \\\n    --hash=sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21 \\\n    --hash=sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c \\\n    --hash=sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d \\\n    --hash=sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923 \\\n    --hash=sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608 \\\n    --hash=sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de \\\n    --hash=sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17 \\\n    --hash=sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0 \\\n    --hash=sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f \\\n    --hash=sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641 \\\n    --hash=sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c \\\n    --hash=sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a \\\n    --hash=sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0 \\\n   
 --hash=sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9 \\\n    --hash=sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab \\\n    --hash=sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f \\\n    --hash=sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3 \\\n    --hash=sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a \\\n    --hash=sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784 \\\n    --hash=sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604 \\\n    --hash=sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d \\\n    --hash=sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5 \\\n    --hash=sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03 \\\n    --hash=sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e \\\n    --hash=sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953 \\\n    --hash=sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee \\\n    --hash=sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d \\\n    --hash=sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817 \\\n    --hash=sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3 \\\n    --hash=sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039 \\\n    --hash=sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f \\\n    --hash=sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9 \\\n    --hash=sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf \\\n    --hash=sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76 \\\n    --hash=sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba \\\n    --hash=sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171 \\\n  
  --hash=sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb \\\n    --hash=sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439 \\\n    --hash=sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631 \\\n    --hash=sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972 \\\n    --hash=sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d \\\n    --hash=sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869 \\\n    --hash=sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9 \\\n    --hash=sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411 \\\n    --hash=sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723 \\\n    --hash=sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2 \\\n    --hash=sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b \\\n    --hash=sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99 \\\n    --hash=sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e \\\n    --hash=sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840 \\\n    --hash=sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3 \\\n    --hash=sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb \\\n    --hash=sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3 \\\n    --hash=sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0 \\\n    --hash=sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca \\\n    --hash=sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45 \\\n    --hash=sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e \\\n    --hash=sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f \\\n    --hash=sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5 \\\n 
   --hash=sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307 \\\n    --hash=sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e \\\n    --hash=sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2 \\\n    --hash=sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778 \\\n    --hash=sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a \\\n    --hash=sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30 \\\n    --hash=sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a\n    # via\n    #   aiohttp\n    #   aiosignal\nidna==3.10 \\\n    --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \\\n    --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3\n    # via yarl\niniconfig==2.0.0 \\\n    --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \\\n    --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374\n    # via pytest\nmultidict==6.1.0 \\\n    --hash=sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f \\\n    --hash=sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056 \\\n    --hash=sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761 \\\n    --hash=sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3 \\\n    --hash=sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b \\\n    --hash=sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6 \\\n    --hash=sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748 \\\n    --hash=sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966 \\\n    --hash=sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f \\\n    --hash=sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1 \\\n    
--hash=sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6 \\\n    --hash=sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada \\\n    --hash=sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305 \\\n    --hash=sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2 \\\n    --hash=sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d \\\n    --hash=sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a \\\n    --hash=sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef \\\n    --hash=sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c \\\n    --hash=sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb \\\n    --hash=sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60 \\\n    --hash=sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6 \\\n    --hash=sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4 \\\n    --hash=sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478 \\\n    --hash=sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81 \\\n    --hash=sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7 \\\n    --hash=sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56 \\\n    --hash=sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3 \\\n    --hash=sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6 \\\n    --hash=sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30 \\\n    --hash=sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb \\\n    --hash=sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506 \\\n    --hash=sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0 \\\n    --hash=sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925 \\\n   
 --hash=sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c \\\n    --hash=sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6 \\\n    --hash=sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e \\\n    --hash=sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95 \\\n    --hash=sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2 \\\n    --hash=sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133 \\\n    --hash=sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2 \\\n    --hash=sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa \\\n    --hash=sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3 \\\n    --hash=sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3 \\\n    --hash=sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436 \\\n    --hash=sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657 \\\n    --hash=sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581 \\\n    --hash=sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492 \\\n    --hash=sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43 \\\n    --hash=sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2 \\\n    --hash=sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2 \\\n    --hash=sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926 \\\n    --hash=sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057 \\\n    --hash=sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc \\\n    --hash=sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80 \\\n    --hash=sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255 \\\n    --hash=sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1 \\\n  
  --hash=sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972 \\\n    --hash=sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53 \\\n    --hash=sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1 \\\n    --hash=sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423 \\\n    --hash=sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a \\\n    --hash=sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160 \\\n    --hash=sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c \\\n    --hash=sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd \\\n    --hash=sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa \\\n    --hash=sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5 \\\n    --hash=sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b \\\n    --hash=sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa \\\n    --hash=sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef \\\n    --hash=sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44 \\\n    --hash=sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4 \\\n    --hash=sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156 \\\n    --hash=sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753 \\\n    --hash=sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28 \\\n    --hash=sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d \\\n    --hash=sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a \\\n    --hash=sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304 \\\n    --hash=sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008 \\\n    --hash=sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429 \\\n 
   --hash=sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72 \\\n    --hash=sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399 \\\n    --hash=sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3 \\\n    --hash=sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392 \\\n    --hash=sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167 \\\n    --hash=sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c \\\n    --hash=sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774 \\\n    --hash=sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351 \\\n    --hash=sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76 \\\n    --hash=sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875 \\\n    --hash=sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd \\\n    --hash=sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28 \\\n    --hash=sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db\n    # via\n    #   aiohttp\n    #   yarl\npackaging==23.2 \\\n    --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \\\n    --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7\n    # via pytest\npluggy==1.6.0 \\\n    --hash=sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3 \\\n    --hash=sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746\n    # via\n    #   pytest\n    #   pytest-cov\npropcache==0.2.1 \\\n    --hash=sha256:03ff9d3f665769b2a85e6157ac8b439644f2d7fd17615a82fa55739bc97863f4 \\\n    --hash=sha256:049324ee97bb67285b49632132db351b41e77833678432be52bdd0289c0e05e4 \\\n    --hash=sha256:081a430aa8d5e8876c6909b67bd2d937bfd531b0382d3fdedb82612c618bc41a \\\n    --hash=sha256:0f022d381747f0dfe27e99d928e31bc51a18b65bb9e481ae0af1380a6725dd1f \\\n    
--hash=sha256:12d1083f001ace206fe34b6bdc2cb94be66d57a850866f0b908972f90996b3e9 \\\n    --hash=sha256:14d86fe14b7e04fa306e0c43cdbeebe6b2c2156a0c9ce56b815faacc193e320d \\\n    --hash=sha256:160291c60081f23ee43d44b08a7e5fb76681221a8e10b3139618c5a9a291b84e \\\n    --hash=sha256:1672137af7c46662a1c2be1e8dc78cb6d224319aaa40271c9257d886be4363a6 \\\n    --hash=sha256:19a0f89a7bb9d8048d9c4370c9c543c396e894c76be5525f5e1ad287f1750ddf \\\n    --hash=sha256:1ac2f5fe02fa75f56e1ad473f1175e11f475606ec9bd0be2e78e4734ad575034 \\\n    --hash=sha256:1cd9a1d071158de1cc1c71a26014dcdfa7dd3d5f4f88c298c7f90ad6f27bb46d \\\n    --hash=sha256:1ffc3cca89bb438fb9c95c13fc874012f7b9466b89328c3c8b1aa93cdcfadd16 \\\n    --hash=sha256:297878dc9d0a334358f9b608b56d02e72899f3b8499fc6044133f0d319e2ec30 \\\n    --hash=sha256:2d3af2e79991102678f53e0dbf4c35de99b6b8b58f29a27ca0325816364caaba \\\n    --hash=sha256:30b43e74f1359353341a7adb783c8f1b1c676367b011709f466f42fda2045e95 \\\n    --hash=sha256:3156628250f46a0895f1f36e1d4fbe062a1af8718ec3ebeb746f1d23f0c5dc4d \\\n    --hash=sha256:31f5af773530fd3c658b32b6bdc2d0838543de70eb9a2156c03e410f7b0d3aae \\\n    --hash=sha256:3935bfa5fede35fb202c4b569bb9c042f337ca4ff7bd540a0aa5e37131659348 \\\n    --hash=sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2 \\\n    --hash=sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64 \\\n    --hash=sha256:4160d9283bd382fa6c0c2b5e017acc95bc183570cd70968b9202ad6d8fc48dce \\\n    --hash=sha256:4a571d97dbe66ef38e472703067021b1467025ec85707d57e78711c085984e54 \\\n    --hash=sha256:4e6281aedfca15301c41f74d7005e6e3f4ca143584ba696ac69df4f02f40d629 \\\n    --hash=sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54 \\\n    --hash=sha256:556fc6c10989f19a179e4321e5d678db8eb2924131e64652a51fe83e4c3db0e1 \\\n    --hash=sha256:574faa3b79e8ebac7cb1d7930f51184ba1ccf69adfdec53a12f319a06030a68b \\\n    --hash=sha256:58791550b27d5488b1bb52bc96328456095d96206a250d28d874fafe11b3dfaf \\\n   
 --hash=sha256:5b750a8e5a1262434fb1517ddf64b5de58327f1adc3524a5e44c2ca43305eb0b \\\n    --hash=sha256:5d97151bc92d2b2578ff7ce779cdb9174337390a535953cbb9452fb65164c587 \\\n    --hash=sha256:5eee736daafa7af6d0a2dc15cc75e05c64f37fc37bafef2e00d77c14171c2097 \\\n    --hash=sha256:6445804cf4ec763dc70de65a3b0d9954e868609e83850a47ca4f0cb64bd79fea \\\n    --hash=sha256:647894f5ae99c4cf6bb82a1bb3a796f6e06af3caa3d32e26d2350d0e3e3faf24 \\\n    --hash=sha256:66d4cfda1d8ed687daa4bc0274fcfd5267873db9a5bc0418c2da19273040eeb7 \\\n    --hash=sha256:6a9a8c34fb7bb609419a211e59da8887eeca40d300b5ea8e56af98f6fbbb1541 \\\n    --hash=sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6 \\\n    --hash=sha256:6b6fb63ae352e13748289f04f37868099e69dba4c2b3e271c46061e82c745634 \\\n    --hash=sha256:70693319e0b8fd35dd863e3e29513875eb15c51945bf32519ef52927ca883bc3 \\\n    --hash=sha256:781e65134efaf88feb447e8c97a51772aa75e48b794352f94cb7ea717dedda0d \\\n    --hash=sha256:819ce3b883b7576ca28da3861c7e1a88afd08cc8c96908e08a3f4dd64a228034 \\\n    --hash=sha256:857112b22acd417c40fa4595db2fe28ab900c8c5fe4670c7989b1c0230955465 \\\n    --hash=sha256:887d9b0a65404929641a9fabb6452b07fe4572b269d901d622d8a34a4e9043b2 \\\n    --hash=sha256:8b3489ff1ed1e8315674d0775dc7d2195fb13ca17b3808721b54dbe9fd020faf \\\n    --hash=sha256:92fc4500fcb33899b05ba73276dfb684a20d31caa567b7cb5252d48f896a91b1 \\\n    --hash=sha256:9403db39be1393618dd80c746cb22ccda168efce239c73af13c3763ef56ffc04 \\\n    --hash=sha256:98110aa363f1bb4c073e8dcfaefd3a5cea0f0834c2aab23dda657e4dab2f53b5 \\\n    --hash=sha256:999779addc413181912e984b942fbcc951be1f5b3663cd80b2687758f434c583 \\\n    --hash=sha256:9caac6b54914bdf41bcc91e7eb9147d331d29235a7c967c150ef5df6464fd1bb \\\n    --hash=sha256:a7a078f5d37bee6690959c813977da5291b24286e7b962e62a94cec31aa5188b \\\n    --hash=sha256:a7e65eb5c003a303b94aa2c3852ef130230ec79e349632d030e9571b87c4698c \\\n    --hash=sha256:a96dc1fa45bd8c407a0af03b2d5218392729e1822b0c32e62c5bf7eeb5fb3958 \\\n  
  --hash=sha256:aca405706e0b0a44cc6bfd41fbe89919a6a56999157f6de7e182a990c36e37bc \\\n    --hash=sha256:accb6150ce61c9c4b7738d45550806aa2b71c7668c6942f17b0ac182b6142fd4 \\\n    --hash=sha256:ad1af54a62ffe39cf34db1aa6ed1a1873bd548f6401db39d8e7cd060b9211f82 \\\n    --hash=sha256:ae1aa1cd222c6d205853b3013c69cd04515f9d6ab6de4b0603e2e1c33221303e \\\n    --hash=sha256:b2d0a12018b04f4cb820781ec0dffb5f7c7c1d2a5cd22bff7fb055a2cb19ebce \\\n    --hash=sha256:b480c6a4e1138e1aa137c0079b9b6305ec6dcc1098a8ca5196283e8a49df95a9 \\\n    --hash=sha256:b74c261802d3d2b85c9df2dfb2fa81b6f90deeef63c2db9f0e029a3cac50b518 \\\n    --hash=sha256:ba278acf14471d36316159c94a802933d10b6a1e117b8554fe0d0d9b75c9d536 \\\n    --hash=sha256:bb6178c241278d5fe853b3de743087be7f5f4c6f7d6d22a3b524d323eecec505 \\\n    --hash=sha256:bf72af5e0fb40e9babf594308911436c8efde3cb5e75b6f206c34ad18be5c052 \\\n    --hash=sha256:bfd3223c15bebe26518d58ccf9a39b93948d3dcb3e57a20480dfdd315356baff \\\n    --hash=sha256:c214999039d4f2a5b2073ac506bba279945233da8c786e490d411dfc30f855c1 \\\n    --hash=sha256:c2f992c07c0fca81655066705beae35fc95a2fa7366467366db627d9f2ee097f \\\n    --hash=sha256:cba4cfa1052819d16699e1d55d18c92b6e094d4517c41dd231a8b9f87b6fa681 \\\n    --hash=sha256:cea7daf9fc7ae6687cf1e2c049752f19f146fdc37c2cc376e7d0032cf4f25347 \\\n    --hash=sha256:cf6c4150f8c0e32d241436526f3c3f9cbd34429492abddbada2ffcff506c51af \\\n    --hash=sha256:d09c333d36c1409d56a9d29b3a1b800a42c76a57a5a8907eacdbce3f18768246 \\\n    --hash=sha256:d27b84d5880f6d8aa9ae3edb253c59d9f6642ffbb2c889b78b60361eed449787 \\\n    --hash=sha256:d2ccec9ac47cf4e04897619c0e0c1a48c54a71bdf045117d3a26f80d38ab1fb0 \\\n    --hash=sha256:d71264a80f3fcf512eb4f18f59423fe82d6e346ee97b90625f283df56aee103f \\\n    --hash=sha256:d93f3307ad32a27bda2e88ec81134b823c240aa3abb55821a8da553eed8d9439 \\\n    --hash=sha256:d9631c5e8b5b3a0fda99cb0d29c18133bca1e18aea9effe55adb3da1adef80d3 \\\n    --hash=sha256:ddfab44e4489bd79bda09d84c430677fc7f0a4939a73d2bba3073036f487a0a6 \\\n 
   --hash=sha256:e7048abd75fe40712005bcfc06bb44b9dfcd8e101dda2ecf2f5aa46115ad07ca \\\n    --hash=sha256:e73091191e4280403bde6c9a52a6999d69cdfde498f1fdf629105247599b57ec \\\n    --hash=sha256:e800776a79a5aabdb17dcc2346a7d66d0777e942e4cd251defeb084762ecd17d \\\n    --hash=sha256:edc9fc7051e3350643ad929df55c451899bb9ae6d24998a949d2e4c87fb596d3 \\\n    --hash=sha256:f089118d584e859c62b3da0892b88a83d611c2033ac410e929cb6754eec0ed16 \\\n    --hash=sha256:f174bbd484294ed9fdf09437f889f95807e5f229d5d93588d34e92106fbf6717 \\\n    --hash=sha256:f508b0491767bb1f2b87fdfacaba5f7eddc2f867740ec69ece6d1946d29029a6 \\\n    --hash=sha256:f7a31fc1e1bd362874863fdeed71aed92d348f5336fd84f2197ba40c59f061bd \\\n    --hash=sha256:f9479aa06a793c5aeba49ce5c5692ffb51fcd9a7016e017d555d5e2b0045d212\n    # via\n    #   aiohttp\n    #   yarl\npyfakefs==6.2.0 \\\n    --hash=sha256:0968a49db692694ffed420e54a9f1cbae4636637b880e8ab09c8ccc0f11bd7ae \\\n    --hash=sha256:e59a36db447bf509ce9c97ab3d1510c08cc51895c5311325a560a5e5b5dc1940\n    # via -r dev-requirements.in\npygments==2.20.0 \\\n    --hash=sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f \\\n    --hash=sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176\n    # via pytest\npytest==9.0.3 \\\n    --hash=sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9 \\\n    --hash=sha256:b86ada508af81d19edeb213c681b1d48246c1a91d304c6c81a427674c17eb91c\n    # via\n    #   -r dev-requirements.in\n    #   pytest-aiohttp\n    #   pytest-asyncio\n    #   pytest-cov\n    #   pytest-mock\npytest-aiohttp==1.1.0 \\\n    --hash=sha256:147de8cb164f3fc9d7196967f109ab3c0b93ea3463ab50631e56438eab7b5adc \\\n    --hash=sha256:f39a11693a0dce08dd6c542d241e199dd8047a6e6596b2bcfa60d373f143456d\n    # via -r dev-requirements.in\npytest-asyncio==1.3.0 \\\n    --hash=sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5 \\\n    
--hash=sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5\n    # via pytest-aiohttp\npytest-cov==7.1.0 \\\n    --hash=sha256:30674f2b5f6351aa09702a9c8c364f6a01c27aae0c1366ae8016160d1efc56b2 \\\n    --hash=sha256:a0461110b7865f9a271aa1b51e516c9a95de9d696734a2f71e3e78f46e1d4678\n    # via -r dev-requirements.in\npytest-mock==3.15.1 \\\n    --hash=sha256:0a25e2eb88fe5168d535041d09a4529a188176ae608a6d249ee65abc0949630d \\\n    --hash=sha256:1849a238f6f396da19762269de72cb1814ab44416fa73a8686deac10b0d87a0f\n    # via -r dev-requirements.in\ntyping-extensions==4.15.0 \\\n    --hash=sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466 \\\n    --hash=sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548\n    # via\n    #   aiosignal\n    #   pytest-asyncio\nyarl==1.18.3 \\\n    --hash=sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba \\\n    --hash=sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193 \\\n    --hash=sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318 \\\n    --hash=sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee \\\n    --hash=sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e \\\n    --hash=sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1 \\\n    --hash=sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a \\\n    --hash=sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186 \\\n    --hash=sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1 \\\n    --hash=sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50 \\\n    --hash=sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640 \\\n    --hash=sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb \\\n    --hash=sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8 \\\n    
--hash=sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc \\\n    --hash=sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5 \\\n    --hash=sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58 \\\n    --hash=sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2 \\\n    --hash=sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393 \\\n    --hash=sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24 \\\n    --hash=sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b \\\n    --hash=sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910 \\\n    --hash=sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c \\\n    --hash=sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272 \\\n    --hash=sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed \\\n    --hash=sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1 \\\n    --hash=sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04 \\\n    --hash=sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d \\\n    --hash=sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5 \\\n    --hash=sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d \\\n    --hash=sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889 \\\n    --hash=sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae \\\n    --hash=sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b \\\n    --hash=sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c \\\n    --hash=sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576 \\\n    --hash=sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34 \\\n    --hash=sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477 \\\n   
 --hash=sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990 \\\n    --hash=sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2 \\\n    --hash=sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512 \\\n    --hash=sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069 \\\n    --hash=sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a \\\n    --hash=sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6 \\\n    --hash=sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0 \\\n    --hash=sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8 \\\n    --hash=sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb \\\n    --hash=sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa \\\n    --hash=sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8 \\\n    --hash=sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e \\\n    --hash=sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e \\\n    --hash=sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985 \\\n    --hash=sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8 \\\n    --hash=sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1 \\\n    --hash=sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5 \\\n    --hash=sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690 \\\n    --hash=sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10 \\\n    --hash=sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789 \\\n    --hash=sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b \\\n    --hash=sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca \\\n    --hash=sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e \\\n  
  --hash=sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5 \\\n    --hash=sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59 \\\n    --hash=sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9 \\\n    --hash=sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8 \\\n    --hash=sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db \\\n    --hash=sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde \\\n    --hash=sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7 \\\n    --hash=sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb \\\n    --hash=sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3 \\\n    --hash=sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6 \\\n    --hash=sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285 \\\n    --hash=sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb \\\n    --hash=sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8 \\\n    --hash=sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482 \\\n    --hash=sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd \\\n    --hash=sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75 \\\n    --hash=sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760 \\\n    --hash=sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782 \\\n    --hash=sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53 \\\n    --hash=sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2 \\\n    --hash=sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1 \\\n    --hash=sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719 \\\n    --hash=sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62\n    
# via aiohttp\n"
  },
  {
    "path": "mypy-requirements.txt",
    "content": "aiohttp==3.13.5\nalive_progress>=3.3.0\nmypy==1.20.2\npyfakefs\npytest\npytest-mock\npython-gnupg  # untyped :(\nsigstore==3.6.7\ntypes-paramiko\ntypes-requests\n"
  },
  {
    "path": "pyproject.toml",
    "content": "[tool.pytest.ini_options]\nasyncio_mode = \"auto\"\nasyncio_default_fixture_loop_scope = \"function\"\n\n[tool.mypy]\npython_version = \"3.12\"\npretty = true\nstrict = true\n\n# Extra checks that aren't included in --strict\nenable_error_code = \"ignore-without-code,redundant-expr,truthy-iterable\"\nextra_checks = true\nwarn_unreachable = true\n\nexclude = [\n  \"^tests/test_release_tag.py$\",\n  \"^tests/test_run_release.py$\",\n  \"^tests/test_sbom.py$\",\n  \"^windows-release/merge-and-upload.py$\",\n  \"^windows-release/purge.py$\",\n]\n"
  },
  {
    "path": "release.py",
    "content": "#!/usr/bin/env python3\n\n\"\"\"An assistant for making Python releases.\n\nOriginal code by Benjamin Peterson\nAdditions by Barry Warsaw, Georg Brandl and Benjamin Peterson\n\"\"\"\n\nfrom __future__ import annotations\n\nimport datetime\nimport glob\nimport hashlib\nimport json\nimport optparse\nimport os\nimport re\nimport readline  # noqa: F401\nimport shutil\nimport subprocess\nimport sys\nimport tempfile\nimport urllib.request\nfrom collections.abc import Callable, Generator, Sequence\nfrom contextlib import contextmanager\nfrom dataclasses import dataclass\nfrom functools import cache\nfrom pathlib import Path\nfrom typing import (\n    Any,\n    Literal,\n    Protocol,\n    Self,\n    overload,\n)\nfrom urllib.request import urlopen\n\nCOMMASPACE = \", \"\nSPACE = \" \"\ntag_cre = re.compile(r\"(\\d+)(?:\\.(\\d+)(?:\\.(\\d+))?)?(?:([ab]|rc)(\\d+))?$\")\n\n\nclass ReleaseShelf(Protocol):\n    def close(self) -> None: ...\n\n    @overload\n    def get(self, key: Literal[\"finished\"], default: bool | None = None) -> bool: ...\n\n    @overload\n    def get(\n        self, key: Literal[\"completed_tasks\"], default: list[Task] | None = None\n    ) -> list[Task]: ...\n\n    @overload\n    def get(self, key: Literal[\"gpg_key\"], default: str | None = None) -> str: ...\n\n    @overload\n    def get(self, key: Literal[\"git_repo\"], default: Path | None = None) -> Path: ...\n\n    @overload\n    def get(self, key: Literal[\"auth_info\"], default: str | None = None) -> str: ...\n\n    @overload\n    def get(self, key: Literal[\"ssh_user\"], default: str | None = None) -> str: ...\n\n    @overload\n    def get(\n        self, key: Literal[\"ssh_key\"], default: str | None = None\n    ) -> str | None: ...\n\n    @overload\n    def get(self, key: Literal[\"sign_gpg\"], default: bool | None = None) -> bool: ...\n\n    @overload\n    def get(\n        self, key: Literal[\"security_release\"], default: bool | None = None\n    ) -> bool: ...\n\n    
@overload\n    def get(self, key: Literal[\"release\"], default: Tag | None = None) -> Tag: ...\n\n    @overload\n    def __getitem__(self, key: Literal[\"finished\"]) -> bool: ...\n\n    @overload\n    def __getitem__(self, key: Literal[\"completed_tasks\"]) -> list[Task]: ...\n\n    @overload\n    def __getitem__(self, key: Literal[\"gpg_key\"]) -> str: ...\n\n    @overload\n    def __getitem__(self, key: Literal[\"git_repo\"]) -> Path: ...\n\n    @overload\n    def __getitem__(self, key: Literal[\"auth_info\"]) -> str: ...\n\n    @overload\n    def __getitem__(self, key: Literal[\"ssh_user\"]) -> str: ...\n\n    @overload\n    def __getitem__(self, key: Literal[\"ssh_key\"]) -> str | None: ...\n\n    @overload\n    def __getitem__(self, key: Literal[\"sign_gpg\"]) -> bool: ...\n\n    @overload\n    def __getitem__(self, key: Literal[\"security_release\"]) -> bool: ...\n\n    @overload\n    def __getitem__(self, key: Literal[\"release\"]) -> Tag: ...\n\n    @overload\n    def __setitem__(self, key: Literal[\"finished\"], value: bool) -> None: ...\n\n    @overload\n    def __setitem__(\n        self, key: Literal[\"completed_tasks\"], value: list[Task]\n    ) -> None: ...\n\n    @overload\n    def __setitem__(self, key: Literal[\"gpg_key\"], value: str) -> None: ...\n\n    @overload\n    def __setitem__(self, key: Literal[\"git_repo\"], value: Path) -> None: ...\n\n    @overload\n    def __setitem__(self, key: Literal[\"auth_info\"], value: str) -> None: ...\n\n    @overload\n    def __setitem__(self, key: Literal[\"ssh_user\"], value: str) -> None: ...\n\n    @overload\n    def __setitem__(self, key: Literal[\"ssh_key\"], value: str | None) -> None: ...\n\n    @overload\n    def __setitem__(self, key: Literal[\"sign_gpg\"], value: bool) -> None: ...\n\n    @overload\n    def __setitem__(self, key: Literal[\"security_release\"], value: bool) -> None: ...\n\n    @overload\n    def __setitem__(self, key: Literal[\"release\"], value: Tag) -> None: 
...\n\n\n@dataclass\nclass Task:\n    function: Callable[[ReleaseShelf], None]\n    description: str\n\n    def __call__(self, db: ReleaseShelf) -> Any:\n        return getattr(self, \"function\")(db)\n\n\nclass Tag:\n    def __init__(self, tag_name: str) -> None:\n        # if tag is \".\", use current directory name as tag\n        # e.g. if current directory name is \"3.4.6\",\n        # \"release.py --bump 3.4.6\" and \"release.py --bump .\" are the same\n        if tag_name == \".\":\n            tag_name = os.path.basename(os.getcwd())\n        result = tag_cre.match(tag_name)\n        if result is None:\n            error(f\"tag {tag_name} is not valid\")\n        assert result is not None\n        data = list(result.groups())\n        if data[3] is None:\n            # A final release.\n            self.is_final = True\n            data[3] = \"f\"\n        else:\n            self.is_final = False\n        # For everything else, None means 0.\n        for i, thing in enumerate(data):\n            if thing is None:\n                data[i] = 0\n        self.major = int(data[0])\n        self.minor = int(data[1])\n        self.patch = int(data[2])\n        self.level = data[3]\n        self.serial = int(data[4])\n        # This has the effect of normalizing the version.\n        self.text = self.normalized()\n        if self.level != \"f\":\n            assert self.level is not None\n            self.text += self.level + str(self.serial)\n        self.basic_version = f\"{self.major}.{self.minor}\"\n\n    def __str__(self) -> str:\n        return self.text\n\n    def normalized(self) -> str:\n        return f\"{self.major}.{self.minor}.{self.patch}\"\n\n    @property\n    def branch(self) -> str:\n        if self.is_alpha_release or self.is_feature_freeze_release:\n            return \"main\"\n        return f\"{self.major}.{self.minor}\"\n\n    @property\n    def is_alpha_release(self) -> bool:\n        return self.level == \"a\"\n\n    @property\n    def 
is_release_candidate(self) -> bool:\n        return self.level == \"rc\"\n\n    @property\n    def is_feature_freeze_release(self) -> bool:\n        return self.level == \"b\" and self.serial == 1\n\n    @property\n    def is_security_release(self) -> bool:\n        url = \"https://peps.python.org/api/release-cycle.json\"\n        with urlopen(url) as response:\n            data = json.loads(response.read())\n        return str(data[self.basic_version][\"status\"]) == \"security\"\n\n    @property\n    def nickname(self) -> str:\n        return self.text.replace(\".\", \"\")\n\n    @property\n    def gitname(self) -> str:\n        return \"v\" + self.text\n\n    @property\n    def long_name(self) -> str:\n        if self.is_final:\n            return self.text\n\n        level = {\n            \"a\": \"alpha\",\n            \"b\": \"beta\",\n            \"rc\": \"release candidate\",\n        }[self.level]\n        return f\"{self.normalized()} {level} {self.serial}\"\n\n    def next_minor_release(self) -> Self:\n        return self.__class__(f\"{self.major}.{int(self.minor)+1}.0a0\")\n\n    def as_tuple(self) -> tuple[int, int, int, str, int]:\n        assert isinstance(self.level, str)\n        return self.major, self.minor, self.patch, self.level, self.serial\n\n    @property\n    def committed_at(self) -> datetime.datetime:\n        # Fetch the epoch of the tagged commit for build reproducibility.\n        proc = subprocess.run(\n            [\"git\", \"log\", self.gitname, \"-1\", \"--pretty=%ct\"], stdout=subprocess.PIPE\n        )\n        if proc.returncode != 0:\n            error(f\"Couldn't fetch the epoch of tag {self.gitname}\")\n        return datetime.datetime.fromtimestamp(\n            int(proc.stdout.decode().strip()), tz=datetime.timezone.utc\n        )\n\n    @property\n    def includes_docs(self) -> bool:\n        \"\"\"True if docs should be included in the release\"\"\"\n        return self.is_final or self.is_release_candidate\n\n    
@property\n    def doc_version(self) -> str:\n        \"\"\"Text used for notes in docs like 'Added in x.y'\"\"\"\n        # - ignore levels (alpha/beta/rc are preparation for the full release)\n        # - use just X.Y for patch 0\n        if self.patch == 0:\n            return f\"{self.major}.{self.minor}\"\n        else:\n            return f\"{self.major}.{self.minor}.{self.patch}\"\n\n\ndef error(*msgs: str) -> None:\n    print(\"**ERROR**\", file=sys.stderr)\n    for msg in msgs:\n        print(msg, file=sys.stderr)\n    sys.exit(1)\n\n\ndef run_cmd(\n    cmd: Sequence[str] | str, silent: bool = False, shell: bool = False, **kwargs: Any\n) -> None:\n    if shell:\n        cmd = SPACE.join(cmd)\n    if not silent:\n        print(f\"Executing {cmd}\")\n    try:\n        if silent:\n            subprocess.check_call(cmd, shell=shell, stdout=subprocess.PIPE, **kwargs)\n        else:\n            subprocess.check_call(cmd, shell=shell, **kwargs)\n    except subprocess.CalledProcessError:\n        error(f\"{cmd} failed\")\n\n\ndef ask_question(question: str) -> bool:\n    answer = \"\"\n    print(question)\n    while answer not in (\"yes\", \"no\"):\n        answer = input(\"Enter yes or no: \")\n        if answer == \"yes\":\n            return True\n        elif answer == \"no\":\n            return False\n        else:\n            print(\"Please enter yes or no.\")\n    return True\n\n\nreadme_re = re.compile(r\"This is Python version 3\\.\\d\").match\n\n\ndef chdir_to_repo_root() -> str:\n    # find the root of the local CPython repo\n    # note that we can't ask git, because we might\n    # be in an exported directory tree!\n\n    # we intentionally start in a (probably nonexistent) subtree\n    # the first thing the while loop does is .., basically\n    path = os.path.abspath(\"garglemox\")\n    while True:\n        next_path = os.path.dirname(path)\n        if next_path == path:\n            sys.exit(\"You're not inside a CPython repo right now!\")\n       
 path = next_path\n\n        os.chdir(path)\n\n        def test_first_line(\n            filename: str,\n            test: Callable[[str], object],\n        ) -> bool:\n            if not os.path.exists(filename):\n                return False\n            with open(filename) as f:\n                lines = f.read().split(\"\\n\")\n                if not (lines and test(lines[0])):\n                    return False\n            return True\n\n        if not test_first_line(\"README.rst\", readme_re):\n            continue\n        if not test_first_line(\"LICENSE\", \"A. HISTORY OF THE SOFTWARE\".__eq__):\n            continue\n        if not os.path.exists(\"Include/Python.h\"):\n            continue\n        if not os.path.exists(\"Python/ceval.c\"):\n            continue\n\n        break\n\n    root = path\n    return root\n\n\ndef get_output(args: list[str]) -> bytes:\n    return subprocess.check_output(args)\n\n\ndef check_env() -> None:\n    if \"EDITOR\" not in os.environ:\n        error(\"editor not detected.\", \"Please set your EDITOR environment variable\")\n    if not os.path.exists(\".git\"):\n        error(\"CWD is not a git clone\")\n\n\ndef get_arg_parser() -> optparse.OptionParser:\n    usage = \"%prog [options] tagname\"\n    p = optparse.OptionParser(usage=usage)\n    p.add_option(\n        \"-b\",\n        \"--bump\",\n        default=False,\n        action=\"store_true\",\n        help=\"bump the revision number in important files\",\n    )\n    p.add_option(\n        \"-e\",\n        \"--export\",\n        default=False,\n        action=\"store_true\",\n        help=\"Export the git tag to a tarball and build docs\",\n    )\n    p.add_option(\n        \"-u\",\n        \"--upload\",\n        metavar=\"username\",\n        help=\"Upload the tarballs and docs to dinsdale\",\n    )\n    p.add_option(\n        \"-m\",\n        \"--branch\",\n        default=False,\n        action=\"store_true\",\n        help=\"Create a maintenance branch to go 
along with the release\",\n    )\n    p.add_option(\n        \"-t\",\n        \"--tag\",\n        default=False,\n        action=\"store_true\",\n        help=\"Tag the release in Subversion\",\n    )\n    p.add_option(\n        \"-d\",\n        \"--done\",\n        default=False,\n        action=\"store_true\",\n        help=\"Do post-release cleanups (i.e.  you're done!)\",\n    )\n    p.add_option(\n        \"--skip-docs\",\n        default=False,\n        action=\"store_true\",\n        help=\"Skip building the documentation during export\",\n    )\n    return p\n\n\ndef constant_replace(\n    filename: str,\n    updated_constants: str,\n    comment_start: str = \"/*\",\n    comment_end: str = \"*/\",\n) -> None:\n    \"\"\"Inserts in between --start constant-- and --end constant-- in a file\"\"\"\n    start_tag = comment_start + \"--start constants--\" + comment_end\n    end_tag = comment_start + \"--end constants--\" + comment_end\n    with open(filename, encoding=\"ascii\") as infile, open(\n        filename + \".new\", \"w\", encoding=\"ascii\"\n    ) as outfile:\n        found_constants = False\n        waiting_for_end = False\n        for line in infile:\n            if line[:-1] == start_tag:\n                print(start_tag, file=outfile)\n                print(updated_constants, file=outfile)\n                print(end_tag, file=outfile)\n                waiting_for_end = True\n                found_constants = True\n            elif line[:-1] == end_tag:\n                waiting_for_end = False\n            elif waiting_for_end:\n                pass\n            else:\n                outfile.write(line)\n    if not found_constants:\n        error(f\"Constant section delimiters not found: {filename}\")\n    os.rename(filename + \".new\", filename)\n\n\ndef tweak_patchlevel(\n    tag: Tag, filename: str = \"Include/patchlevel.h\", done: bool = False\n) -> None:\n    print(f\"Updating {filename}...\", end=\" \")\n    template = '''\n#define 
PY_MAJOR_VERSION\\t{tag.major}\n#define PY_MINOR_VERSION\\t{tag.minor}\n#define PY_MICRO_VERSION\\t{tag.patch}\n#define PY_RELEASE_LEVEL\\t{level_def}\n#define PY_RELEASE_SERIAL\\t{tag.serial}\n\n/* Version as a string */\n#define PY_VERSION      \\t\\\"{tag.text}{plus}\"'''.strip()\n    assert isinstance(tag.level, str)\n    level_def = {\n        \"a\": \"PY_RELEASE_LEVEL_ALPHA\",\n        \"b\": \"PY_RELEASE_LEVEL_BETA\",\n        \"rc\": \"PY_RELEASE_LEVEL_GAMMA\",\n        \"f\": \"PY_RELEASE_LEVEL_FINAL\",\n    }[tag.level]\n    new_constants = template.format(\n        tag=tag, level_def=level_def, plus=done and \"+\" or \"\"\n    )\n    if tag.as_tuple() >= (3, 7, 0, \"a\", 3):\n        new_constants = new_constants.expandtabs()\n    constant_replace(filename, new_constants)\n    print(\"done\")\n\n\n@cache\ndef get_pep_number(version: str) -> str:\n    \"\"\"Fetch PEP number for a Python version from peps.python.org.\n\n    Returns the PEP number as a string, or \"TODO\" if not found.\n    \"\"\"\n    url = \"https://peps.python.org/api/release-cycle.json\"\n    with urllib.request.urlopen(url, timeout=10) as response:\n        data = json.loads(response.read().decode())\n        if version in data:\n            pep = data[version].get(\"pep\")\n            if pep:\n                return str(pep)\n    return \"TODO\"\n\n\ndef tweak_readme(tag: Tag, filename: str = \"README.rst\") -> None:\n    print(f\"Updating {filename}...\", end=\" \")\n    readme = Path(filename)\n\n    # Update first line: \"This is Python version X.Y.Z {release_level} N\"\n    # and update length of underline in second line to match.\n    lines = readme.read_text().split(\"\\n\")\n    this_is = f\"This is Python version {tag.long_name}\"\n    underline = \"=\" * len(this_is)\n    lines[0] = this_is\n    lines[1] = underline\n    content = \"\\n\".join(lines)\n\n    DOCS_URL = r\"https://docs\\.python\\.org/\"\n    X_Y = r\"\\d+\\.\\d+\"\n\n    # Replace in: 3.14 
<https://docs.python.org/3.14/whatsnew/3.14.html>`_\n    content = re.sub(\n        rf\"{X_Y} (<{DOCS_URL}){X_Y}(/whatsnew/){X_Y}(\\.html>`_)\",\n        rf\"{tag.basic_version} \\g<1>{tag.basic_version}\\g<2>{tag.basic_version}\\g<3>\",\n        content,\n    )\n\n    # Replace in: `Documentation for Python 3.14 <https://docs.python.org/3.14/>`_\n    content = re.sub(\n        rf\"(`Documentation for Python ){X_Y}( <{DOCS_URL}){X_Y}(/>`_)\",\n        rf\"\\g<1>{tag.basic_version}\\g<2>{tag.basic_version}\\g<3>\",\n        content,\n    )\n\n    # Get PEP number for this version\n    pep_number = get_pep_number(tag.basic_version)\n    pep_padded = pep_number.zfill(4) if pep_number != \"TODO\" else \"TODO\"\n\n    # Replace in: `PEP 745 <https://peps.python.org/pep-0745/>`__ for Python 3.14\n    content = re.sub(\n        rf\"(`PEP )\\d+( <https://peps\\.python\\.org/pep-)\\d+(/>`__ for Python ){X_Y}\",\n        rf\"\\g<1>{pep_number}\\g<2>{pep_padded}\\g<3>{tag.basic_version}\",\n        content,\n    )\n\n    readme.write_text(content)\n    print(\"done\")\n\n\ndef bump(tag: Tag) -> None:\n    print(f\"Bumping version to {tag}\")\n\n    tweak_patchlevel(tag)\n    tweak_readme(tag)\n\n    extra_work = False\n    other_files = []\n    if tag.patch == 0 and tag.level == \"a\" and tag.serial == 0:\n        extra_work = True\n        other_files += [\n            \"configure.ac\",\n            \"Doc/tutorial/interpreter.rst\",\n            \"Doc/tutorial/stdlib.rst\",\n            \"Doc/tutorial/stdlib2.rst\",\n            \"PC/pyconfig.h.in\",\n            \"PCbuild/rt.bat\",\n            \".github/ISSUE_TEMPLATE/bug.yml\",\n            \".github/ISSUE_TEMPLATE/crash.yml\",\n        ]\n    print(\"\\nManual editing time...\")\n    for filename in other_files:\n        if os.path.exists(filename):\n            print(f\"Edit {filename}\")\n            manual_edit(filename)\n        else:\n            print(f\"Skipping {filename}\")\n\n    print(\"Bumped revision\")\n    
if extra_work:\n        print(\"configure.ac has changed; re-run autotools!\")\n    print(\"Please commit and use --tag\")\n\n\ndef manual_edit(filename: str) -> None:\n    editor = os.environ[\"EDITOR\"].split()\n    run_cmd([*editor, filename])\n\n\n@contextmanager\ndef pushd(new: str) -> Generator[None, None, None]:\n    print(f\"chdir'ing to {new}\")\n    old = os.getcwd()\n    os.chdir(new)\n    try:\n        yield\n    finally:\n        os.chdir(old)\n\n\ndef make_dist(name: str) -> None:\n    try:\n        os.mkdir(name)\n    except OSError:\n        if os.path.isdir(name):\n            print(f\"WARNING: dist dir {name} already exists\", file=sys.stderr)\n        else:\n            error(f\"{name}/ is not a directory\")\n    else:\n        print(f\"created dist directory {name}\")\n\n\ndef tarball(source: str, clamp_mtime: str) -> None:\n    \"\"\"Build tarballs for a directory.\"\"\"\n    print(\"Making .tgz\")\n    base = os.path.basename(source)\n    tgz = os.path.join(\"src\", base + \".tgz\")\n    xz = os.path.join(\"src\", base + \".tar.xz\")\n    # Recommended options for creating reproducible tarballs from:\n    # https://www.gnu.org/software/tar/manual/html_node/Reproducibility.html#Reproducibility\n    # and https://reproducible-builds.org/docs/archives/\n    repro_options = [\n        # Sorts the entries in the tarball by name.\n        \"--sort=name\",\n        # Sets a maximum 'modified time' of entries in tarball.\n        f\"--mtime={clamp_mtime}\",\n        \"--clamp-mtime\",\n        # Sets the owner uid and gid to 0.\n        \"--owner=0\",\n        \"--group=0\",\n        \"--numeric-owner\",\n        # Omits process ID, file access, and status change times.\n        \"--pax-option=exthdr.name=%d/PaxHeaders/%f,delete=atime,delete=ctime\",\n        # Omit irrelevant info about file permissions.\n        \"--mode=go+u,go-w\",\n    ]\n    run_cmd(\n        [\n            \"tar\",\n            \"cf\",\n            tgz,\n            
*repro_options,\n            \"--use-compress-program\",\n            \"gzip --no-name -9\",\n            source,\n        ]\n    )\n    print(\"Making .tar.xz\")\n    run_cmd([\"tar\", \"cJf\", xz, *repro_options, source])\n    print(\"Calculating SHA-256 sums\")\n    checksum_tgz = hashlib.sha256()\n    with open(tgz, \"rb\") as data:\n        checksum_tgz.update(data.read())\n    checksum_xz = hashlib.sha256()\n    with open(xz, \"rb\") as data:\n        checksum_xz.update(data.read())\n    print(f\"  {checksum_tgz.hexdigest()}  {os.path.getsize(tgz):8}  {tgz}\")\n    print(f\"  {checksum_xz.hexdigest()}  {os.path.getsize(xz):8}  {xz}\")\n\n\ndef export(tag: Tag, silent: bool = False, skip_docs: bool = False) -> None:\n    make_dist(tag.text)\n    print(\"Exporting tag:\", tag.text)\n    archivename = f\"Python-{tag.text}\"\n    # I have not figured out how to get git to directly produce an\n    # archive directory like hg can, so use git to produce a temporary\n    # tarball then expand it with tar.\n    archivetempfile = f\"{archivename}.tar\"\n    run_cmd(\n        [\n            \"git\",\n            \"archive\",\n            \"--format=tar\",\n            f\"--prefix={archivename}/\",\n            \"-o\",\n            archivetempfile,\n            tag.gitname,\n        ],\n        silent=silent,\n    )\n    with pushd(tag.text):\n        archivetempfile = f\"../{archivetempfile}\"\n        run_cmd([\"tar\", \"-xf\", archivetempfile], silent=silent)\n        os.unlink(archivetempfile)\n        with pushd(archivename):\n            # Touch a few files that get generated so they're up-to-date in\n            # the tarball.\n            #\n            # Note, with the demise of \"make touch\" and the hg touch\n            # extension, touches should not be needed anymore,\n            # but keep it for now as a reminder.\n            maybe_touchables = [\n                \"Include/internal/pycore_ast.h\",\n                
\"Include/internal/pycore_ast_state.h\",\n                \"Python/Python-ast.c\",\n                \"Python/opcode_targets.h\",\n            ]\n            touchables = [file for file in maybe_touchables if os.path.exists(file)]\n            print(\n                \"Touching:\",\n                COMMASPACE.join(name.rsplit(\"/\", 1)[-1] for name in touchables),\n            )\n            for name in touchables:\n                os.utime(name, None)\n\n            # build docs *before* we do \"blurb export\"\n            # because docs now depend on Misc/NEWS.d\n            # and we remove Misc/NEWS.d as part of cleanup for export\n            #\n            # If --skip-docs is provided we don't build and docs.\n            if not skip_docs and (tag.is_final or tag.level == \"rc\"):\n                docdist = build_docs()\n\n            print(\"Using blurb to build Misc/NEWS\")\n            run_cmd([\"blurb\", \"merge\"], silent=silent)\n\n            # Remove files we don't want to ship in tarballs.\n            print(\"Removing VCS .*ignore, .git*, Misc/NEWS.d, et al\")\n            for name in (\".gitattributes\", \".gitignore\"):\n                try:\n                    os.unlink(name)\n                except OSError:\n                    pass\n\n            # Remove directories we don't want to ship in tarballs.\n            run_cmd([\"blurb\", \"export\"], silent=silent)\n            for name in (\".azure-pipelines\", \".git\", \".github\", \"Misc/mypy\"):\n                shutil.rmtree(name, ignore_errors=True)\n\n        if not skip_docs and (tag.is_final or tag.level == \"rc\"):\n            shutil.copytree(docdist, \"docs\")\n\n        with pushd(os.path.join(archivename, \"Doc\")):\n            print(\"Removing doc build artifacts\")\n            shutil.rmtree(\"venv\", ignore_errors=True)\n            shutil.rmtree(\"build\", ignore_errors=True)\n            shutil.rmtree(\"dist\", ignore_errors=True)\n\n        with pushd(archivename):\n            
print(\"Zapping pycs\")\n            run_cmd(\n                [\n                    \"find\",\n                    \".\",\n                    \"-depth\",\n                    \"-name\",\n                    \"__pycache__\",\n                    \"-exec\",\n                    \"rm\",\n                    \"-rf\",\n                    \"{}\",\n                    \";\",\n                ],\n                silent=silent,\n            )\n            run_cmd(\n                [\"find\", \".\", \"-name\", \"*.py[co]\", \"-exec\", \"rm\", \"-f\", \"{}\", \";\"],\n                silent=silent,\n            )\n\n        os.mkdir(\"src\")\n        tarball(archivename, tag.committed_at.strftime(\"%Y-%m-%d %H:%M:%SZ\"))\n    print()\n    print(f\"**Now extract the archives in {tag.text}/src and run the tests**\")\n    print(\"**You may also want to run make install and re-test**\")\n\n\ndef build_docs() -> str:\n    \"\"\"Build and tarball the documentation\"\"\"\n    print(\"Building docs\")\n    with tempfile.TemporaryDirectory() as venv:\n        run_cmd([\"python3\", \"-m\", \"venv\", venv])\n        pip = os.path.join(venv, \"bin\", \"pip\")\n        run_cmd([pip, \"install\", \"-r\", \"Doc/requirements.txt\"])\n        sphinx_build = os.path.join(venv, \"bin\", \"sphinx-build\")\n        blurb = os.path.join(venv, \"bin\", \"blurb\")\n        docs_env = {\n            **os.environ,\n            \"BLURB\": blurb,\n            \"SPHINXBUILD\": sphinx_build,\n            \"SPHINXOPTS\": \"-j10\",\n        }\n        with pushd(\"Doc\"):\n            run_cmd((\"make\", \"dist-epub\"), env=docs_env)\n            run_cmd((\"make\", \"dist-html\"), env=docs_env)\n            run_cmd((\"make\", \"dist-texinfo\"), env=docs_env)\n            run_cmd((\"make\", \"dist-text\"), env=docs_env)\n            return os.path.abspath(\"dist\")\n\n\ndef upload(tag: Tag, username: str) -> None:\n    \"\"\"scp everything to dinsdale\"\"\"\n    address = 
f'\"{username}@dinsdale.python.org:'\n\n    def scp(from_loc: str, to_loc: str) -> None:\n        run_cmd([\"scp\", from_loc, address + to_loc])\n\n    with pushd(tag.text):\n        print(\"Uploading source tarballs\")\n        scp(\"src\", f\"/data/python-releases/{tag.nickname}\")\n        print(\"Upload doc tarballs\")\n        scp(\"docs\", f\"/data/python-releases/doc/{tag.nickname}\")\n        print(\n            \"* Now change the permissions on the tarballs so they are \"\n            \"writable by the webmaster group. *\"\n        )\n\n\ndef make_tag(tag: Tag, *, sign_gpg: bool = True) -> bool:\n    # make sure we've run blurb export\n    good_files = glob.glob(\"Misc/NEWS.d/\" + str(tag) + \".rst\")\n    bad_files = list(glob.glob(\"Misc/NEWS.d/next/*/0*.rst\"))\n    bad_files.extend(glob.glob(\"Misc/NEWS.d/next/*/2*.rst\"))\n    if bad_files or not good_files:\n        print('It doesn\\'t look like you ran \"blurb release\" yet.')\n        if bad_files:\n            print(\"There are still reST files in NEWS.d/next/...\")\n        if not good_files:\n            print(f\"There is no Misc/NEWS.d/{tag}.rst file.\")\n        if not ask_question(\"Are you sure you want to tag?\"):\n            print(\"Aborting.\")\n            return False\n\n    # make sure we're on the correct branch\n    if tag.patch > 0:\n        if (\n            get_output([\"git\", \"name-rev\", \"--name-only\", \"HEAD\"]).strip().decode()\n            != f\"branch-{tag}\"\n        ):\n            print(\"It doesn't look like you're on the correct branch.\")\n            if not ask_question(\"Are you sure you want to tag?\"):\n                print(\"Aborting.\")\n                return False\n\n    if sign_gpg:\n        print(\"Signing tag\")\n        uid = os.environ.get(\"GPG_KEY_FOR_RELEASE\")\n        if not uid:\n            print(\"List of available private keys:\")\n            run_cmd(['gpg -K | grep -A 1 \"^sec\"'], shell=True)\n            uid = input(\"Please enter key ID 
to use for signing: \")\n        run_cmd(\n            [\"git\", \"tag\", \"-s\", \"-u\", uid, tag.gitname, \"-m\", \"Python \" + str(tag)]\n        )\n    else:\n        print(\"Creating tag\")\n        run_cmd([\"git\", \"tag\", tag.gitname, \"-m\", \"Python \" + str(tag)])\n\n    return True\n\n\ndef done(tag: Tag) -> None:\n    tweak_patchlevel(tag, done=True)\n\n\ndef main(argv: Any) -> None:\n    chdir_to_repo_root()\n    parser = get_arg_parser()\n    options, args = parser.parse_args(argv)\n    if options.skip_docs and not options.export:\n        error(\"--skip-docs option has no effect without --export\")\n    if len(args) != 2:\n        if \"RELEASE_TAG\" not in os.environ:\n            parser.print_usage()\n            sys.exit(1)\n        tagname = os.environ[\"RELEASE_TAG\"]\n    else:\n        tagname = args[1]\n    tag = Tag(tagname)\n    if not (options.export or options.upload):\n        check_env()\n    if options.bump:\n        bump(tag)\n    if options.tag:\n        make_tag(tag)\n    if options.export:\n        export(tag, skip_docs=options.skip_docs)\n    if options.upload:\n        upload(tag, options.upload)\n    if options.done:\n        done(tag)\n\n\nif __name__ == \"__main__\":\n    main(sys.argv)\n"
  },
  {
    "path": "requirements.in",
    "content": "--only-binary :all:\n\nparamiko\nalive_progress>=3.3.0\npython-gnupg\naiohttp\nblurb>=1.2.1\n# Pending https://github.com/python/release-tools/pull/289\nsigstore>=3,<4\n"
  },
  {
    "path": "requirements.txt",
    "content": "#\n# This file is autogenerated by pip-compile with Python 3.12\n# by the following command:\n#\n#    pip-compile --generate-hashes --output-file=requirements.txt requirements.in\n#\n--only-binary :all:\n\nabout-time==4.2.1 \\\n    --hash=sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341\n    # via alive-progress\naiohappyeyeballs==2.6.1 \\\n    --hash=sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8\n    # via aiohttp\naiohttp==3.13.3 \\\n    --hash=sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf \\\n    --hash=sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c \\\n    --hash=sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c \\\n    --hash=sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423 \\\n    --hash=sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f \\\n    --hash=sha256:0db318f7a6f065d84cb1e02662c526294450b314a02bd9e2a8e67f0d8564ce40 \\\n    --hash=sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2 \\\n    --hash=sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf \\\n    --hash=sha256:147e422fd1223005c22b4fe080f5d93ced44460f5f9c105406b753612b587821 \\\n    --hash=sha256:1cb93e166e6c28716c8c6aeb5f99dfb6d5ccf482d29fe9bf9a794110e6d0ab64 \\\n    --hash=sha256:215a685b6fbbfcf71dfe96e3eba7a6f58f10da1dfdf4889c7dd856abe430dca7 \\\n    --hash=sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998 \\\n    --hash=sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d \\\n    --hash=sha256:28e027cf2f6b641693a09f631759b4d9ce9165099d2b5d92af9bd4e197690eea \\\n    --hash=sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463 \\\n    --hash=sha256:2ba0eea45eb5cc3172dbfc497c066f19c41bac70963ea1a67d51fc92e4cf9a80 \\\n    --hash=sha256:2be0e9ccf23e8a94f6f0650ce06042cefc6ac703d0d7ab6c7a917289f2539ad4 \\\n    
--hash=sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767 \\\n    --hash=sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43 \\\n    --hash=sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592 \\\n    --hash=sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a \\\n    --hash=sha256:31a83ea4aead760dfcb6962efb1d861db48c34379f2ff72db9ddddd4cda9ea2e \\\n    --hash=sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687 \\\n    --hash=sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8 \\\n    --hash=sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261 \\\n    --hash=sha256:37da61e244d1749798c151421602884db5270faf479cf0ef03af0ff68954c9dd \\\n    --hash=sha256:3b61b7169ababd7802f9568ed96142616a9118dd2be0d1866e920e77ec8fa92a \\\n    --hash=sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4 \\\n    --hash=sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587 \\\n    --hash=sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91 \\\n    --hash=sha256:40c5e40ecc29ba010656c18052b877a1c28f84344825efa106705e835c28530f \\\n    --hash=sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3 \\\n    --hash=sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344 \\\n    --hash=sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6 \\\n    --hash=sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3 \\\n    --hash=sha256:4ae5b5a0e1926e504c81c5b84353e7a5516d8778fbbff00429fe7b05bb25cbce \\\n    --hash=sha256:4e239d501f73d6db1522599e14b9b321a7e3b1de66ce33d53a765d975e9f4808 \\\n    --hash=sha256:56339a36b9f1fc708260c76c87e593e2afb30d26de9ae1eb445b5e051b98a7a1 \\\n    --hash=sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29 \\\n    --hash=sha256:5b179331a481cb5529fca8b432d8d3c7001cb217513c94cd72d668d1248688a3 \\\n   
 --hash=sha256:5b6073099fb654e0a068ae678b10feff95c5cae95bbfcbfa7af669d361a8aa6b \\\n    --hash=sha256:5d2d94f1f5fcbe40838ac51a6ab5704a6f9ea42e72ceda48de5e6b898521da51 \\\n    --hash=sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c \\\n    --hash=sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926 \\\n    --hash=sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64 \\\n    --hash=sha256:642f752c3eb117b105acbd87e2c143de710987e09860d674e068c4c2c441034f \\\n    --hash=sha256:65d2ccb7eabee90ce0503c17716fc77226be026dcc3e65cce859a30db715025b \\\n    --hash=sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e \\\n    --hash=sha256:694976222c711d1d00ba131904beb60534f93966562f64440d0c9d41b8cdb440 \\\n    --hash=sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6 \\\n    --hash=sha256:69c56fbc1993fa17043e24a546959c0178fe2b5782405ad4559e6c13975c15e3 \\\n    --hash=sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d \\\n    --hash=sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415 \\\n    --hash=sha256:75ca857eba4e20ce9f546cd59c7007b33906a4cd48f2ff6ccf1ccfc3b646f279 \\\n    --hash=sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce \\\n    --hash=sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603 \\\n    --hash=sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0 \\\n    --hash=sha256:7e63f210bc1b57ef699035f2b4b6d9ce096b5914414a49b0997c839b2bd2223c \\\n    --hash=sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf \\\n    --hash=sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591 \\\n    --hash=sha256:80dd4c21b0f6237676449c6baaa1039abae86b91636b6c91a7f8e61c87f89540 \\\n    --hash=sha256:81e97251d9298386c2b7dbeb490d3d1badbdc69107fb8c9299dd04eb39bddc0e \\\n    --hash=sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26 \\\n  
  --hash=sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a \\\n    --hash=sha256:859bd3f2156e81dd01432f5849fc73e2243d4a487c4fd26609b1299534ee1845 \\\n    --hash=sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a \\\n    --hash=sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9 \\\n    --hash=sha256:8a60e60746623925eab7d25823329941aee7242d559baa119ca2b253c88a7bd6 \\\n    --hash=sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba \\\n    --hash=sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df \\\n    --hash=sha256:947c26539750deeaee933b000fb6517cc770bbd064bad6033f1cff4803881e43 \\\n    --hash=sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679 \\\n    --hash=sha256:988a8c5e317544fdf0d39871559e67b6341065b87fceac641108c2096d5506b7 \\\n    --hash=sha256:9a9dc347e5a3dc7dfdbc1f82da0ef29e388ddb2ed281bfce9dd8248a313e62b7 \\\n    --hash=sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc \\\n    --hash=sha256:9af5e68ee47d6534d36791bbe9b646d2a7c7deb6fc24d7943628edfbb3581f29 \\\n    --hash=sha256:9b174f267b5cfb9a7dba9ee6859cecd234e9a681841eb85068059bc867fb8f02 \\\n    --hash=sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984 \\\n    --hash=sha256:9d4c940f02f49483b18b079d1c27ab948721852b281f8b015c058100e9421dd1 \\\n    --hash=sha256:9ebf57d09e131f5323464bd347135a88622d1c0976e88ce15b670e7ad57e4bd6 \\\n    --hash=sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632 \\\n    --hash=sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56 \\\n    --hash=sha256:a2212ad43c0833a873d0fb3c63fa1bacedd4cf6af2fee62bf4b739ceec3ab239 \\\n    --hash=sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168 \\\n    --hash=sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc \\\n    --hash=sha256:af71fff7bac6bb7508956696dce8f6eec2bbb045eceb40343944b1ae62b5ef11 \\\n 
   --hash=sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046 \\\n    --hash=sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0 \\\n    --hash=sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3 \\\n    --hash=sha256:b46020d11d23fe16551466c77823df9cc2f2c1e63cc965daf67fa5eec6ca1877 \\\n    --hash=sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1 \\\n    --hash=sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c \\\n    --hash=sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25 \\\n    --hash=sha256:b99281b0704c103d4e11e72a76f1b543d4946fea7dd10767e7e1b5f00d4e5704 \\\n    --hash=sha256:bae5c2ed2eae26cc382020edad80d01f36cb8e746da40b292e68fec40421dc6a \\\n    --hash=sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033 \\\n    --hash=sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1 \\\n    --hash=sha256:bfc1cc2fe31a6026a8a88e4ecfb98d7f6b1fec150cfd708adbfd1d2f42257c29 \\\n    --hash=sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d \\\n    --hash=sha256:c048058117fd649334d81b4b526e94bde3ccaddb20463a815ced6ecbb7d11160 \\\n    --hash=sha256:c0e2d366af265797506f0283487223146af57815b388623f0357ef7eac9b209d \\\n    --hash=sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f \\\n    --hash=sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f \\\n    --hash=sha256:c6b8568a3bb5819a0ad087f16d40e5a3fb6099f39ea1d5625a3edc1e923fc538 \\\n    --hash=sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29 \\\n    --hash=sha256:d5a372fd5afd301b3a89582817fdcdb6c34124787c70dbcc616f259013e7eef7 \\\n    --hash=sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72 \\\n    --hash=sha256:dca68018bf48c251ba17c72ed479f4dafe9dbd5a73707ad8d28a38d11f3d42af \\\n    --hash=sha256:de2c184bb1fe2cbd2cefba613e9db29a5ab559323f994b6737e370d3da0ac455 
\\\n    --hash=sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57 \\\n    --hash=sha256:e50a2e1404f063427c9d027378472316201a2290959a295169bcf25992d04558 \\\n    --hash=sha256:e636b3c5f61da31a92bf0d91da83e58fdfa96f178ba682f11d24f31944cdd28c \\\n    --hash=sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808 \\\n    --hash=sha256:f33ed1a2bf1997a36661874b017f5c4b760f41266341af36febaf271d179f6d7 \\\n    --hash=sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0 \\\n    --hash=sha256:f9444f105664c4ce47a2a7171a2418bce5b7bae45fb610f4e2c36045d85911d3 \\\n    --hash=sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730 \\\n    --hash=sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa \\\n    --hash=sha256:fee0c6bc7db1de362252affec009707a17478a00ec69f797d23ca256e36d5940\n    # via -r requirements.in\naiosignal==1.4.0 \\\n    --hash=sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e\n    # via aiohttp\nalive-progress==3.3.0 \\\n    --hash=sha256:63dd33bb94cde15ad9e5b666dbba8fedf71b72a4935d6fb9a92931e69402c9ff\n    # via -r requirements.in\nannotated-types==0.7.0 \\\n    --hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53\n    # via pydantic\nattrs==24.3.0 \\\n    --hash=sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308\n    # via aiohttp\nbcrypt==4.2.1 \\\n    --hash=sha256:041fa0155c9004eb98a232d54da05c0b41d4b8e66b6fc3cb71b4b3f6144ba837 \\\n    --hash=sha256:04e56e3fe8308a88b77e0afd20bec516f74aecf391cdd6e374f15cbed32783d6 \\\n    --hash=sha256:1340411a0894b7d3ef562fb233e4b6ed58add185228650942bdc885362f32c17 \\\n    --hash=sha256:533e7f3bcf2f07caee7ad98124fab7499cb3333ba2274f7a36cf1daee7409d99 \\\n    --hash=sha256:687cf30e6681eeda39548a93ce9bfbb300e48b4d445a43db4298d2474d2a1e54 \\\n    --hash=sha256:76132c176a6d9953cdc83c296aeaed65e1a708485fd55abf163e0d9f8f16ce0e \\\n    
--hash=sha256:76d3e352b32f4eeb34703370e370997065d28a561e4a18afe4fef07249cb4396 \\\n    --hash=sha256:807261df60a8b1ccd13e6599c779014a362ae4e795f5c59747f60208daddd96d \\\n    --hash=sha256:89df2aea2c43be1e1fa066df5f86c8ce822ab70a30e4c210968669565c0f4685 \\\n    --hash=sha256:8ad2f4528cbf0febe80e5a3a57d7a74e6635e41af1ea5675282a33d769fba413 \\\n    --hash=sha256:8c458cd103e6c5d1d85cf600e546a639f234964d0228909d8f8dbeebff82d526 \\\n    --hash=sha256:8dbd0747208912b1e4ce730c6725cb56c07ac734b3629b60d4398f082ea718ad \\\n    --hash=sha256:909faa1027900f2252a9ca5dfebd25fc0ef1417943824783d1c8418dd7d6df4a \\\n    --hash=sha256:aaa2e285be097050dba798d537b6efd9b698aa88eef52ec98d23dcd6d7cf6fea \\\n    --hash=sha256:adadd36274510a01f33e6dc08f5824b97c9580583bd4487c564fc4617b328005 \\\n    --hash=sha256:b1ee315739bc8387aa36ff127afc99120ee452924e0df517a8f3e4c0187a0f5f \\\n    --hash=sha256:b588af02b89d9fad33e5f98f7838bf590d6d692df7153647724a7f20c186f6bf \\\n    --hash=sha256:b7703ede632dc945ed1172d6f24e9f30f27b1b1a067f32f68bf169c5f08d0425 \\\n    --hash=sha256:c6f5fa3775966cca251848d4d5393ab016b3afed251163c1436fefdec3b02c84 \\\n    --hash=sha256:cde78d385d5e93ece5479a0a87f73cd6fa26b171c786a884f955e165032b262c \\\n    --hash=sha256:cfdf3d7530c790432046c40cda41dfee8c83e29482e6a604f8930b9930e94139 \\\n    --hash=sha256:e158009a54c4c8bc91d5e0da80920d048f918c61a581f0a63e4e93bb556d362f \\\n    --hash=sha256:e84e0e6f8e40a242b11bce56c313edc2be121cec3e0ec2d76fce01f6af33c07c \\\n    --hash=sha256:f85b1ffa09240c89aa2e1ae9f3b1c687104f7b2b9d2098da4e923f1b7082d331\n    # via paramiko\nbetterproto==2.0.0b6 \\\n    --hash=sha256:a0839ec165d110a69d0d116f4d0e2bec8d186af4db826257931f0831dab73fcf\n    # via sigstore-protobuf-specs\nblurb==2.0.0 \\\n    --hash=sha256:f6d0e858dbe94765f6a89b8228217ffdb9c19cff08fc8f2c3153954846d31aa1\n    # via -r requirements.in\ncertifi==2024.12.14 \\\n    --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56\n    # via requests\ncffi==2.0.0 \\\n  
  --hash=sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb \\\n    --hash=sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b \\\n    --hash=sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f \\\n    --hash=sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9 \\\n    --hash=sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44 \\\n    --hash=sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2 \\\n    --hash=sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c \\\n    --hash=sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75 \\\n    --hash=sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65 \\\n    --hash=sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e \\\n    --hash=sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a \\\n    --hash=sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e \\\n    --hash=sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25 \\\n    --hash=sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a \\\n    --hash=sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe \\\n    --hash=sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b \\\n    --hash=sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91 \\\n    --hash=sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592 \\\n    --hash=sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187 \\\n    --hash=sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c \\\n    --hash=sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1 \\\n    --hash=sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94 \\\n    --hash=sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba \\\n 
   --hash=sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb \\\n    --hash=sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165 \\\n    --hash=sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca \\\n    --hash=sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c \\\n    --hash=sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6 \\\n    --hash=sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c \\\n    --hash=sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0 \\\n    --hash=sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743 \\\n    --hash=sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63 \\\n    --hash=sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5 \\\n    --hash=sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5 \\\n    --hash=sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4 \\\n    --hash=sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d \\\n    --hash=sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b \\\n    --hash=sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93 \\\n    --hash=sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205 \\\n    --hash=sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27 \\\n    --hash=sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512 \\\n    --hash=sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d \\\n    --hash=sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c \\\n    --hash=sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037 \\\n    --hash=sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26 \\\n    --hash=sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322 
\\\n    --hash=sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb \\\n    --hash=sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c \\\n    --hash=sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8 \\\n    --hash=sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4 \\\n    --hash=sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414 \\\n    --hash=sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9 \\\n    --hash=sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664 \\\n    --hash=sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9 \\\n    --hash=sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775 \\\n    --hash=sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739 \\\n    --hash=sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc \\\n    --hash=sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062 \\\n    --hash=sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe \\\n    --hash=sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9 \\\n    --hash=sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92 \\\n    --hash=sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5 \\\n    --hash=sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13 \\\n    --hash=sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d \\\n    --hash=sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26 \\\n    --hash=sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f \\\n    --hash=sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495 \\\n    --hash=sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b \\\n    
--hash=sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6 \\\n    --hash=sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c \\\n    --hash=sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef \\\n    --hash=sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5 \\\n    --hash=sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18 \\\n    --hash=sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad \\\n    --hash=sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3 \\\n    --hash=sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7 \\\n    --hash=sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5 \\\n    --hash=sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534 \\\n    --hash=sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49 \\\n    --hash=sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2 \\\n    --hash=sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5 \\\n    --hash=sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453 \\\n    --hash=sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf\n    # via\n    #   cryptography\n    #   pynacl\ncharset-normalizer==3.4.1 \\\n    --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \\\n    --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \\\n    --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \\\n    --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \\\n    --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \\\n    --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \\\n    --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \\\n    
--hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \\\n    --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \\\n    --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \\\n    --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \\\n    --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \\\n    --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \\\n    --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \\\n    --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \\\n    --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \\\n    --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \\\n    --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \\\n    --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \\\n    --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \\\n    --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \\\n    --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \\\n    --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \\\n    --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \\\n    --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \\\n    --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \\\n    --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \\\n    --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \\\n    --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \\\n    --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \\\n   
 --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \\\n    --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \\\n    --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \\\n    --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \\\n    --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \\\n    --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \\\n    --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \\\n    --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \\\n    --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \\\n    --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \\\n    --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \\\n    --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \\\n    --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \\\n    --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \\\n    --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \\\n    --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \\\n    --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \\\n    --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \\\n    --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \\\n    --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \\\n    --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \\\n    --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \\\n    --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \\\n  
  --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \\\n    --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \\\n    --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \\\n    --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \\\n    --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \\\n    --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \\\n    --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \\\n    --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \\\n    --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \\\n    --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \\\n    --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \\\n    --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \\\n    --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \\\n    --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \\\n    --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \\\n    --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \\\n    --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \\\n    --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \\\n    --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \\\n    --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \\\n    --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \\\n    --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \\\n    --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \\\n 
   --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \\\n    --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \\\n    --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \\\n    --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \\\n    --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \\\n    --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \\\n    --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \\\n    --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \\\n    --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \\\n    --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \\\n    --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \\\n    --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \\\n    --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \\\n    --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \\\n    --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616\n    # via requests\ncryptography==46.0.7 \\\n    --hash=sha256:04959522f938493042d595a736e7dbdff6eb6cc2339c11465b3ff89343b65f65 \\\n    --hash=sha256:128c5edfe5e5938b86b03941e94fac9ee793a94452ad1365c9fc3f4f62216832 \\\n    --hash=sha256:1d25aee46d0c6f1a501adcddb2d2fee4b979381346a78558ed13e50aa8a59067 \\\n    --hash=sha256:24402210aa54baae71d99441d15bb5a1919c195398a87b563df84468160a65de \\\n    --hash=sha256:258514877e15963bd43b558917bc9f54cf7cf866c38aa576ebf47a77ddbc43a4 \\\n    --hash=sha256:35719dc79d4730d30f1c2b6474bd6acda36ae2dfae1e3c16f2051f215df33ce0 \\\n    --hash=sha256:397655da831414d165029da9bc483bed2fe0e75dde6a1523ec2fe63f3c46046b \\\n    
--hash=sha256:3986ac1dee6def53797289999eabe84798ad7817f3e97779b5061a95b0ee4968 \\\n    --hash=sha256:420b1e4109cc95f0e5700eed79908cef9268265c773d3a66f7af1eef53d409ef \\\n    --hash=sha256:42a1e5f98abb6391717978baf9f90dc28a743b7d9be7f0751a6f56a75d14065b \\\n    --hash=sha256:462ad5cb1c148a22b2e3bcc5ad52504dff325d17daf5df8d88c17dda1f75f2a4 \\\n    --hash=sha256:506c4ff91eff4f82bdac7633318a526b1d1309fc07ca76a3ad182cb5b686d6d3 \\\n    --hash=sha256:5ad9ef796328c5e3c4ceed237a183f5d41d21150f972455a9d926593a1dcb308 \\\n    --hash=sha256:5d1c02a14ceb9148cc7816249f64f623fbfee39e8c03b3650d842ad3f34d637e \\\n    --hash=sha256:5e51be372b26ef4ba3de3c167cd3d1022934bc838ae9eaad7e644986d2a3d163 \\\n    --hash=sha256:60627cf07e0d9274338521205899337c5d18249db56865f943cbe753aa96f40f \\\n    --hash=sha256:65814c60f8cc400c63131584e3e1fad01235edba2614b61fbfbfa954082db0ee \\\n    --hash=sha256:73510b83623e080a2c35c62c15298096e2a5dc8d51c3b4e1740211839d0dea77 \\\n    --hash=sha256:7bbc6ccf49d05ac8f7d7b5e2e2c33830d4fe2061def88210a126d130d7f71a85 \\\n    --hash=sha256:80406c3065e2c55d7f49a9550fe0c49b3f12e5bfff5dedb727e319e1afb9bf99 \\\n    --hash=sha256:84d4cced91f0f159a7ddacad249cc077e63195c36aac40b4150e7a57e84fffe7 \\\n    --hash=sha256:8a469028a86f12eb7d2fe97162d0634026d92a21f3ae0ac87ed1c4a447886c83 \\\n    --hash=sha256:91bbcb08347344f810cbe49065914fe048949648f6bd5c2519f34619142bbe85 \\\n    --hash=sha256:935ce7e3cfdb53e3536119a542b839bb94ec1ad081013e9ab9b7cfd478b05006 \\\n    --hash=sha256:9694078c5d44c157ef3162e3bf3946510b857df5a3955458381d1c7cfc143ddb \\\n    --hash=sha256:a1529d614f44b863a7b480c6d000fe93b59acee9c82ffa027cfadc77521a9f5e \\\n    --hash=sha256:abad9dac36cbf55de6eb49badd4016806b3165d396f64925bf2999bcb67837ba \\\n    --hash=sha256:b36a4695e29fe69215d75960b22577197aca3f7a25b9cf9d165dcfe9d80bc325 \\\n    --hash=sha256:b7b412817be92117ec5ed95f880defe9cf18a832e8cafacf0a22337dc1981b4d \\\n    --hash=sha256:c5b1ccd1239f48b7151a65bc6dd54bcfcc15e028c8ac126d3fada09db0e07ef1 \\\n   
 --hash=sha256:cbd5fb06b62bd0721e1170273d3f4d5a277044c47ca27ee257025146c34cbdd1 \\\n    --hash=sha256:cdf1a610ef82abb396451862739e3fc93b071c844399e15b90726ef7470eeaf2 \\\n    --hash=sha256:cdfbe22376065ffcf8be74dc9a909f032df19bc58a699456a21712d6e5eabfd0 \\\n    --hash=sha256:d02c738dacda7dc2a74d1b2b3177042009d5cab7c7079db74afc19e56ca1b455 \\\n    --hash=sha256:d151173275e1728cf7839aaa80c34fe550c04ddb27b34f48c232193df8db5842 \\\n    --hash=sha256:d23c8ca48e44ee015cd0a54aeccdf9f09004eba9fc96f38c911011d9ff1bd457 \\\n    --hash=sha256:d3b99c535a9de0adced13d159c5a9cf65c325601aa30f4be08afd680643e9c15 \\\n    --hash=sha256:d5f7520159cd9c2154eb61eb67548ca05c5774d39e9c2c4339fd793fe7d097b2 \\\n    --hash=sha256:db0f493b9181c7820c8134437eb8b0b4792085d37dbb24da050476ccb664e59c \\\n    --hash=sha256:e06acf3c99be55aa3b516397fe42f5855597f430add9c17fa46bf2e0fb34c9bb \\\n    --hash=sha256:ea42cbe97209df307fdc3b155f1b6fa2577c0defa8f1f7d3be7d31d189108ad4 \\\n    --hash=sha256:ebd6daf519b9f189f85c479427bbd6e9c9037862cf8fe89ee35503bd209ed902 \\\n    --hash=sha256:f247c8c1a1fb45e12586afbb436ef21ff1e80670b2861a90353d9b025583d246 \\\n    --hash=sha256:fbfd0e5f273877695cb93baf14b185f4878128b250cc9f8e617ea0c025dfb022 \\\n    --hash=sha256:fc9ab8856ae6cf7c9358430e49b368f3108f050031442eaeb6b9d87e4dcf4e4f \\\n    --hash=sha256:fcd8eac50d9138c1d7fc53a653ba60a2bee81a505f9f8850b6b2888555a45d0e \\\n    --hash=sha256:fdd1736fed309b4300346f88f74cd120c27c56852c3838cab416e7a166f67298 \\\n    --hash=sha256:ffca7aa1d00cf7d6469b988c581598f2259e46215e0140af408966a24cf086ce\n    # via\n    #   paramiko\n    #   pyopenssl\n    #   rfc3161-client\n    #   sigstore\ndnspython==2.7.0 \\\n    --hash=sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86\n    # via email-validator\nemail-validator==2.2.0 \\\n    --hash=sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631\n    # via pydantic\nfrozenlist==1.5.0 \\\n    
--hash=sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e \\\n    --hash=sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf \\\n    --hash=sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6 \\\n    --hash=sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a \\\n    --hash=sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d \\\n    --hash=sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f \\\n    --hash=sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28 \\\n    --hash=sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b \\\n    --hash=sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9 \\\n    --hash=sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2 \\\n    --hash=sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec \\\n    --hash=sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2 \\\n    --hash=sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c \\\n    --hash=sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336 \\\n    --hash=sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4 \\\n    --hash=sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d \\\n    --hash=sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b \\\n    --hash=sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c \\\n    --hash=sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10 \\\n    --hash=sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08 \\\n    --hash=sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942 \\\n    --hash=sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8 \\\n    --hash=sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f \\\n   
 --hash=sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10 \\\n    --hash=sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5 \\\n    --hash=sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6 \\\n    --hash=sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21 \\\n    --hash=sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c \\\n    --hash=sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d \\\n    --hash=sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923 \\\n    --hash=sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608 \\\n    --hash=sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de \\\n    --hash=sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17 \\\n    --hash=sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0 \\\n    --hash=sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f \\\n    --hash=sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641 \\\n    --hash=sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c \\\n    --hash=sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a \\\n    --hash=sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0 \\\n    --hash=sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9 \\\n    --hash=sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab \\\n    --hash=sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f \\\n    --hash=sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3 \\\n    --hash=sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a \\\n    --hash=sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784 \\\n    --hash=sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604 \\\n  
  --hash=sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d \\\n    --hash=sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5 \\\n    --hash=sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03 \\\n    --hash=sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e \\\n    --hash=sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953 \\\n    --hash=sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee \\\n    --hash=sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d \\\n    --hash=sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3 \\\n    --hash=sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039 \\\n    --hash=sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f \\\n    --hash=sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9 \\\n    --hash=sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf \\\n    --hash=sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76 \\\n    --hash=sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba \\\n    --hash=sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171 \\\n    --hash=sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb \\\n    --hash=sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439 \\\n    --hash=sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631 \\\n    --hash=sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972 \\\n    --hash=sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d \\\n    --hash=sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869 \\\n    --hash=sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9 \\\n    --hash=sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411 \\\n 
   --hash=sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723 \\\n    --hash=sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2 \\\n    --hash=sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b \\\n    --hash=sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99 \\\n    --hash=sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e \\\n    --hash=sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840 \\\n    --hash=sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3 \\\n    --hash=sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb \\\n    --hash=sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3 \\\n    --hash=sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0 \\\n    --hash=sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca \\\n    --hash=sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45 \\\n    --hash=sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e \\\n    --hash=sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f \\\n    --hash=sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5 \\\n    --hash=sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307 \\\n    --hash=sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e \\\n    --hash=sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2 \\\n    --hash=sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778 \\\n    --hash=sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a \\\n    --hash=sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30 \\\n    --hash=sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a\n    # via\n    #   aiohttp\n    #   aiosignal\ngraphemeu==0.7.2 \\\n    
--hash=sha256:1444520f6899fd30114fc2a39f297d86d10fa0f23bf7579f772f8bc7efaa2542\n    # via alive-progress\ngrpclib==0.4.8 \\\n    --hash=sha256:a5047733a7acc1c1cee6abf3c841c7c6fab67d2844a45a853b113fa2e6cd2654\n    # via betterproto\nh2==4.3.0 \\\n    --hash=sha256:c438f029a25f7945c69e0ccf0fb951dc3f73a5f6412981daee861431b70e2bdd\n    # via grpclib\nhpack==4.1.0 \\\n    --hash=sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496\n    # via h2\nhyperframe==6.1.0 \\\n    --hash=sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5\n    # via h2\nid==1.5.0 \\\n    --hash=sha256:f1434e1cef91f2cbb8a4ec64663d5a23b9ed43ef44c4c957d02583d61714c658\n    # via sigstore\nidna==3.10 \\\n    --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3\n    # via\n    #   email-validator\n    #   requests\n    #   yarl\ninvoke==2.2.0 \\\n    --hash=sha256:6ea924cc53d4f78e3d98bc436b08069a03077e6f85ad1ddaa8a116d7dad15820\n    # via paramiko\nmarkdown-it-py==3.0.0 \\\n    --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1\n    # via rich\nmdurl==0.1.2 \\\n    --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8\n    # via markdown-it-py\nmultidict==6.1.0 \\\n    --hash=sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f \\\n    --hash=sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056 \\\n    --hash=sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761 \\\n    --hash=sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3 \\\n    --hash=sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b \\\n    --hash=sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6 \\\n    --hash=sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748 \\\n    --hash=sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966 \\\n    
--hash=sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f \\\n    --hash=sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1 \\\n    --hash=sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6 \\\n    --hash=sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada \\\n    --hash=sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305 \\\n    --hash=sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2 \\\n    --hash=sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d \\\n    --hash=sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef \\\n    --hash=sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c \\\n    --hash=sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb \\\n    --hash=sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60 \\\n    --hash=sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6 \\\n    --hash=sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4 \\\n    --hash=sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478 \\\n    --hash=sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81 \\\n    --hash=sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7 \\\n    --hash=sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56 \\\n    --hash=sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3 \\\n    --hash=sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6 \\\n    --hash=sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30 \\\n    --hash=sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb \\\n    --hash=sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506 \\\n    --hash=sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0 \\\n   
 --hash=sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925 \\\n    --hash=sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c \\\n    --hash=sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6 \\\n    --hash=sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e \\\n    --hash=sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95 \\\n    --hash=sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2 \\\n    --hash=sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133 \\\n    --hash=sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2 \\\n    --hash=sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa \\\n    --hash=sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3 \\\n    --hash=sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3 \\\n    --hash=sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436 \\\n    --hash=sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657 \\\n    --hash=sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581 \\\n    --hash=sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492 \\\n    --hash=sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43 \\\n    --hash=sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2 \\\n    --hash=sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2 \\\n    --hash=sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926 \\\n    --hash=sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057 \\\n    --hash=sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc \\\n    --hash=sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80 \\\n    --hash=sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255 \\\n  
  --hash=sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1 \\\n    --hash=sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972 \\\n    --hash=sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53 \\\n    --hash=sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1 \\\n    --hash=sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423 \\\n    --hash=sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a \\\n    --hash=sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160 \\\n    --hash=sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c \\\n    --hash=sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd \\\n    --hash=sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa \\\n    --hash=sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5 \\\n    --hash=sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b \\\n    --hash=sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa \\\n    --hash=sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef \\\n    --hash=sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44 \\\n    --hash=sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4 \\\n    --hash=sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156 \\\n    --hash=sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753 \\\n    --hash=sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28 \\\n    --hash=sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d \\\n    --hash=sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a \\\n    --hash=sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304 \\\n    --hash=sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008 \\\n 
   --hash=sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429 \\\n    --hash=sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72 \\\n    --hash=sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399 \\\n    --hash=sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3 \\\n    --hash=sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392 \\\n    --hash=sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167 \\\n    --hash=sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c \\\n    --hash=sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774 \\\n    --hash=sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351 \\\n    --hash=sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76 \\\n    --hash=sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875 \\\n    --hash=sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd \\\n    --hash=sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28 \\\n    --hash=sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db\n    # via\n    #   aiohttp\n    #   grpclib\n    #   yarl\nparamiko==4.0.0 \\\n    --hash=sha256:0e20e00ac666503bf0b4eda3b6d833465a2b7aff2e2b3d79a8bba5ef144ee3b9\n    # via -r requirements.in\nplatformdirs==4.3.6 \\\n    --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb\n    # via sigstore\npropcache==0.2.1 \\\n    --hash=sha256:03ff9d3f665769b2a85e6157ac8b439644f2d7fd17615a82fa55739bc97863f4 \\\n    --hash=sha256:049324ee97bb67285b49632132db351b41e77833678432be52bdd0289c0e05e4 \\\n    --hash=sha256:081a430aa8d5e8876c6909b67bd2d937bfd531b0382d3fdedb82612c618bc41a \\\n    --hash=sha256:0f022d381747f0dfe27e99d928e31bc51a18b65bb9e481ae0af1380a6725dd1f \\\n    --hash=sha256:12d1083f001ace206fe34b6bdc2cb94be66d57a850866f0b908972f90996b3e9 \\\n    
--hash=sha256:14d86fe14b7e04fa306e0c43cdbeebe6b2c2156a0c9ce56b815faacc193e320d \\\n    --hash=sha256:160291c60081f23ee43d44b08a7e5fb76681221a8e10b3139618c5a9a291b84e \\\n    --hash=sha256:1672137af7c46662a1c2be1e8dc78cb6d224319aaa40271c9257d886be4363a6 \\\n    --hash=sha256:19a0f89a7bb9d8048d9c4370c9c543c396e894c76be5525f5e1ad287f1750ddf \\\n    --hash=sha256:1ac2f5fe02fa75f56e1ad473f1175e11f475606ec9bd0be2e78e4734ad575034 \\\n    --hash=sha256:1cd9a1d071158de1cc1c71a26014dcdfa7dd3d5f4f88c298c7f90ad6f27bb46d \\\n    --hash=sha256:1ffc3cca89bb438fb9c95c13fc874012f7b9466b89328c3c8b1aa93cdcfadd16 \\\n    --hash=sha256:297878dc9d0a334358f9b608b56d02e72899f3b8499fc6044133f0d319e2ec30 \\\n    --hash=sha256:2d3af2e79991102678f53e0dbf4c35de99b6b8b58f29a27ca0325816364caaba \\\n    --hash=sha256:30b43e74f1359353341a7adb783c8f1b1c676367b011709f466f42fda2045e95 \\\n    --hash=sha256:3156628250f46a0895f1f36e1d4fbe062a1af8718ec3ebeb746f1d23f0c5dc4d \\\n    --hash=sha256:31f5af773530fd3c658b32b6bdc2d0838543de70eb9a2156c03e410f7b0d3aae \\\n    --hash=sha256:3935bfa5fede35fb202c4b569bb9c042f337ca4ff7bd540a0aa5e37131659348 \\\n    --hash=sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2 \\\n    --hash=sha256:4160d9283bd382fa6c0c2b5e017acc95bc183570cd70968b9202ad6d8fc48dce \\\n    --hash=sha256:4a571d97dbe66ef38e472703067021b1467025ec85707d57e78711c085984e54 \\\n    --hash=sha256:4e6281aedfca15301c41f74d7005e6e3f4ca143584ba696ac69df4f02f40d629 \\\n    --hash=sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54 \\\n    --hash=sha256:556fc6c10989f19a179e4321e5d678db8eb2924131e64652a51fe83e4c3db0e1 \\\n    --hash=sha256:574faa3b79e8ebac7cb1d7930f51184ba1ccf69adfdec53a12f319a06030a68b \\\n    --hash=sha256:58791550b27d5488b1bb52bc96328456095d96206a250d28d874fafe11b3dfaf \\\n    --hash=sha256:5b750a8e5a1262434fb1517ddf64b5de58327f1adc3524a5e44c2ca43305eb0b \\\n    --hash=sha256:5d97151bc92d2b2578ff7ce779cdb9174337390a535953cbb9452fb65164c587 \\\n   
 --hash=sha256:5eee736daafa7af6d0a2dc15cc75e05c64f37fc37bafef2e00d77c14171c2097 \\\n    --hash=sha256:6445804cf4ec763dc70de65a3b0d9954e868609e83850a47ca4f0cb64bd79fea \\\n    --hash=sha256:647894f5ae99c4cf6bb82a1bb3a796f6e06af3caa3d32e26d2350d0e3e3faf24 \\\n    --hash=sha256:66d4cfda1d8ed687daa4bc0274fcfd5267873db9a5bc0418c2da19273040eeb7 \\\n    --hash=sha256:6a9a8c34fb7bb609419a211e59da8887eeca40d300b5ea8e56af98f6fbbb1541 \\\n    --hash=sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6 \\\n    --hash=sha256:6b6fb63ae352e13748289f04f37868099e69dba4c2b3e271c46061e82c745634 \\\n    --hash=sha256:70693319e0b8fd35dd863e3e29513875eb15c51945bf32519ef52927ca883bc3 \\\n    --hash=sha256:781e65134efaf88feb447e8c97a51772aa75e48b794352f94cb7ea717dedda0d \\\n    --hash=sha256:819ce3b883b7576ca28da3861c7e1a88afd08cc8c96908e08a3f4dd64a228034 \\\n    --hash=sha256:857112b22acd417c40fa4595db2fe28ab900c8c5fe4670c7989b1c0230955465 \\\n    --hash=sha256:887d9b0a65404929641a9fabb6452b07fe4572b269d901d622d8a34a4e9043b2 \\\n    --hash=sha256:8b3489ff1ed1e8315674d0775dc7d2195fb13ca17b3808721b54dbe9fd020faf \\\n    --hash=sha256:92fc4500fcb33899b05ba73276dfb684a20d31caa567b7cb5252d48f896a91b1 \\\n    --hash=sha256:9403db39be1393618dd80c746cb22ccda168efce239c73af13c3763ef56ffc04 \\\n    --hash=sha256:98110aa363f1bb4c073e8dcfaefd3a5cea0f0834c2aab23dda657e4dab2f53b5 \\\n    --hash=sha256:999779addc413181912e984b942fbcc951be1f5b3663cd80b2687758f434c583 \\\n    --hash=sha256:9caac6b54914bdf41bcc91e7eb9147d331d29235a7c967c150ef5df6464fd1bb \\\n    --hash=sha256:a7a078f5d37bee6690959c813977da5291b24286e7b962e62a94cec31aa5188b \\\n    --hash=sha256:a7e65eb5c003a303b94aa2c3852ef130230ec79e349632d030e9571b87c4698c \\\n    --hash=sha256:a96dc1fa45bd8c407a0af03b2d5218392729e1822b0c32e62c5bf7eeb5fb3958 \\\n    --hash=sha256:aca405706e0b0a44cc6bfd41fbe89919a6a56999157f6de7e182a990c36e37bc \\\n    --hash=sha256:accb6150ce61c9c4b7738d45550806aa2b71c7668c6942f17b0ac182b6142fd4 \\\n  
  --hash=sha256:ad1af54a62ffe39cf34db1aa6ed1a1873bd548f6401db39d8e7cd060b9211f82 \\\n    --hash=sha256:ae1aa1cd222c6d205853b3013c69cd04515f9d6ab6de4b0603e2e1c33221303e \\\n    --hash=sha256:b2d0a12018b04f4cb820781ec0dffb5f7c7c1d2a5cd22bff7fb055a2cb19ebce \\\n    --hash=sha256:b480c6a4e1138e1aa137c0079b9b6305ec6dcc1098a8ca5196283e8a49df95a9 \\\n    --hash=sha256:b74c261802d3d2b85c9df2dfb2fa81b6f90deeef63c2db9f0e029a3cac50b518 \\\n    --hash=sha256:ba278acf14471d36316159c94a802933d10b6a1e117b8554fe0d0d9b75c9d536 \\\n    --hash=sha256:bb6178c241278d5fe853b3de743087be7f5f4c6f7d6d22a3b524d323eecec505 \\\n    --hash=sha256:bf72af5e0fb40e9babf594308911436c8efde3cb5e75b6f206c34ad18be5c052 \\\n    --hash=sha256:bfd3223c15bebe26518d58ccf9a39b93948d3dcb3e57a20480dfdd315356baff \\\n    --hash=sha256:c214999039d4f2a5b2073ac506bba279945233da8c786e490d411dfc30f855c1 \\\n    --hash=sha256:c2f992c07c0fca81655066705beae35fc95a2fa7366467366db627d9f2ee097f \\\n    --hash=sha256:cba4cfa1052819d16699e1d55d18c92b6e094d4517c41dd231a8b9f87b6fa681 \\\n    --hash=sha256:cea7daf9fc7ae6687cf1e2c049752f19f146fdc37c2cc376e7d0032cf4f25347 \\\n    --hash=sha256:cf6c4150f8c0e32d241436526f3c3f9cbd34429492abddbada2ffcff506c51af \\\n    --hash=sha256:d09c333d36c1409d56a9d29b3a1b800a42c76a57a5a8907eacdbce3f18768246 \\\n    --hash=sha256:d27b84d5880f6d8aa9ae3edb253c59d9f6642ffbb2c889b78b60361eed449787 \\\n    --hash=sha256:d2ccec9ac47cf4e04897619c0e0c1a48c54a71bdf045117d3a26f80d38ab1fb0 \\\n    --hash=sha256:d71264a80f3fcf512eb4f18f59423fe82d6e346ee97b90625f283df56aee103f \\\n    --hash=sha256:d93f3307ad32a27bda2e88ec81134b823c240aa3abb55821a8da553eed8d9439 \\\n    --hash=sha256:d9631c5e8b5b3a0fda99cb0d29c18133bca1e18aea9effe55adb3da1adef80d3 \\\n    --hash=sha256:ddfab44e4489bd79bda09d84c430677fc7f0a4939a73d2bba3073036f487a0a6 \\\n    --hash=sha256:e7048abd75fe40712005bcfc06bb44b9dfcd8e101dda2ecf2f5aa46115ad07ca \\\n    --hash=sha256:e73091191e4280403bde6c9a52a6999d69cdfde498f1fdf629105247599b57ec \\\n 
   --hash=sha256:e800776a79a5aabdb17dcc2346a7d66d0777e942e4cd251defeb084762ecd17d \\\n    --hash=sha256:edc9fc7051e3350643ad929df55c451899bb9ae6d24998a949d2e4c87fb596d3 \\\n    --hash=sha256:f089118d584e859c62b3da0892b88a83d611c2033ac410e929cb6754eec0ed16 \\\n    --hash=sha256:f174bbd484294ed9fdf09437f889f95807e5f229d5d93588d34e92106fbf6717 \\\n    --hash=sha256:f508b0491767bb1f2b87fdfacaba5f7eddc2f867740ec69ece6d1946d29029a6 \\\n    --hash=sha256:f7a31fc1e1bd362874863fdeed71aed92d348f5336fd84f2197ba40c59f061bd \\\n    --hash=sha256:f9479aa06a793c5aeba49ce5c5692ffb51fcd9a7016e017d555d5e2b0045d212\n    # via\n    #   aiohttp\n    #   yarl\npyasn1==0.6.3 \\\n    --hash=sha256:a80184d120f0864a52a073acc6fc642847d0be408e7c7252f31390c0f4eadcde\n    # via sigstore\npycparser==2.22 \\\n    --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc\n    # via cffi\npydantic[email]==2.12.5 \\\n    --hash=sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d\n    # via\n    #   sigstore\n    #   sigstore-rekor-types\npydantic-core==2.41.5 \\\n    --hash=sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90 \\\n    --hash=sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740 \\\n    --hash=sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504 \\\n    --hash=sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84 \\\n    --hash=sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33 \\\n    --hash=sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c \\\n    --hash=sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0 \\\n    --hash=sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0 \\\n    --hash=sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a \\\n    --hash=sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34 \\\n    
--hash=sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2 \\\n    --hash=sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3 \\\n    --hash=sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815 \\\n    --hash=sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14 \\\n    --hash=sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba \\\n    --hash=sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375 \\\n    --hash=sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf \\\n    --hash=sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963 \\\n    --hash=sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1 \\\n    --hash=sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808 \\\n    --hash=sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553 \\\n    --hash=sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1 \\\n    --hash=sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2 \\\n    --hash=sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5 \\\n    --hash=sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470 \\\n    --hash=sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2 \\\n    --hash=sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b \\\n    --hash=sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660 \\\n    --hash=sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c \\\n    --hash=sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093 \\\n    --hash=sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5 \\\n    --hash=sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594 \\\n    --hash=sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008 \\\n   
 --hash=sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a \\\n    --hash=sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a \\\n    --hash=sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd \\\n    --hash=sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284 \\\n    --hash=sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586 \\\n    --hash=sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869 \\\n    --hash=sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294 \\\n    --hash=sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f \\\n    --hash=sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66 \\\n    --hash=sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51 \\\n    --hash=sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc \\\n    --hash=sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97 \\\n    --hash=sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a \\\n    --hash=sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d \\\n    --hash=sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9 \\\n    --hash=sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c \\\n    --hash=sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07 \\\n    --hash=sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36 \\\n    --hash=sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e \\\n    --hash=sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05 \\\n    --hash=sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e \\\n    --hash=sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941 \\\n    --hash=sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3 \\\n  
  --hash=sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612 \\\n    --hash=sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3 \\\n    --hash=sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b \\\n    --hash=sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe \\\n    --hash=sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146 \\\n    --hash=sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11 \\\n    --hash=sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60 \\\n    --hash=sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd \\\n    --hash=sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b \\\n    --hash=sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c \\\n    --hash=sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a \\\n    --hash=sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460 \\\n    --hash=sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1 \\\n    --hash=sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf \\\n    --hash=sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf \\\n    --hash=sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858 \\\n    --hash=sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2 \\\n    --hash=sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9 \\\n    --hash=sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2 \\\n    --hash=sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3 \\\n    --hash=sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6 \\\n    --hash=sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770 \\\n    --hash=sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d \\\n 
   --hash=sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc \\\n    --hash=sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23 \\\n    --hash=sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26 \\\n    --hash=sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa \\\n    --hash=sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8 \\\n    --hash=sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d \\\n    --hash=sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3 \\\n    --hash=sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d \\\n    --hash=sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034 \\\n    --hash=sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9 \\\n    --hash=sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1 \\\n    --hash=sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56 \\\n    --hash=sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b \\\n    --hash=sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c \\\n    --hash=sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a \\\n    --hash=sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e \\\n    --hash=sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9 \\\n    --hash=sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5 \\\n    --hash=sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a \\\n    --hash=sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556 \\\n    --hash=sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e \\\n    --hash=sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49 \\\n    --hash=sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2 
\\\n    --hash=sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9 \\\n    --hash=sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b \\\n    --hash=sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc \\\n    --hash=sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb \\\n    --hash=sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0 \\\n    --hash=sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8 \\\n    --hash=sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82 \\\n    --hash=sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69 \\\n    --hash=sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b \\\n    --hash=sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c \\\n    --hash=sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75 \\\n    --hash=sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5 \\\n    --hash=sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f \\\n    --hash=sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad \\\n    --hash=sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b \\\n    --hash=sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7 \\\n    --hash=sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425 \\\n    --hash=sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52\n    # via pydantic\npygments==2.20.0 \\\n    --hash=sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176\n    # via rich\npyjwt==2.12.0 \\\n    --hash=sha256:9bb459d1bdd0387967d287f5656bf7ec2b9a26645d1961628cda1764e087fd6e\n    # via sigstore\npynacl==1.6.2 \\\n    --hash=sha256:04316d1fc625d860b6c162fff704eb8426b1a8bcd3abacea11142cbd99a6b574 \\\n    
--hash=sha256:22de65bb9010a725b0dac248f353bb072969c94fa8d6b1f34b87d7953cf7bbe4 \\\n    --hash=sha256:26bfcd00dcf2cf160f122186af731ae30ab120c18e8375684ec2670dccd28130 \\\n    --hash=sha256:2fef529ef3ee487ad8113d287a593fa26f48ee3620d92ecc6f1d09ea38e0709b \\\n    --hash=sha256:320ef68a41c87547c91a8b58903c9caa641ab01e8512ce291085b5fe2fcb7590 \\\n    --hash=sha256:3bffb6d0f6becacb6526f8f42adfb5efb26337056ee0831fb9a7044d1a964444 \\\n    --hash=sha256:44081faff368d6c5553ccf55322ef2819abb40e25afaec7e740f159f74813634 \\\n    --hash=sha256:46065496ab748469cdd999246d17e301b2c24ae2fdf739132e580a0e94c94a87 \\\n    --hash=sha256:5811c72b473b2f38f7e2a3dc4f8642e3a3e9b5e7317266e4ced1fba85cae41aa \\\n    --hash=sha256:622d7b07cc5c02c666795792931b50c91f3ce3c2649762efb1ef0d5684c81594 \\\n    --hash=sha256:62985f233210dee6548c223301b6c25440852e13d59a8b81490203c3227c5ba0 \\\n    --hash=sha256:68be3a09455743ff9505491220b64440ced8973fe930f270c8e07ccfa25b1f9e \\\n    --hash=sha256:834a43af110f743a754448463e8fd61259cd4ab5bbedcf70f9dabad1d28a394c \\\n    --hash=sha256:8845c0631c0be43abdd865511c41eab235e0be69c81dc66a50911594198679b0 \\\n    --hash=sha256:8a66d6fb6ae7661c58995f9c6435bda2b1e68b54b598a6a10247bfcdadac996c \\\n    --hash=sha256:8b097553b380236d51ed11356c953bf8ce36a29a3e596e934ecabe76c985a577 \\\n    --hash=sha256:a84bf1c20339d06dc0c85d9aea9637a24f718f375d861b2668b2f9f96fa51145 \\\n    --hash=sha256:a9f9932d8d2811ce1a8ffa79dcbdf3970e7355b5c8eb0c1a881a57e7f7d96e88 \\\n    --hash=sha256:bc4a36b28dd72fb4845e5d8f9760610588a96d5a51f01d84d8c6ff9849968c14 \\\n    --hash=sha256:c8a231e36ec2cab018c4ad4358c386e36eede0319a0c41fed24f840b1dac59f6 \\\n    --hash=sha256:c949ea47e4206af7c8f604b8278093b674f7c79ed0d4719cc836902bf4517465 \\\n    --hash=sha256:d071c6a9a4c94d79eb665db4ce5cedc537faf74f2355e4d502591d850d3913c0 \\\n    --hash=sha256:d29bfe37e20e015a7d8b23cfc8bd6aa7909c92a1b8f41ee416bbb3e79ef182b2 \\\n    --hash=sha256:fe9847ca47d287af41e82be1dd5e23023d3c31a951da134121ab02e42ac218c9\n    # 
via paramiko\npyopenssl==26.0.0 \\\n    --hash=sha256:df94d28498848b98cc1c0ffb8ef1e71e40210d3b0a8064c9d29571ed2904bf81\n    # via sigstore\npython-dateutil==2.9.0.post0 \\\n    --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427\n    # via betterproto\npython-gnupg==0.5.6 \\\n    --hash=sha256:b5050a55663d8ab9fcc8d97556d229af337a87a3ebebd7054cbd8b7e2043394a\n    # via -r requirements.in\nrequests==2.33.0 \\\n    --hash=sha256:3324635456fa185245e24865e810cecec7b4caf933d7eb133dcde67d48cee69b\n    # via\n    #   id\n    #   sigstore\nrfc3161-client==1.0.6 \\\n    --hash=sha256:0b3920334f7334ec3bb9c319d53a5d08cd43b6883f75e2669cfd869cd264d53a \\\n    --hash=sha256:1671b1be16480ea54c0d36239efd0fb62c13dd572a9865a5e91fea39f1b95303 \\\n    --hash=sha256:1be4e1133f0f7fe875629f2c358285503c1cfc79cebfbc3fb4e28b8a57d6f1a4 \\\n    --hash=sha256:2bc9835467f6166edd6f876470484e5b294ee141add6eff6a59f5047937aaa75 \\\n    --hash=sha256:3da328ba08139846b1ab3a03402ba8a5f3659a640dbe2cd6a18f7f342e99ba98 \\\n    --hash=sha256:4ef4b096abe7d55b020526e39932c2721939a6c55e9a5cd3b3e77897a0942937 \\\n    --hash=sha256:63355099d932851eac507806bb9d0937dab546a66d5857d888168799ec635f6d \\\n    --hash=sha256:78cdc6bde331492cb94f69328831d5c56b271012b00c6f1784c2e4b33837d585 \\\n    --hash=sha256:8102165201c5224cf6e6634bfd68c6a39e8f800601188216f8210face4861215 \\\n    --hash=sha256:85a1d71d1eb2c9bced2b3eb75e96f9fe49732ec2567b5dafa1dd889fff42b7fe \\\n    --hash=sha256:8631f7db7c1327bf87ee6a9a8681b4cd6bc2a90aae651388f29d045cd9ff1ac9 \\\n    --hash=sha256:940e1fc95ec0ca734927a82bcb5363fa988ef1a085d238ff0c861f29c0cfb746 \\\n    --hash=sha256:9a98e9c7ff632d9571fcea25fb70bde0e8339b86368aef67a65f6a301f125733 \\\n    --hash=sha256:b7ad54288a49379b01b1d0d9d15167d2b7c6c7f940332ab85eeb4a6e844da8c7 \\\n    --hash=sha256:bc379167238df32cbcc1dc9c324088559c1734331030f5293d75f4fd37b5f4f6 \\\n    --hash=sha256:bed6ef8e194cab85f6ec5678995b6406bb568383ebb6a4301be40e7939dd28d9 \\\n    
--hash=sha256:e16ed34f6f33fd62aa3b1f83615ecf2f96e1b1f57df4e1a36570b3f895333972 \\\n    --hash=sha256:e3caffaebf43242b000c4a6659d60eaf19c3b161ccbe05b15634a856c9ea7e61\n    # via sigstore\nrfc8785==0.1.4 \\\n    --hash=sha256:520d690b448ecf0703691c76e1a34a24ddcd4fc5bc41d589cb7c58ec651bcd48\n    # via sigstore\nrich==13.9.4 \\\n    --hash=sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90\n    # via sigstore\nsecuresystemslib==1.2.0 \\\n    --hash=sha256:fa63abcb1cf4dba4f2df964f623baa45bc39029980d7a0a2119d90731942afc6\n    # via tuf\nsigstore==3.6.7 \\\n    --hash=sha256:85d7512499eded0ffc310462d8be81731a631320751e390d74370d6458864df9\n    # via -r requirements.in\nsigstore-protobuf-specs==0.3.2 \\\n    --hash=sha256:50c99fa6747a3a9c5c562a43602cf76df0b199af28f0e9d4319b6775630425ea\n    # via sigstore\nsigstore-rekor-types==0.0.18 \\\n    --hash=sha256:b62bf38c5b1a62bc0d7fe0ee51a0709e49311d137c7880c329882a8f4b2d1d78\n    # via sigstore\nsix==1.17.0 \\\n    --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274\n    # via python-dateutil\ntuf==6.0.0 \\\n    --hash=sha256:458f663a233d95cc76dde0e1a3d01796516a05ce2781fefafebe037f7729601a\n    # via sigstore\ntyping-extensions==4.15.0 \\\n    --hash=sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548\n    # via\n    #   aiosignal\n    #   pydantic\n    #   pydantic-core\n    #   pyopenssl\n    #   typing-inspection\ntyping-inspection==0.4.2 \\\n    --hash=sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7\n    # via pydantic\nurllib3==2.6.3 \\\n    --hash=sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4\n    # via\n    #   requests\n    #   tuf\nyarl==1.18.3 \\\n    --hash=sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba \\\n    --hash=sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193 \\\n    --hash=sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318 
\\\n    --hash=sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee \\\n    --hash=sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e \\\n    --hash=sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1 \\\n    --hash=sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a \\\n    --hash=sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186 \\\n    --hash=sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1 \\\n    --hash=sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50 \\\n    --hash=sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640 \\\n    --hash=sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb \\\n    --hash=sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8 \\\n    --hash=sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc \\\n    --hash=sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5 \\\n    --hash=sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58 \\\n    --hash=sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2 \\\n    --hash=sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393 \\\n    --hash=sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24 \\\n    --hash=sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b \\\n    --hash=sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910 \\\n    --hash=sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c \\\n    --hash=sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272 \\\n    --hash=sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed \\\n    --hash=sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1 \\\n    
--hash=sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04 \\\n    --hash=sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d \\\n    --hash=sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5 \\\n    --hash=sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d \\\n    --hash=sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889 \\\n    --hash=sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae \\\n    --hash=sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b \\\n    --hash=sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c \\\n    --hash=sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576 \\\n    --hash=sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34 \\\n    --hash=sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477 \\\n    --hash=sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990 \\\n    --hash=sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2 \\\n    --hash=sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512 \\\n    --hash=sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069 \\\n    --hash=sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a \\\n    --hash=sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6 \\\n    --hash=sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0 \\\n    --hash=sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8 \\\n    --hash=sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb \\\n    --hash=sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa \\\n    --hash=sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8 \\\n    --hash=sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e \\\n   
 --hash=sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e \\\n    --hash=sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985 \\\n    --hash=sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8 \\\n    --hash=sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5 \\\n    --hash=sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690 \\\n    --hash=sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10 \\\n    --hash=sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789 \\\n    --hash=sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b \\\n    --hash=sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca \\\n    --hash=sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e \\\n    --hash=sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5 \\\n    --hash=sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59 \\\n    --hash=sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9 \\\n    --hash=sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8 \\\n    --hash=sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db \\\n    --hash=sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde \\\n    --hash=sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7 \\\n    --hash=sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb \\\n    --hash=sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3 \\\n    --hash=sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6 \\\n    --hash=sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285 \\\n    --hash=sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb \\\n    --hash=sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8 \\\n  
  --hash=sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482 \\\n    --hash=sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd \\\n    --hash=sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75 \\\n    --hash=sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760 \\\n    --hash=sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782 \\\n    --hash=sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53 \\\n    --hash=sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2 \\\n    --hash=sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1 \\\n    --hash=sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719 \\\n    --hash=sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62\n    # via aiohttp\n"
  },
  {
    "path": "run_release.py",
    "content": "#!/usr/bin/env python3\n\n\"\"\"An automatic engine for Python releases\n\nOriginal code by Pablo Galindo\n\"\"\"\nfrom __future__ import annotations\n\nimport argparse\nimport asyncio\nimport contextlib\nimport functools\nimport getpass\nimport json\nimport os\nimport re\nimport shelve\nimport shlex\nimport shutil\nimport subprocess\nimport sys\nimport tempfile\nimport time\nimport urllib.request\nfrom collections.abc import Iterator\nfrom pathlib import Path\nfrom typing import Any, cast\n\nimport aiohttp\nimport gnupg  # type: ignore[import-untyped]\nimport paramiko\nimport sigstore.oidc\nfrom alive_progress import alive_bar\n\nimport release as release_mod\nimport sbom\nimport update_version_next\nfrom buildbotapi import BuildBotAPI, Builder\nfrom release import ReleaseShelf, Tag, Task, ask_question\n\nAPI_KEY_REGEXP = re.compile(r\"(?P<user>\\w+):(?P<key>\\w+)\")\nRELEASE_REGEXP = re.compile(\n    r\"(?P<major>\\d+)\\.(?P<minor>\\d+)\\.(?P<patch>\\d+)\\.?(?P<extra>.*)?\"\n)\nDOWNLOADS_SERVER = \"downloads.nyc1.psf.io\"\nDOCS_SERVER = \"docs.nyc1.psf.io\"\n\nWHATS_NEW_TEMPLATE = \"\"\"\n****************************\n  What's new in Python {version}\n****************************\n\n:Editor: TBD\n\n.. Rules for maintenance:\n\n   * Anyone can add text to this document.  Do not spend very much time\n   on the wording of your changes, because your text will probably\n   get rewritten to some degree.\n\n   * The maintainer will go through Misc/NEWS periodically and add\n   changes; it's therefore more important to add your changes to\n   Misc/NEWS than to this file.\n\n   * This is not a complete list of every single change; completeness\n   is the purpose of Misc/NEWS.  Some changes I consider too small\n   or esoteric to include.  If such a change is added to the text,\n   I'll just remove it.  
(This is another reason you shouldn't spend\n   too much time on writing your addition.)\n\n   * If you want to draw your new text to the attention of the\n   maintainer, add 'XXX' to the beginning of the paragraph or\n   section.\n\n   * It's OK to just add a fragmentary note about a change.  For\n   example: \"XXX Describe the transmogrify() function added to the\n   socket module.\"  The maintainer will research the change and\n   write the necessary text.\n\n   * You can comment out your additions if you like, but it's not\n   necessary (especially when a final release is some months away).\n\n   * Credit the author of a patch or bugfix.   Just the name is\n   sufficient; the e-mail address isn't necessary.\n\n   * It's helpful to add the issue number as a comment:\n\n   XXX Describe the transmogrify() function added to the socket\n   module.\n   (Contributed by P.Y. Developer in :gh:`12345`.)\n\n   This saves the maintainer the effort of going through the VCS log\n   when researching a change.\n\nThis article explains the new features in Python {version}, compared to {prev_version}.\n\nFor full details, see the :ref:`changelog <changelog>`.\n\n.. note::\n\n   Prerelease users should be aware that this document is currently in draft\n   form. It will be updated substantially as Python {version} moves towards release,\n   so it's worth checking back even after reading earlier versions.\n\n\nSummary --- release highlights\n==============================\n\n.. This section singles out the most important changes in Python {version}.\n   Brevity is key.\n\n\n.. PEP-sized items next.\n\n\n\nNew features\n============\n\n\n\nOther language changes\n======================\n\n\n\nNew modules\n===========\n\n* None yet.\n\n\nImproved modules\n================\n\nmodule_name\n-----------\n\n* TODO\n\n.. 
Add improved modules above alphabetically, not here at the end.\n\nOptimizations\n=============\n\nmodule_name\n-----------\n\n* TODO\n\n\n\nRemoved\n=======\n\nmodule_name\n-----------\n\n* TODO\n.. Add removals above alphabetically, not here at the end.\n\n\nDeprecated\n==========\n\n* module_name:\n  TODO\n\n\n.. Add deprecations above alphabetically, not here at the end.\n\n\nPorting to Python {version}\n======================\n\nThis section lists previously described changes and other bugfixes\nthat may require changes to your code.\n\n\nBuild changes\n=============\n\n\nC API changes\n=============\n\nNew features\n------------\n\n* TODO\n\nPorting to Python {version}\n----------------------\n\n* TODO\n\nDeprecated C APIs\n-----------------\n\n* TODO\n\n.. Add C API deprecations above alphabetically, not here at the end.\n\nRemoved C APIs\n--------------\n\n\"\"\"\n\n\nclass ReleaseException(Exception):\n    \"\"\"An error happened in the release process\"\"\"\n\n\nclass ReleaseDriver:\n    def __init__(\n        self,\n        tasks: list[Task],\n        *,\n        release_tag: Tag,\n        git_repo: str,\n        api_key: str,\n        ssh_user: str,\n        sign_gpg: bool,\n        ssh_key: str | None = None,\n        first_state: Task | None = None,\n    ) -> None:\n        self.tasks = tasks\n        dbfile = Path.home() / f\".python_release-{release_tag}\"\n        self.db: ReleaseShelf = cast(ReleaseShelf, shelve.open(str(dbfile), \"c\"))\n        if not self.db.get(\"finished\"):\n            self.db[\"finished\"] = False\n        else:\n            self.db.close()\n            self.db = cast(ReleaseShelf, shelve.open(str(dbfile), \"n\"))\n\n        self.current_task: Task | None = first_state\n        self.completed_tasks = self.db.get(\"completed_tasks\", [])\n        self.remaining_tasks = iter(tasks[len(self.completed_tasks) :])\n        if self.db.get(\"gpg_key\"):\n            os.environ[\"GPG_KEY_FOR_RELEASE\"] = self.db[\"gpg_key\"]\n      
  if not self.db.get(\"git_repo\"):\n            self.db[\"git_repo\"] = Path(git_repo)\n        if not self.db.get(\"auth_info\"):\n            self.db[\"auth_info\"] = api_key\n        if not self.db.get(\"ssh_user\"):\n            self.db[\"ssh_user\"] = ssh_user\n        if not self.db.get(\"ssh_key\"):\n            self.db[\"ssh_key\"] = ssh_key\n        if not self.db.get(\"sign_gpg\"):\n            self.db[\"sign_gpg\"] = sign_gpg\n        if not self.db.get(\"release\"):\n            self.db[\"release\"] = release_tag\n        if not self.db.get(\"security_release\"):\n            self.db[\"security_release\"] = self.db[\"release\"].is_security_release\n\n        print(\"Release data: \")\n        print(f\"- Branch: {release_tag.branch}\")\n        print(f\"- Release tag: {self.db['release']}\")\n        print(f\"- Normalized release tag: {release_tag.normalized()}\")\n        print(f\"- Git repo: {self.db['git_repo']}\")\n        print(f\"- SSH username: {self.db['ssh_user']}\")\n        print(f\"- SSH key: {self.db['ssh_key'] or 'Default'}\")\n        print(f\"- Sign with GPG: {self.db['sign_gpg']}\")\n        print(f\"- Security release: {self.db['security_release']}\")\n        print()\n\n    def checkpoint(self) -> None:\n        self.db[\"completed_tasks\"] = self.completed_tasks\n\n    def run(self) -> None:\n        for task in self.completed_tasks:\n            print(f\"✅  {task.description}\")\n\n        self.current_task = next(self.remaining_tasks, None)\n        while self.current_task is not None:\n            self.checkpoint()\n            try:\n                self.current_task(self.db)\n            except Exception as e:\n                print(f\"\\r💥  {self.current_task.description}\")\n                raise e from None\n            print(f\"\\r✅  {self.current_task.description}\")\n            self.completed_tasks.append(self.current_task)\n            self.current_task = next(self.remaining_tasks, None)\n        self.db[\"finished\"] = 
True\n        print()\n        print(f\"Congratulations, Python {self.db['release']} is released 🎉🎉🎉\")\n\n\n@contextlib.contextmanager\ndef cd(path: Path) -> Iterator[None]:\n    current_path = os.getcwd()\n    os.chdir(path)\n    yield\n    os.chdir(current_path)\n\n\ndef check_tool(db: ReleaseShelf, tool: str) -> None:\n    if shutil.which(tool) is None:\n        raise ReleaseException(f\"{tool} is not available\")\n\n\ncheck_gh = functools.partial(check_tool, tool=\"gh\")\ncheck_git = functools.partial(check_tool, tool=\"git\")\ncheck_make = functools.partial(check_tool, tool=\"make\")\ncheck_blurb = functools.partial(check_tool, tool=\"blurb\")\ncheck_autoconf = functools.partial(check_tool, tool=\"autoconf\")\ncheck_docker = functools.partial(check_tool, tool=\"docker\")\n\n\ndef check_gpg_keys(db: ReleaseShelf) -> None:\n    pg = gnupg.GPG()\n    keys = pg.list_keys(secret=True)\n    if not keys:\n        raise ReleaseException(\"There are no valid GPG keys for release\")\n    for index, key in enumerate(keys):\n        print(f\"{index} - {key['keyid']}: {key['uids']}\")\n    selected_key_index = -1\n    while not (0 <= selected_key_index < len(keys)):\n        with contextlib.suppress(ValueError):\n            selected_key_index = int(\n                input(\"Select one GPG key for release (by index):\")\n            )\n    selected_key = keys[selected_key_index][\"keyid\"]\n    os.environ[\"GPG_KEY_FOR_RELEASE\"] = selected_key\n    if selected_key not in {key[\"keyid\"] for key in keys}:\n        raise ReleaseException(\"Invalid GPG key selected\")\n    db[\"gpg_key\"] = selected_key\n    os.environ[\"GPG_KEY_FOR_RELEASE\"] = db[\"gpg_key\"]\n\n\ndef check_ssh_connection(db: ReleaseShelf) -> None:\n    client = paramiko.SSHClient()\n    client.load_system_host_keys()\n    client.set_missing_host_key_policy(paramiko.RejectPolicy)\n    client.connect(\n        DOWNLOADS_SERVER, port=22, username=db[\"ssh_user\"], key_filename=db[\"ssh_key\"]\n    )\n 
   client.exec_command(\"pwd\")\n    client.connect(\n        DOCS_SERVER, port=22, username=db[\"ssh_user\"], key_filename=db[\"ssh_key\"]\n    )\n    client.exec_command(\"pwd\")\n\n\ndef check_sigstore_client(db: ReleaseShelf) -> None:\n    client = paramiko.SSHClient()\n    client.load_system_host_keys()\n    client.set_missing_host_key_policy(paramiko.RejectPolicy)\n    client.connect(\n        DOWNLOADS_SERVER, port=22, username=db[\"ssh_user\"], key_filename=db[\"ssh_key\"]\n    )\n    _, stdout, _ = client.exec_command(\"python3 -m sigstore --version\")\n    sigstore_version = stdout.read(1000).decode()\n    check_sigstore_version(sigstore_version)\n\n\ndef check_sigstore_version(version: str) -> None:\n    version_match = re.match(\"^sigstore ([0-9.]+)\", version)\n    if version_match:\n        version_tuple = tuple(int(part) for part in version_match.group(1).split(\".\"))\n        if (3, 6, 2) <= version_tuple < (4, 0):\n            # good version\n            return\n\n    raise ReleaseException(\n        f\"Sigstore version not detected or not valid. 
\"\n        f\"Expecting >= 3.6.2 and < 4.0.0, got: {version}\"\n    )\n\n\ndef check_buildbots(db: ReleaseShelf) -> None:\n    async def _check() -> set[Builder]:\n        async def _get_builder_status(\n            buildbot_api: BuildBotAPI, the_builder: Builder\n        ) -> tuple[Builder, bool]:\n            return the_builder, await buildbot_api.is_builder_failing_currently(\n                the_builder\n            )\n\n        async with aiohttp.ClientSession() as session:\n            api = BuildBotAPI(session)\n            await api.authenticate(token=\"\")\n            release_branch = db[\"release\"].branch\n            stable_builders = await api.stable_builders(branch=release_branch)\n            if not stable_builders:\n                release_branch = \"3.x\"\n                stable_builders = await api.stable_builders(branch=\"3.x\")\n            if not stable_builders:\n                raise ReleaseException(\n                    f\"Failed to get the stable buildbots for the {release_branch} tag\"\n                )\n            builders = await asyncio.gather(\n                *[\n                    _get_builder_status(api, the_builder)\n                    for the_builder in stable_builders.values()\n                ]\n            )\n            return {the_builder for (the_builder, is_failing) in builders if is_failing}\n\n    failing_builders = asyncio.run(_check())\n    if not failing_builders:\n        return\n    print()\n    print(\"The following buildbots are failing:\")\n    for builder in failing_builders:\n        print(f\"- {builder.name}\")\n    print()\n    print(\"Check https://buildbot.python.org/all/#/release_status for more information\")\n    print()\n    if not ask_question(\"Do you want to continue even if these builders are failing?\"):\n        raise ReleaseException(\"Buildbots are failing!\")\n\n\ndef check_docker_running(db: ReleaseShelf) -> None:\n    subprocess.check_call([\"docker\", \"container\", \"ls\"])\n\n\ndef 
run_blurb_release(db: ReleaseShelf) -> None:\n    subprocess.check_call([\"blurb\", \"release\", str(db[\"release\"])], cwd=db[\"git_repo\"])\n    subprocess.check_call(\n        [\"git\", \"commit\", \"-m\", f\"Python {db['release']}\"],\n        cwd=db[\"git_repo\"],\n    )\n\n\ndef check_cpython_repo_branch(db: ReleaseShelf) -> None:\n    current_branch = subprocess.check_output(\n        shlex.split(\"git branch --show-current\"), text=True, cwd=db[\"git_repo\"]\n    ).strip()\n    expected_branch = db[\"release\"].branch\n    if current_branch != expected_branch:\n        raise ReleaseException(\n            f\"CPython repository is on {current_branch} branch, \"\n            f\"expected {expected_branch}\"\n        )\n\n\ndef check_cpython_repo_age(db: ReleaseShelf) -> None:\n    # %ct = committer date, UNIX timestamp (for example, \"1768300016\")\n    timestamp = subprocess.check_output(\n        shlex.split('git log -1 --format=\"%ct\"'), text=True, cwd=db[\"git_repo\"]\n    ).strip()\n    age_seconds = time.time() - int(timestamp.strip())\n    is_old = age_seconds > 86400  # 1 day\n\n    # cr = committer date, relative (for example, \"3 days ago\")\n    out = subprocess.check_output(\n        shlex.split('git log -1 --format=\"%cr\"'), text=True, cwd=db[\"git_repo\"]\n    )\n    print(f\"Last CPython commit was {out.strip()}\")\n\n    if is_old and not ask_question(\"Continue with old repo?\"):\n        raise ReleaseException(\"CPython repository is old\")\n\n\ndef check_cpython_repo_is_clean(db: ReleaseShelf) -> None:\n    if subprocess.check_output([\"git\", \"status\", \"--porcelain\"], cwd=db[\"git_repo\"]):\n        raise ReleaseException(\"Git repository is not clean\")\n\n\ndef check_magic_number(db: ReleaseShelf) -> None:\n    release_tag = db[\"release\"]\n    if release_tag.is_final or release_tag.is_release_candidate:\n\n        def out(msg: str) -> None:\n            raise ReleaseException(msg)\n\n    else:\n\n        def out(msg: str) -> 
None:\n            print(\"warning:\", msg, file=sys.stderr, flush=True)\n\n    def get_magic(source: Path, regex: re.Pattern[str]) -> str:\n        if m := regex.search(source.read_text()):\n            return m.group(\"magic\")\n\n        out(f\"Cannot find magic in {source}, tried {regex.pattern}\")\n        return \"unknown\"\n\n    work_dir = Path(db[\"git_repo\"])\n    magic_actual_file = work_dir / \"Include\" / \"internal\" / \"pycore_magic_number.h\"\n    magic_actual_re = re.compile(\n        r\"^#define\\s+PYC_MAGIC_NUMBER\\s+(?P<magic>\\d+)$\", re.MULTILINE\n    )\n    magic_actual = get_magic(magic_actual_file, magic_actual_re)\n\n    magic_expected_file = work_dir / \"Lib\" / \"test\" / \"test_importlib\" / \"test_util.py\"\n    magic_expected_re = re.compile(\n        r\"^\\s+EXPECTED_MAGIC_NUMBER = (?P<magic>\\d+)$\", re.MULTILINE\n    )\n    magic_expected = get_magic(magic_expected_file, magic_expected_re)\n\n    if magic_actual == magic_expected:\n        return\n\n    out(\n        f\"Magic numbers in {magic_actual_file} ({magic_actual})\"\n        f\" and {magic_expected_file} ({magic_expected}) don't match.\"\n    )\n    if not ask_question(\"Do you want to continue? 
This will fail tests in RC stage.\"):\n        raise ReleaseException(\"Magic numbers don't match!\")\n\n\ndef prepare_temporary_branch(db: ReleaseShelf) -> None:\n    subprocess.check_call(\n        [\"git\", \"checkout\", \"-b\", f\"branch-{db['release']}\"], cwd=db[\"git_repo\"]\n    )\n\n\ndef remove_temporary_branch(db: ReleaseShelf) -> None:\n    subprocess.check_call(\n        [\"git\", \"branch\", \"-D\", f\"branch-{db['release']}\"], cwd=db[\"git_repo\"]\n    )\n\n\ndef prepare_pydoc_topics(db: ReleaseShelf) -> None:\n    subprocess.check_call([\"make\", \"venv\"], cwd=db[\"git_repo\"] / \"Doc\")\n    subprocess.check_call([\"make\", \"pydoc-topics\"], cwd=db[\"git_repo\"] / \"Doc\")\n    shutil.copy2(\n        db[\"git_repo\"] / \"Doc\" / \"build\" / \"pydoc-topics\" / \"topics.py\",\n        db[\"git_repo\"] / \"Lib\" / \"pydoc_data\" / \"topics.py\",\n    )\n    if db[\"release\"].as_tuple() >= (3, 13):\n        shutil.copy2(\n            db[\"git_repo\"] / \"Doc\" / \"build\" / \"pydoc-topics\" / \"module_docs.py\",\n            db[\"git_repo\"] / \"Lib\" / \"pydoc_data\" / \"module_docs.py\",\n        )\n    subprocess.check_call(\n        [\"git\", \"commit\", \"-a\", \"--amend\", \"--no-edit\"], cwd=db[\"git_repo\"]\n    )\n\n\ndef run_autoconf(db: ReleaseShelf) -> None:\n    # Python 3.12 and newer have a script that runs autoconf.\n    regen_configure_sh = db[\"git_repo\"] / \"Tools/build/regen-configure.sh\"\n    if regen_configure_sh.exists():\n        subprocess.check_call(\n            [regen_configure_sh],\n            cwd=db[\"git_repo\"],\n        )\n    # Python 3.11 and prior rely on autoconf built within a container\n    # in order to maintain stability of autoconf generation.\n    else:\n        # Corresponds to the tag '269' and 'cp311'\n        cpython_autoconf_sha256 = (\n            \"f370fee95eefa3d57b00488bce4911635411fa83e2d293ced8cf8a3674ead939\"\n        )\n        subprocess.check_call(\n            [\n                
\"docker\",\n                \"run\",\n                \"--rm\",\n                \"--pull=always\",\n                f\"-v{db['git_repo']}:/src\",\n                f\"quay.io/tiran/cpython_autoconf@sha256:{cpython_autoconf_sha256}\",\n            ],\n            cwd=db[\"git_repo\"],\n        )\n        subprocess.check_call([\"docker\", \"rmi\", \"quay.io/tiran/cpython_autoconf\", \"-f\"])\n\n    subprocess.check_call(\n        [\"git\", \"commit\", \"-a\", \"--amend\", \"--no-edit\"], cwd=db[\"git_repo\"]\n    )\n\n\ndef check_pyspecific(db: ReleaseShelf) -> None:\n    with open(\n        db[\"git_repo\"] / \"Doc\" / \"tools\" / \"extensions\" / \"pyspecific.py\"\n    ) as pyspecific:\n        for line in pyspecific:\n            if \"SOURCE_URI =\" in line:\n                break\n    expected_branch = db[\"release\"].branch\n    expected = (\n        f\"SOURCE_URI = 'https://github.com/python/cpython/tree/{expected_branch}/%s'\"\n    )\n    if expected != line.strip():\n        raise ReleaseException(\n            f\"SOURCE_URI is incorrect (it needs changing before beta 1):\\n\"\n            f\"expected: {expected}\\n\"\n            f\"got     : {line.strip()}\"\n        )\n\n\ndef bump_version(db: ReleaseShelf) -> None:\n    with cd(db[\"git_repo\"]):\n        release_mod.bump(db[\"release\"])\n    subprocess.check_call(\n        [\"git\", \"commit\", \"-a\", \"--amend\", \"--no-edit\"], cwd=db[\"git_repo\"]\n    )\n\n\ndef bump_version_in_docs(db: ReleaseShelf) -> None:\n    update_version_next.main([db[\"release\"].doc_version, str(db[\"git_repo\"])])\n    subprocess.check_call(\n        [\"git\", \"commit\", \"-a\", \"--amend\", \"--no-edit\"], cwd=db[\"git_repo\"]\n    )\n\n\ndef create_tag(db: ReleaseShelf) -> None:\n    with cd(db[\"git_repo\"]):\n        if not release_mod.make_tag(db[\"release\"], sign_gpg=db[\"sign_gpg\"]):\n            raise ReleaseException(\"Error when creating tag\")\n    subprocess.check_call(\n        [\"git\", \"commit\", 
\"-a\", \"--amend\", \"--no-edit\"], cwd=db[\"git_repo\"]\n    )\n\n\ndef wait_for_build_release(db: ReleaseShelf) -> None:\n    # Determine if we need to wait for docs.\n    release_tag = db[\"release\"]\n    should_wait_for_docs = release_tag.includes_docs\n\n    # Create the directory so it's easier to place the artifacts there.\n    release_path = Path(db[\"git_repo\"] / str(release_tag))\n    downloads_path = release_path / \"downloads\"\n    downloads_path.mkdir(parents=True, exist_ok=True)\n\n    # Build the list of filepaths we're expecting.\n    wait_for_paths = [\n        downloads_path / f\"Python-{release_tag}.tgz\",\n        downloads_path / f\"Python-{release_tag}.tar.xz\",\n    ]\n    if release_tag.as_tuple() >= (3, 14):\n        wait_for_paths += [\n            downloads_path / f\"python-{release_tag}-{arch}-linux-android.tar.gz\"\n            for arch in [\"aarch64\", \"x86_64\"]\n        ]\n    if release_tag.as_tuple() >= (3, 15):\n        wait_for_paths.append(\n            downloads_path / f\"python-{release_tag}-iOS-XCframework.tar.gz\"\n        )\n    if should_wait_for_docs:\n        docs_path = release_path / \"docs\"\n        docs_path.mkdir(parents=True, exist_ok=True)\n        wait_for_paths.extend(\n            [\n                docs_path / f\"python-{release_tag}-docs.epub\",\n                docs_path / f\"python-{release_tag}-docs-html.tar.bz2\",\n                docs_path / f\"python-{release_tag}-docs-html.zip\",\n                docs_path / f\"python-{release_tag}-docs-texinfo.tar.bz2\",\n                docs_path / f\"python-{release_tag}-docs-texinfo.zip\",\n                docs_path / f\"python-{release_tag}-docs-text.tar.bz2\",\n                docs_path / f\"python-{release_tag}-docs-text.zip\",\n            ]\n        )\n\n    print(\"Once the build-release workflow is complete:\")\n    print(\"- Download its artifacts from the workflow summary page.\")\n    print(f\"- Copy the following files into {release_path}:\")\n    
for path in wait_for_paths:\n        print(f\"  - {os.path.relpath(path, release_path)}\")\n    print(\"The script will continue once all files are present.\")\n\n    while not all(path.exists() for path in wait_for_paths):\n        time.sleep(1)\n\n\ndef check_doc_unreleased_version(db: ReleaseShelf) -> None:\n    print(\"Checking built docs for '(unreleased)'\")\n    # This string is generated when a `versionadded:: next` directive is\n    # left in the docs, which means the `bump_version_in_docs` step\n    # didn't do its job.\n    # But, there could also be a false positive.\n    release_tag = db[\"release\"]\n    docs_path = Path(db[\"git_repo\"]) / str(release_tag) / \"docs\"\n    archive_path = docs_path / f\"python-{release_tag}-docs-html.tar.bz2\"\n    if release_tag.includes_docs:\n        assert archive_path.exists()\n    if archive_path.exists():\n        with tempfile.TemporaryDirectory() as temp_dir:\n            subprocess.run([\"tar\", \"-xjf\", archive_path, \"-C\", temp_dir])\n            proc = subprocess.run([\"grep\", \"-rHn\", \"[(]unreleased[)]\", temp_dir])\n            if proc.returncode == 0:\n                if not ask_question(\n                    \"Are these `(unreleased)` strings in built docs OK?\"\n                ):\n                    raise AssertionError(\"`(unreleased)` strings found in docs\")\n\n\ndef sign_source_artifacts(db: ReleaseShelf) -> None:\n    print(\"Signing tarballs with GPG\")\n    uid = os.environ.get(\"GPG_KEY_FOR_RELEASE\")\n    if not uid:\n        print(\"List of available private keys:\")\n        subprocess.check_call('gpg -K | grep -A 1 \"^sec\"', shell=True)\n        uid = input(\"Please enter key ID to use for signing: \")\n\n    tarballs_path = Path(db[\"git_repo\"] / str(db[\"release\"]) / \"downloads\")\n    tgz = str(tarballs_path / f\"Python-{db['release']}.tgz\")\n    xz = str(tarballs_path / f\"Python-{db['release']}.tar.xz\")\n\n    subprocess.check_call([\"gpg\", \"-bas\", \"-u\", uid, tgz])\n 
   subprocess.check_call([\"gpg\", \"-bas\", \"-u\", uid, xz])\n\n    print(\"Signing tarballs with Sigstore\")\n    for filename in (tgz, xz):\n        cert_file = filename + \".crt\"\n        sig_file = filename + \".sig\"\n        bundle_file = filename + \".sigstore\"\n\n        subprocess.check_call(\n            [\n                sys.executable,\n                \"-m\",\n                \"sigstore\",\n                \"sign\",\n                \"--oidc-disable-ambient-providers\",\n                \"--signature\",\n                sig_file,\n                \"--certificate\",\n                cert_file,\n                \"--bundle\",\n                bundle_file,\n                filename,\n            ]\n        )\n\n\ndef build_sbom_artifacts(db: ReleaseShelf) -> None:\n\n    # Skip building an SBOM if there isn't a 'Misc/sbom.spdx.json' file.\n    if not (db[\"git_repo\"] / \"Misc/sbom.spdx.json\").exists():\n        print(\"Skipping building an SBOM, missing 'Misc/sbom.spdx.json'\")\n        return\n\n    release_version = db[\"release\"]\n    # For each source tarball build an SBOM.\n    for ext in (\".tgz\", \".tar.xz\"):\n        tarball_name = f\"Python-{release_version}{ext}\"\n        tarball_path = str(\n            db[\"git_repo\"] / str(db[\"release\"]) / \"downloads\" / tarball_name\n        )\n\n        print(f\"Building an SBOM for artifact '{tarball_name}'\")\n        sbom_data = sbom.create_sbom_for_source_tarball(tarball_path)\n\n        with open(tarball_path + \".spdx.json\", mode=\"w\") as f:\n            f.write(json.dumps(sbom_data, indent=2, sort_keys=True))\n\n\nclass MySFTPClient(paramiko.SFTPClient):\n    def put_dir(\n        self, source: str | Path, target: str | Path, progress: Any = None\n    ) -> None:\n        for item in os.listdir(source):\n            if os.path.isfile(os.path.join(source, item)):\n                progress.text(item)\n                self.put(os.path.join(source, item), f\"{target}/{item}\")\n            
    progress()\n            else:\n                self.mkdir(f\"{target}/{item}\", ignore_existing=True)\n                self.put_dir(\n                    os.path.join(source, item),\n                    f\"{target}/{item}\",\n                    progress=progress,\n                )\n\n    def mkdir(\n        self, path: bytes | str, mode: int = 511, ignore_existing: bool = False\n    ) -> None:\n        try:\n            super().mkdir(path, mode)\n        except OSError:\n            if ignore_existing:\n                pass\n            else:\n                raise\n\n\ndef upload_files_to_server(db: ReleaseShelf, server: str) -> None:\n    client = paramiko.SSHClient()\n    client.load_system_host_keys()\n    client.set_missing_host_key_policy(paramiko.RejectPolicy)\n    client.connect(server, port=22, username=db[\"ssh_user\"], key_filename=db[\"ssh_key\"])\n    transport = client.get_transport()\n    assert transport is not None, f\"SSH transport to {server} is None\"\n\n    destination = Path(f\"/home/psf-users/{db['ssh_user']}/{db['release']}\")\n    ftp_client = MySFTPClient.from_transport(transport)\n    assert ftp_client is not None, f\"SFTP client to {server} is None\"\n\n    client.exec_command(f\"rm -rf {destination}\")\n\n    with contextlib.suppress(OSError):\n        ftp_client.mkdir(str(destination))\n\n    artifacts_path = Path(db[\"git_repo\"] / str(db[\"release\"]))\n\n    shutil.rmtree(artifacts_path / f\"Python-{db['release']}\", ignore_errors=True)\n\n    def upload_subdir(subdir: str) -> None:\n        with contextlib.suppress(OSError):\n            ftp_client.mkdir(str(destination / subdir))\n        with alive_bar(len(tuple((artifacts_path / subdir).glob(\"**/*\")))) as progress:\n            ftp_client.put_dir(\n                artifacts_path / subdir,\n                str(destination / subdir),\n                progress=progress,\n            )\n\n    if server == DOCS_SERVER:\n        upload_subdir(\"docs\")\n    elif server == 
DOWNLOADS_SERVER:\n        upload_subdir(\"downloads\")\n        if (artifacts_path / \"docs\").exists():\n            upload_subdir(\"docs\")\n\n    ftp_client.close()\n\n\ndef upload_files_to_downloads_server(db: ReleaseShelf) -> None:\n    upload_files_to_server(db, DOWNLOADS_SERVER)\n\n\ndef place_files_in_download_folder(db: ReleaseShelf) -> None:\n    client = paramiko.SSHClient()\n    client.load_system_host_keys()\n    client.set_missing_host_key_policy(paramiko.RejectPolicy)\n    client.connect(\n        DOWNLOADS_SERVER, port=22, username=db[\"ssh_user\"], key_filename=db[\"ssh_key\"]\n    )\n    transport = client.get_transport()\n    assert transport is not None, f\"SSH transport to {DOWNLOADS_SERVER} is None\"\n\n    # Downloads\n\n    source = f\"/home/psf-users/{db['ssh_user']}/{db['release']}\"\n    destination = f\"/srv/www.python.org/ftp/python/{db['release'].normalized()}\"\n\n    def execute_command(command: str) -> None:\n        channel = transport.open_session()\n        channel.exec_command(command)\n        if channel.recv_exit_status() != 0:\n            raise ReleaseException(channel.recv_stderr(1000))\n\n    execute_command(f\"mkdir -p {destination}\")\n    execute_command(f\"cp {source}/downloads/* {destination}\")\n    execute_command(f\"chgrp downloads {destination}\")\n    execute_command(f\"chmod 775 {destination}\")\n    execute_command(f\"find {destination} -type f -exec chmod 664 {{}} \\\\;\")\n\n    # Docs\n\n    release_tag = db[\"release\"]\n    if release_tag.is_final or release_tag.is_release_candidate:\n        source = f\"/home/psf-users/{db['ssh_user']}/{db['release']}\"\n        destination = f\"/srv/www.python.org/ftp/python/doc/{release_tag}\"\n\n        execute_command(f\"mkdir -p {destination}\")\n        execute_command(f\"cp {source}/docs/* {destination}\")\n        execute_command(f\"chgrp downloads {destination}\")\n        execute_command(f\"chmod 775 {destination}\")\n        execute_command(f\"find 
{destination} -type f -exec chmod 664 {{}} \\\\;\")\n\n\ndef upload_docs_to_the_docs_server(db: ReleaseShelf) -> None:\n    release_tag: release_mod.Tag = db[\"release\"]\n    if not (release_tag.is_final or release_tag.is_release_candidate):\n        return\n\n    upload_files_to_server(db, DOCS_SERVER)\n\n\ndef unpack_docs_in_the_docs_server(db: ReleaseShelf) -> None:\n    release_tag: release_mod.Tag = db[\"release\"]\n    if not (release_tag.is_final or release_tag.is_release_candidate):\n        return\n\n    client = paramiko.SSHClient()\n    client.load_system_host_keys()\n    client.set_missing_host_key_policy(paramiko.RejectPolicy)\n    client.connect(\n        DOCS_SERVER, port=22, username=db[\"ssh_user\"], key_filename=db[\"ssh_key\"]\n    )\n    transport = client.get_transport()\n    assert transport is not None, f\"SSH transport to {DOCS_SERVER} is None\"\n\n    # Sources\n\n    source = f\"/home/psf-users/{db['ssh_user']}/{db['release']}\"\n    destination = f\"/srv/docs.python.org/release/{release_tag}\"\n\n    def execute_command(command: str) -> None:\n        channel = transport.open_session()\n        channel.exec_command(command)\n        if channel.recv_exit_status() != 0:\n            raise ReleaseException(channel.recv_stderr(1000))\n\n    docs_filename = f\"python-{release_tag}-docs-html\"\n    execute_command(f\"mkdir -p {destination}\")\n    execute_command(f\"unzip {source}/docs/{docs_filename}.zip -d {destination}\")\n    execute_command(f\"mv /{destination}/{docs_filename}/* {destination}\")\n    execute_command(f\"rm -rf /{destination}/{docs_filename}\")\n    execute_command(f\"chgrp -R docs {destination}\")\n    execute_command(f\"chmod -R 775 {destination}\")\n    execute_command(f\"find {destination} -type f -exec chmod 664 {{}} \\\\;\")\n\n\n@functools.cache\ndef extract_github_owner(url: str) -> str:\n    if https_match := re.match(r\"(https://)?github\\.com/([^/]+)/\", url):\n        return https_match.group(2)\n    elif 
ssh_match := re.match(r\"^git@github\\.com:([^/]+)/\", url):\n        return ssh_match.group(1)\n    else:\n        raise ReleaseException(\n            f\"Could not parse GitHub owner from 'origin' remote URL: {url}\"\n        )\n\n\n@functools.cache\ndef get_commit_sha(git_version: str, git_repo: Path) -> str:\n    \"\"\"Get the Git commit SHA for the tag\"\"\"\n    commit_sha = (\n        subprocess.check_output(\n            [\"git\", \"rev-list\", \"-n\", \"1\", git_version], cwd=git_repo\n        )\n        .decode()\n        .strip()\n    )\n    return commit_sha\n\n\n@functools.cache\ndef get_origin_remote_url(git_repo: Path) -> str:\n    \"\"\"Get the owner of the GitHub repo (first path segment in a 'github.com' remote URL)\n    This works for both 'https' and 'ssh' style remote URLs.\"\"\"\n    origin_remote_url = (\n        subprocess.check_output(\n            [\"git\", \"ls-remote\", \"--get-url\", \"origin\"], cwd=git_repo\n        )\n        .decode()\n        .strip()\n    )\n    return origin_remote_url\n\n\ndef start_build_release(db: ReleaseShelf) -> None:\n    commit_sha = get_commit_sha(db[\"release\"].gitname, db[\"git_repo\"])\n    origin_remote_url = get_origin_remote_url(db[\"git_repo\"])\n    origin_remote_github_owner = extract_github_owner(origin_remote_url)\n    # We ask for human verification at this point since this commit SHA is 'locked in'\n    print()\n    print(\n        f\"Go to https://github.com/{origin_remote_github_owner}/cpython/commit/{commit_sha}\"\n    )\n    print(\"- Ensure that the commit diff does not contain any unexpected changes.\")\n    print(\n        \"- For the next step, ensure the commit SHA matches the one you verified on GitHub in this step.\"\n    )\n    print()\n    if not ask_question(\n        \"Have you verified the release commit hasn't been tampered with on GitHub?\"\n    ):\n        raise ReleaseException(\"Commit must be visually reviewed before starting build\")\n\n    # After visually confirming 
the release manager can start the build process\n    # with the known good commit SHA.\n    print()\n    cmd = (\n        \"gh workflow run build-release.yml --repo python/release-tools\"\n        f\" -f git_remote={origin_remote_github_owner}\"\n        f\" -f git_commit={commit_sha}\"\n        f\" -f cpython_release={db['release']}\"\n    )\n    subprocess.check_call(shlex.split(cmd))\n    print(\n        \"Go to https://github.com/python/release-tools/actions/workflows/build-release.yml\"\n    )\n    print()\n\n    if not ask_question(\"Have you started the build-release workflow?\"):\n        raise ReleaseException(\"build-release workflow must be started\")\n\n\ndef send_email_to_platform_release_managers(db: ReleaseShelf) -> None:\n    commit_sha = get_commit_sha(db[\"release\"].gitname, db[\"git_repo\"])\n    origin_remote_url = get_origin_remote_url(db[\"git_repo\"])\n    origin_remote_github_owner = extract_github_owner(origin_remote_url)\n    github_prefix = f\"https://github.com/{origin_remote_github_owner}/cpython/tree\"\n\n    print()\n    print(f\"{github_prefix}/{db['release'].gitname}\")\n    print(f\"Git commit SHA: {commit_sha}\")\n    print(\n        \"build-release workflow: https://github.com/python/release-tools/actions/runs/[ENTER-RUN-ID-HERE]\"\n    )\n    print()\n\n    if not ask_question(\n        \"Have you notified the platform release managers about the availability of the commit SHA and tag?\"\n    ):\n        raise ReleaseException(\"Platform release managers must be notified\")\n\n\ndef create_release_object_in_db(db: ReleaseShelf) -> None:\n    print(\n        \"Go to https://www.python.org/admin/downloads/release/add/ and create a new release\"\n    )\n    if not ask_question(f\"Have you already created a new release for {db['release']}?\"):\n        raise ReleaseException(\"The Django release object has not been created\")\n\n\ndef wait_until_all_files_are_in_folder(db: ReleaseShelf) -> None:\n    client = paramiko.SSHClient()\n  
  client.load_system_host_keys()\n    client.set_missing_host_key_policy(paramiko.RejectPolicy)\n    client.connect(\n        DOWNLOADS_SERVER, port=22, username=db[\"ssh_user\"], key_filename=db[\"ssh_key\"]\n    )\n    ftp_client = client.open_sftp()\n\n    destination = f\"/srv/www.python.org/ftp/python/{db['release'].normalized()}\"\n\n    are_all_files_there = False\n    release = str(db[\"release\"])\n    print()\n    while not are_all_files_there:\n        try:\n            all_files = set(ftp_client.listdir(destination))\n        except FileNotFoundError:\n            raise FileNotFoundError(\n                f\"The release folder in {destination} has not been created\"\n            ) from None\n        are_windows_files_there = f\"python-{release}.exe\" in all_files\n        are_macos_files_there = f\"python-{release}-macos11.pkg\" in all_files\n        are_linux_files_there = f\"Python-{release}.tgz\" in all_files\n\n        if db[\"security_release\"]:\n            # For security releases, only check Linux files\n            are_all_files_there = are_linux_files_there\n        else:\n            # For regular releases, check all platforms\n            are_all_files_there = (\n                are_linux_files_there\n                and are_windows_files_there\n                and are_macos_files_there\n            )\n\n        if not are_all_files_there:\n            linux_tick = \"✅\" if are_linux_files_there else \"❌\"\n            windows_tick = \"✅\" if are_windows_files_there else \"❌\"\n            macos_tick = \"✅\" if are_macos_files_there else \"❌\"\n\n            if db[\"security_release\"]:\n                waiting = f\"\\rWaiting for files: Linux {linux_tick} (security release - only checking Linux)\"\n            else:\n                waiting = f\"\\rWaiting for files: Linux {linux_tick}  Windows {windows_tick}  Mac {macos_tick} \"\n\n            print(waiting, flush=True, end=\"\")\n            time.sleep(1)\n    print()\n\n\ndef 
run_add_to_python_dot_org(db: ReleaseShelf) -> None:\n    client = paramiko.SSHClient()\n    client.load_system_host_keys()\n    client.set_missing_host_key_policy(paramiko.RejectPolicy)\n    client.connect(\n        DOWNLOADS_SERVER, port=22, username=db[\"ssh_user\"], key_filename=db[\"ssh_key\"]\n    )\n    transport = client.get_transport()\n    assert transport is not None, f\"SSH transport to {DOWNLOADS_SERVER} is None\"\n\n    # Ensure the file is there\n    source = Path(__file__).parent / \"add_to_pydotorg.py\"\n    destination = Path(f\"/home/psf-users/{db['ssh_user']}/add_to_pydotorg.py\")\n    ftp_client = MySFTPClient.from_transport(transport)\n    assert ftp_client is not None, f\"SFTP client to {DOWNLOADS_SERVER} is None\"\n    ftp_client.put(str(source), str(destination))\n    ftp_client.close()\n\n    auth_info = db[\"auth_info\"]\n    assert auth_info is not None\n\n    # Do the interactive flow to get an identity for Sigstore\n    issuer = sigstore.oidc.Issuer(sigstore.oidc.DEFAULT_OAUTH_ISSUER_URL)\n    identity_token = issuer.identity_token()\n\n    print(\"Adding files to python.org...\")\n    stdin, stdout, stderr = client.exec_command(\n        f\"AUTH_INFO={auth_info} SIGSTORE_IDENTITY_TOKEN={identity_token} python3 add_to_pydotorg.py {db['release']}\"\n    )\n    stderr_text = stderr.read().decode()\n    if stderr_text:\n        raise paramiko.SSHException(f\"Failed to execute the command: {stderr_text}\")\n    stdout_text = stdout.read().decode()\n    print(\"-- Command output --\")\n    print(stdout_text)\n    print(\"-- End of command output --\")\n\n\ndef purge_the_cdn(db: ReleaseShelf) -> None:\n    headers = {\n        \"User-Agent\": \"Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6\"\n    }\n    normalized_release = db[\"release\"].normalized()\n    urls = [\n        f\"https://www.python.org/downloads/release/python-{str(db['release']).replace('.', '')}/\",\n        
f\"https://docs.python.org/release/{db['release']}/\",\n        f\"https://www.python.org/ftp/python/{normalized_release}/\",\n        f\"https://docs.python.org/release/{normalized_release}/\",\n        \"https://www.python.org/downloads/\",\n        \"https://www.python.org/downloads/windows/\",\n        \"https://www.python.org/downloads/macos/\",\n    ]\n    # Purge the source URLs and their associated metadata files.\n    source_urls = [\n        f\"https://www.python.org/ftp/python/{normalized_release}/Python-{db['release']}.tgz\",\n        f\"https://www.python.org/ftp/python/{normalized_release}/Python-{db['release']}.tar.xz\",\n    ]\n    for source_url in source_urls:\n        urls.extend(\n            [\n                f\"{source_url}\",\n                f\"{source_url}.asc\",\n                f\"{source_url}.crt\",\n                f\"{source_url}.sig\",\n                f\"{source_url}.sigstore\",\n                f\"{source_url}.spdx.json\",\n            ]\n        )\n\n    for url in urls:\n        req = urllib.request.Request(url=url, headers=headers, method=\"PURGE\")\n        # try:\n        response = urllib.request.urlopen(req)\n        if response.code != 200:\n            raise RuntimeError(\"Failed to purge the python.org/downloads CDN\")\n\n\ndef announce_release(db: ReleaseShelf) -> None:\n    if not ask_question(\n        \"Have you announced the release at https://discuss.python.org/c/core-dev/23 \"\n        \"and https://blog.python.org?\\n\"\n        \"Tip: use the 'release' and 'releases' tags respectively.\"\n    ):\n        raise ReleaseException(\"The release has not been announced\")\n\n\ndef post_release_merge(db: ReleaseShelf) -> None:\n    subprocess.check_call(\n        [\"git\", \"fetch\", \"--all\"],\n        cwd=db[\"git_repo\"],\n    )\n\n    release_tag: release_mod.Tag = db[\"release\"]\n    if release_tag.is_feature_freeze_release:\n        subprocess.check_call(\n            [\"git\", \"checkout\", \"main\"],\n         
   cwd=db[\"git_repo\"],\n        )\n    else:\n        subprocess.check_call(\n            [\"git\", \"checkout\", release_tag.branch],\n            cwd=db[\"git_repo\"],\n        )\n\n    subprocess.check_call(\n        [\"git\", \"merge\", \"--no-squash\", f\"v{db['release']}\"],\n        cwd=db[\"git_repo\"],\n    )\n\n\ndef post_release_tagging(db: ReleaseShelf) -> None:\n    release_tag: release_mod.Tag = db[\"release\"]\n\n    subprocess.check_call(\n        [\"git\", \"fetch\", \"--all\"],\n        cwd=db[\"git_repo\"],\n    )\n\n    if release_tag.is_feature_freeze_release:\n        checkout_branch = release_tag.basic_version\n    else:\n        checkout_branch = release_tag.branch\n\n    subprocess.check_call(\n        [\"git\", \"checkout\", checkout_branch],\n        cwd=db[\"git_repo\"],\n    )\n\n    with cd(db[\"git_repo\"]):\n        release_mod.done(db[\"release\"])\n\n    subprocess.check_call(\n        [\"git\", \"commit\", \"-a\", \"-m\", f\"Post {db['release']}\"],\n        cwd=db[\"git_repo\"],\n    )\n\n\ndef maybe_prepare_new_main_branch(db: ReleaseShelf) -> None:\n    release_tag: release_mod.Tag = db[\"release\"]\n\n    if not release_tag.is_feature_freeze_release:\n        return\n\n    subprocess.check_call(\n        [\"git\", \"checkout\", \"main\"],\n        cwd=db[\"git_repo\"],\n    )\n\n    new_release = release_tag.next_minor_release()\n    with cd(db[\"git_repo\"]):\n        release_mod.bump(new_release)\n\n    prev_branch = f\"{release_tag.major}.{release_tag.minor}\"\n    new_branch = f\"{release_tag.major}.{int(release_tag.minor)+1}\"\n    whatsnew_file = f\"Doc/whatsnew/{new_branch}.rst\"\n    with cd(db[\"git_repo\"]), open(whatsnew_file, \"w\") as f:\n        f.write(WHATS_NEW_TEMPLATE.format(version=new_branch, prev_version=prev_branch))\n\n    subprocess.check_call(\n        [\"git\", \"add\", whatsnew_file],\n        cwd=db[\"git_repo\"],\n    )\n\n    whatsnew_toctree_file = \"Doc/whatsnew/index.rst\"\n    with 
cd(db[\"git_repo\"]):\n        update_whatsnew_toctree(db, whatsnew_toctree_file)\n\n    subprocess.check_call(\n        [\"git\", \"add\", whatsnew_toctree_file],\n        cwd=db[\"git_repo\"],\n    )\n\n    subprocess.check_call(\n        [\"git\", \"commit\", \"-a\", \"-m\", f\"Python {new_release}\"],\n        cwd=db[\"git_repo\"],\n    )\n\n\ndef update_whatsnew_toctree(db: ReleaseShelf, filename: str) -> None:\n    release_tag: release_mod.Tag = db[\"release\"]\n    this_rst = f\"   {release_tag.major}.{release_tag.minor}.rst\"\n    next_rst = f\"   {release_tag.major}.{release_tag.minor+1}.rst\"\n    new = next_rst + \"\\n\" + this_rst\n\n    with open(filename) as f:\n        contents = f.read()\n    contents = contents.replace(this_rst, new)\n    with open(filename, \"w\") as f:\n        f.write(contents)\n\n\ndef branch_new_versions(db: ReleaseShelf) -> None:\n    release_tag: release_mod.Tag = db[\"release\"]\n\n    if not release_tag.is_feature_freeze_release:\n        return\n\n    subprocess.check_call([\"git\", \"checkout\", \"main\"], cwd=db[\"git_repo\"])\n\n    subprocess.check_call(\n        [\"git\", \"checkout\", \"-b\", release_tag.basic_version],\n        cwd=db[\"git_repo\"],\n    )\n\n\ndef is_mirror(repo: Path, remote: str) -> bool:\n    \"\"\"Return True if the `repo` directory was created with --mirror.\"\"\"\n\n    cmd = [\"git\", \"config\", \"--local\", \"--get\", f\"remote.{remote}.mirror\"]\n    try:\n        out = subprocess.check_output(cmd, cwd=repo)\n    except subprocess.CalledProcessError:\n        return False\n    return out.startswith(b\"true\")\n\n\ndef push_to_local_fork(db: ReleaseShelf) -> None:\n    def _push_to_local(dry_run: bool = False) -> None:\n        git_command = [\"git\", \"push\"]\n        if dry_run:\n            git_command.append(\"--dry-run\")\n\n        git_command.append(\"origin\")\n        if not is_mirror(db[\"git_repo\"], \"origin\"):\n            # mirrors push everything always, specifying 
`--tags` or refspecs doesn't work.\n            git_command += [\"HEAD\", \"--tags\"]\n\n        subprocess.check_call(\n            git_command,\n            cwd=db[\"git_repo\"],\n        )\n\n    _push_to_local(dry_run=True)\n    if not ask_question(\n        \"Does these operations look reasonable? ⚠️⚠️⚠️ Answering 'yes' will push to your origin remote ⚠️⚠️⚠️\"\n    ):\n        raise ReleaseException(\"Something is wrong - Push to remote aborted\")\n    _push_to_local(dry_run=False)\n\n\ndef push_to_upstream(db: ReleaseShelf) -> None:\n    release_tag: release_mod.Tag = db[\"release\"]\n\n    def _push_to_upstream(dry_run: bool = False) -> None:\n        branch = f\"{release_tag.major}.{release_tag.minor}\"\n        git_command = [\"git\", \"push\"]\n        if dry_run:\n            git_command.append(\"--dry-run\")\n\n        if release_tag.is_alpha_release:\n            subprocess.check_call(\n                git_command + [\"--tags\", \"git@github.com:python/cpython.git\", \"main\"],\n                cwd=db[\"git_repo\"],\n            )\n        elif release_tag.is_feature_freeze_release:\n            subprocess.check_call(\n                git_command + [\"--tags\", \"git@github.com:python/cpython.git\", branch],\n                cwd=db[\"git_repo\"],\n            )\n            subprocess.check_call(\n                git_command + [\"--tags\", \"git@github.com:python/cpython.git\", \"main\"],\n                cwd=db[\"git_repo\"],\n            )\n        else:\n            subprocess.check_call(\n                git_command + [\"--tags\", \"git@github.com:python/cpython.git\", branch],\n                cwd=db[\"git_repo\"],\n            )\n\n    _push_to_upstream(dry_run=True)\n    if not ask_question(\n        \"Do these operations look reasonable? 
⚠️⚠️⚠️ Answering 'yes' will push to the upstream repository ⚠️⚠️⚠️\"\n    ):\n        raise ReleaseException(\"Something is wrong - Push to upstream aborted\")\n    if not ask_question(\n        \"Is the target branch unprotected for your user? \"\n        \"Check at https://github.com/python/cpython/settings/branches\"\n    ):\n        raise ReleaseException(\"The target branch is not unprotected for your user\")\n    _push_to_upstream(dry_run=False)\n\n\ndef main() -> None:\n\n    parser = argparse.ArgumentParser(description=\"Make a CPython release.\")\n\n    def _release_type(release: str) -> str:\n        if not RELEASE_REGEXP.match(release):\n            raise argparse.ArgumentTypeError(\"Invalid release string\")\n        return release\n\n    parser.add_argument(\n        \"--release\",\n        dest=\"release\",\n        help=\"Release tag\",\n        required=True,\n        type=_release_type,\n    )\n    parser.add_argument(\n        \"--repository\",\n        dest=\"repo\",\n        help=\"Location of the CPython repository\",\n        required=True,\n        type=str,\n    )\n\n    def _api_key(api_key: str) -> str:\n        if not API_KEY_REGEXP.match(api_key):\n            raise argparse.ArgumentTypeError(\n                \"Invalid API key format. 
It must be on the form USER:API_KEY\"\n            )\n        return api_key\n\n    parser.add_argument(\n        \"--auth-key\",\n        dest=\"auth_key\",\n        help=\"API key for python.org in the form 'USER:API_KEY'\",\n        type=_api_key,\n    )\n    parser.add_argument(\n        \"--ssh-user\",\n        dest=\"ssh_user\",\n        default=getpass.getuser(),\n        help=\"Username to be used when authenticating via ssh\",\n        type=str,\n    )\n    parser.add_argument(\n        \"--ssh-key\",\n        dest=\"ssh_key\",\n        default=None,\n        help=\"Path to the SSH key file to use for authentication\",\n        type=str,\n    )\n    args = parser.parse_args()\n\n    auth_key = args.auth_key or os.getenv(\"AUTH_INFO\")\n    assert isinstance(auth_key, str), \"We need an AUTH_INFO env var or --auth-key\"\n\n    if sys.platform not in (\"darwin\", \"linux\"):\n        print(\n            \"\"\"\\\nWARNING! This script has not been tested on a platform other than Linux and macOS.\n\nAlthough it should work correctly as long as you have all the dependencies,\nsome things may not work as expected. 
As a release manager, you should try to\nfix these things in this script so it also supports your platform.\n\"\"\"\n        )\n        if not ask_question(\"Do you want to continue?\"):\n            raise ReleaseException(\n                \"This release script is not compatible with the running platform\"\n            )\n\n    release_tag = release_mod.Tag(args.release)\n    magic = release_tag.as_tuple() >= (3, 14)\n    no_gpg = release_tag.as_tuple() >= (3, 14)  # see PEP 761\n    tasks = [\n        Task(check_gh, \"Checking gh is available\"),\n        Task(check_git, \"Checking Git is available\"),\n        Task(check_make, \"Checking make is available\"),\n        Task(check_blurb, \"Checking blurb is available\"),\n        Task(check_docker, \"Checking Docker is available\"),\n        Task(check_docker_running, \"Checking Docker is running\"),\n        Task(check_autoconf, \"Checking autoconf is available\"),\n        *([] if no_gpg else [Task(check_gpg_keys, \"Checking GPG keys\")]),\n        Task(\n            check_ssh_connection,\n            f\"Validating ssh connection to {DOWNLOADS_SERVER} and {DOCS_SERVER}\",\n        ),\n        Task(check_sigstore_client, \"Checking Sigstore CLI\"),\n        Task(check_buildbots, \"Check buildbots are good\"),\n        Task(check_cpython_repo_branch, \"Checking CPython repository branch\"),\n        Task(check_cpython_repo_age, \"Checking CPython repository age\"),\n        Task(check_cpython_repo_is_clean, \"Checking CPython repository is clean\"),\n        *(\n            [Task(check_magic_number, \"Checking the magic number is up-to-date\")]\n            if magic\n            else []\n        ),\n        Task(prepare_temporary_branch, \"Checking out a temporary release branch\"),\n        Task(run_blurb_release, \"Run blurb release\"),\n        Task(check_cpython_repo_is_clean, \"Checking CPython repository is clean\"),\n        Task(prepare_pydoc_topics, \"Preparing pydoc topics\"),\n        Task(bump_version, 
\"Bump version\"),\n        Task(bump_version_in_docs, \"Bump version in docs\"),\n        Task(check_cpython_repo_is_clean, \"Checking CPython repository is clean\"),\n        Task(run_autoconf, \"Running autoconf\"),\n        Task(check_cpython_repo_is_clean, \"Checking CPython repository is clean\"),\n        Task(check_pyspecific, \"Checking pyspecific\"),\n        Task(check_cpython_repo_is_clean, \"Checking CPython repository is clean\"),\n        Task(create_tag, \"Create tag\"),\n        Task(push_to_local_fork, \"Push new tags and branches to private fork\"),\n        Task(start_build_release, \"Start the build-release workflow\"),\n        Task(\n            send_email_to_platform_release_managers,\n            \"Platform release managers have been notified of the commit SHA\",\n        ),\n        Task(wait_for_build_release, \"Wait for build-release workflow\"),\n        Task(check_doc_unreleased_version, \"Check docs for `(unreleased)`\"),\n        Task(build_sbom_artifacts, \"Building SBOM artifacts\"),\n        *([] if no_gpg else [Task(sign_source_artifacts, \"Sign source artifacts\")]),\n        Task(\n            upload_files_to_downloads_server, \"Upload files to the PSF downloads server\"\n        ),\n        Task(place_files_in_download_folder, \"Place files in the download folder\"),\n        Task(upload_docs_to_the_docs_server, \"Upload docs to the PSF docs server\"),\n        Task(unpack_docs_in_the_docs_server, \"Place docs files in the docs folder\"),\n        Task(wait_until_all_files_are_in_folder, \"Wait until all files are ready\"),\n        Task(create_release_object_in_db, \"The Django release object has been created\"),\n        Task(post_release_merge, \"Merge the tag into the release branch\"),\n        Task(branch_new_versions, \"Branch out new versions and prepare main branch\"),\n        Task(post_release_tagging, \"Final touches for the release\"),\n        Task(\n            maybe_prepare_new_main_branch,\n            
\"prepare new main branch for feature freeze\",\n        ),\n        Task(push_to_upstream, \"Push new tags and branches to upstream\"),\n        Task(remove_temporary_branch, \"Removing temporary release branch\"),\n        Task(run_add_to_python_dot_org, \"Add files to python.org download page\"),\n        Task(purge_the_cdn, \"Purge the CDN of python.org/downloads\"),\n        Task(announce_release, \"Announce the release\"),\n    ]\n    automata = ReleaseDriver(\n        git_repo=args.repo,\n        release_tag=release_tag,\n        api_key=auth_key,\n        ssh_user=args.ssh_user,\n        sign_gpg=not no_gpg,\n        ssh_key=args.ssh_key,\n        tasks=tasks,\n    )\n    automata.run()\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "sbom.py",
    "content": "\"\"\"\nUtility which creates Software Bill-of-Materials (SBOM)\nfor CPython release artifacts. Can also be run manually with:\n\n    $ python sbom.py <artifact>\n\nFor example:\n\n    $ python sbom.py ./Python-3.13.0a3.tar.xz\n\n\"\"\"\n\nfrom __future__ import annotations\n\nimport argparse\nimport datetime\nimport hashlib\nimport io\nimport json\nimport os\nimport re\nimport subprocess\nimport sys\nimport tarfile\nimport typing\nimport zipfile\nfrom functools import cache\nfrom pathlib import Path\nfrom typing import Any, LiteralString, NotRequired, TypedDict, cast\nfrom urllib.request import urlopen\n\n\nclass SBOM(TypedDict):\n    SPDXID: str\n    spdxVersion: str\n    name: str\n    dataLicense: str\n    documentNamespace: str\n    creationInfo: CreationInfo\n    packages: list[Package]\n    files: list[File]\n    relationships: list[Relationship]\n\n\nclass Package(TypedDict):\n    SPDXID: str\n    name: str\n    versionInfo: str\n    packageFileName: NotRequired[str]\n    supplier: NotRequired[str]\n    originator: NotRequired[str]\n    licenseConcluded: str\n    downloadLocation: str\n    checksums: list[Checksum]\n    primaryPackagePurpose: str\n    packageVerificationCode: NotRequired[PackageVerificationCode]\n    externalRefs: list[Ref]\n    filesAnalyzed: NotRequired[bool]\n\n\nclass File(TypedDict):\n    SPDXID: str\n    fileName: str\n    checksums: list[Checksum]\n\n\nclass Relationship(TypedDict):\n    spdxElementId: str\n    relatedSpdxElement: str\n    relationshipType: str\n\n\nclass Checksum(TypedDict):\n    algorithm: str\n    checksumValue: str\n\n\nclass PackageVerificationCode(TypedDict):\n    packageVerificationCodeValue: str\n\n\nclass Ref(TypedDict):\n    referenceCategory: str\n    referenceLocator: str\n    referenceType: str\n\n\nclass CreationInfo(TypedDict):\n    created: str  # timestamp\n    creators: list[str]\n    licenseListVersion: str\n\n\n# Cache of values that we've seen already. 
We use this\n# to de-duplicate values and their corresponding SPDX ID.\n_SPDX_IDS_TO_VALUES: dict[str, Any] = {}\n\n\n@cache\ndef spdx_id(value: LiteralString) -> str:\n    \"\"\"Encode a value into characters that are valid in an SPDX ID\"\"\"\n    value_as_spdx_id = re.sub(r\"[^a-zA-Z0-9.\\-]+\", \"-\", value)\n\n    # The happy path is there are no collisions.\n    # But collisions can happen, especially in file paths.\n    # We append a hash suffix in those cases.\n    if _SPDX_IDS_TO_VALUES.setdefault(value_as_spdx_id, value) != value:\n        suffix = hashlib.sha256(value.encode()).hexdigest()[:8]\n        value_as_spdx_id = f\"{value_as_spdx_id}-{suffix}\"\n        assert _SPDX_IDS_TO_VALUES.setdefault(value_as_spdx_id, value) == value\n\n    return value_as_spdx_id\n\n\ndef calculate_package_verification_codes(sbom: SBOM) -> None:\n    \"\"\"\n    Calculate SPDX 'packageVerificationCode' values for\n    each package with 'filesAnalyzed' set to 'true'.\n    Mutates the values within the passed structure.\n\n    The code is SHA1 of a concatenated and sorted list of file SHA1s.\n    \"\"\"\n\n    # Find all packages which we need to calculate package verification codes for.\n    sbom_file_id_to_package_id = {}\n    sbom_package_id_to_file_sha1s: dict[str, list[bytes]] = {}\n    for sbom_package in sbom[\"packages\"]:\n        # If this value is 'false' we skip calculating.\n        if sbom_package.get(\"filesAnalyzed\", False):\n            sbom_package_id = sbom_package[\"SPDXID\"]\n            sbom_package_id_to_file_sha1s[sbom_package_id] = []\n\n    # Next pass we do is over relationships,\n    # we need to find all files that belong to each package.\n    for sbom_relationship in sbom[\"relationships\"]:\n        sbom_relationship_type = sbom_relationship[\"relationshipType\"]\n        sbom_element_id = sbom_relationship[\"spdxElementId\"]\n        sbom_related_element_id = sbom_relationship[\"relatedSpdxElement\"]\n\n        # We're looking for 
'<package> CONTAINS <file>' relationships\n        if (\n            sbom_relationship_type != \"CONTAINS\"\n            or sbom_element_id not in sbom_package_id_to_file_sha1s\n            or not sbom_related_element_id.startswith(\"SPDXRef-FILE-\")\n        ):\n            continue\n\n        # Found one! Add it to our mapping.\n        sbom_file_id_to_package_id[sbom_related_element_id] = sbom_element_id\n\n    # Now we do a single pass on files, appending all SHA1 values along the way.\n    for sbom_file in sbom[\"files\"]:\n        # Attempt to match this file to a package.\n        sbom_file_id = sbom_file[\"SPDXID\"]\n        if sbom_file_id not in sbom_file_id_to_package_id:\n            continue\n        sbom_package_id = sbom_file_id_to_package_id[sbom_file_id]\n\n        # Find the SHA1 checksum for the file.\n        for sbom_file_checksum in sbom_file[\"checksums\"]:\n            if sbom_file_checksum[\"algorithm\"] == \"SHA1\":\n                # We lowercase the value as that's what's required by the algorithm.\n                sbom_file_checksum_sha1 = (\n                    sbom_file_checksum[\"checksumValue\"].lower().encode(\"ascii\")\n                )\n                break\n        else:\n            raise ValueError(f\"Can't find SHA1 checksum for '{sbom_file_id}'\")\n\n        sbom_package_id_to_file_sha1s[sbom_package_id].append(sbom_file_checksum_sha1)\n\n    # Finally we iterate over the packages again and calculate the final package verification code values.\n    for sbom_package in sbom[\"packages\"]:\n        sbom_package_id = sbom_package[\"SPDXID\"]\n        if sbom_package_id not in sbom_package_id_to_file_sha1s:\n            continue\n\n        # Package verification code is the SHA1 of ASCII values ascending-sorted.\n        sbom_package_verification_code = hashlib.sha1(\n            b\"\".join(sorted(sbom_package_id_to_file_sha1s[sbom_package_id]))\n        ).hexdigest()\n\n        sbom_package[\"packageVerificationCode\"] = {\n  
          \"packageVerificationCodeValue\": sbom_package_verification_code\n        }\n\n\ndef get_release_tools_commit_sha() -> str:\n    \"\"\"Gets the git commit SHA of the release-tools repository\"\"\"\n    git_prefix = os.path.abspath(os.path.dirname(__file__))\n    stdout = (\n        subprocess.check_output(\n            [\"git\", \"rev-parse\", \"--prefix\", git_prefix, \"HEAD\"], cwd=git_prefix\n        )\n        .decode(\"ascii\")\n        .strip()\n    )\n    assert re.fullmatch(r\"^[a-f0-9]{40,}$\", stdout)\n    return stdout\n\n\ndef normalize_sbom_data(sbom_data: SBOM) -> None:\n    \"\"\"\n    Normalize SBOM data in-place by recursion\n    and sorting lists by some repeatable key.\n    \"\"\"\n\n    def recursive_sort_in_place(value: list[Any] | dict[str, Any]) -> None:\n        if isinstance(value, list):\n            # We need to recurse first so bottom-most elements are sorted first.\n            for item in value:\n                recursive_sort_in_place(item)\n\n            # Otherwise this key might change depending on the unsorted order of items.\n            value.sort(key=lambda item: json.dumps(item, sort_keys=True))\n\n        # Dictionaries are the only other containers and keys\n        # are already handled by json.dumps(sort_keys=True).\n        elif isinstance(value, dict):\n            for dict_val in value.values():\n                recursive_sort_in_place(dict_val)\n\n    recursive_sort_in_place(cast(dict[str, Any], sbom_data))\n\n\ndef check_sbom_data(sbom_data: SBOM) -> None:\n    \"\"\"Check SBOM data for common issues\"\"\"\n\n    def check_id_duplicates(sbom_components: list[Package] | list[File]) -> set[str]:\n        all_ids = set()\n        for sbom_component in sbom_components:\n            sbom_component_id = sbom_component[\"SPDXID\"]\n            assert sbom_component_id not in all_ids\n            all_ids.add(sbom_component_id)\n        return all_ids\n\n    all_package_ids = 
check_id_duplicates(sbom_data[\"packages\"])\n    all_file_ids = check_id_duplicates(sbom_data[\"files\"])\n\n    # Check that no files and packages have the same ID.\n    assert not all_package_ids.intersection(all_file_ids)\n    all_sbom_ids = all_package_ids | all_file_ids\n\n    # Check that all relationships use existing IDs.\n    for sbom_relationship in sbom_data[\"relationships\"]:\n\n        # The exception being 'DESCRIBES' with the meta 'document' ID\n        if (\n            sbom_relationship[\"spdxElementId\"] == \"SPDXRef-DOCUMENT\"\n            and sbom_relationship[\"relationshipType\"] == \"DESCRIBES\"\n        ):\n            continue\n\n        assert sbom_relationship[\"spdxElementId\"] in all_sbom_ids\n        assert sbom_relationship[\"relatedSpdxElement\"] in all_sbom_ids\n\n\ndef fetch_package_metadata_from_pypi(\n    project: str, version: str, filename: str | None = None\n) -> tuple[str, str]:\n    \"\"\"\n    Fetches the SHA256 checksum and download location from PyPI.\n    If we're given a filename then we match with that, otherwise we use wheels.\n    \"\"\"\n    # Get the package download URL from PyPI.\n    try:\n        raw_text = urlopen(f\"https://pypi.org/pypi/{project}/{version}/json\").read()\n        release_metadata = json.loads(raw_text)\n        url: dict[str, typing.Any]\n\n        # Look for a matching artifact filename and then check\n        # its remote checksum to the local one.\n        for url in release_metadata[\"urls\"]:\n            # pip can only use Python-only dependencies, so there's\n            # no risk of picking the 'incorrect' wheel here.\n            if (filename is None and url[\"packagetype\"] == \"bdist_wheel\") or (\n                filename is not None and url[\"filename\"] == filename\n            ):\n                break\n        else:\n            raise ValueError(f\"No matching filename on PyPI for '{filename}'\")\n\n        # Successfully found the download URL for the matching artifact.\n  
      download_url = url[\"url\"]\n        checksum_sha256 = url[\"digests\"][\"sha256\"]\n        return download_url, checksum_sha256\n\n    except Exception as e:\n        raise ValueError(\n            f\"Couldn't fetch metadata for project '{project}' from PyPI: {e}\"\n        )\n\n\ndef remove_pip_from_sbom(sbom_data: SBOM) -> None:\n    \"\"\"\n    Removes pip and its dependencies from the SBOM data.\n    This is only necessary if there's potential we get\n    pip SBOM data from the CPython source SBOM.\n    \"\"\"\n    sbom_pip_spdx_id = spdx_id(\"SPDXRef-PACKAGE-pip\")\n    sbom_spdx_ids_to_remove = {sbom_pip_spdx_id}\n\n    # Find all package SPDXIDs that pip depends on.\n    for sbom_relationship in sbom_data[\"relationships\"]:\n        if (\n            sbom_relationship[\"relationshipType\"] == \"DEPENDS_ON\"\n            and sbom_relationship[\"spdxElementId\"] == sbom_pip_spdx_id\n        ):\n            sbom_spdx_ids_to_remove.add(sbom_relationship[\"relatedSpdxElement\"])\n\n    # Remove all the packages and relationships.\n    sbom_data[\"packages\"] = [\n        sbom_package\n        for sbom_package in sbom_data[\"packages\"]\n        if sbom_package[\"SPDXID\"] not in sbom_spdx_ids_to_remove\n    ]\n    sbom_data[\"relationships\"] = [\n        sbom_relationship\n        for sbom_relationship in sbom_data[\"relationships\"]\n        if sbom_relationship[\"relatedSpdxElement\"] not in sbom_spdx_ids_to_remove\n    ]\n\n\ndef create_pip_sbom_from_wheel(\n    sbom_data: SBOM, pip_wheel_filename: str, pip_wheel_bytes: bytes\n) -> None:\n    \"\"\"\n    pip is a part of a packaging ecosystem (Python, surprise!) so it's actually\n    automatable to discover the metadata we need like the version and checksums\n    so let's do that on behalf of our friends at the PyPA. 
This function also\n    discovers vendored packages within pip and fetches their metadata.\n    \"\"\"\n    # Remove pip from the SBOM in case it's included in the CPython source code SBOM.\n    remove_pip_from_sbom(sbom_data)\n\n    # Wheel filename format puts the version right after the project name.\n    pip_version = pip_wheel_filename.split(\"-\")[1]\n    pip_checksum_sha256 = hashlib.sha256(pip_wheel_bytes).hexdigest()\n\n    pip_download_url, pip_actual_sha256 = fetch_package_metadata_from_pypi(\n        project=\"pip\",\n        version=pip_version,\n        filename=pip_wheel_filename,\n    )\n    if pip_actual_sha256 != pip_checksum_sha256:\n        raise ValueError(\"pip wheel checksum doesn't match PyPI\")\n\n    # Parse 'pip/_vendor/vendor.txt' from the wheel for sub-dependencies.\n    with zipfile.ZipFile(io.BytesIO(pip_wheel_bytes)) as whl:\n        vendor_txt_data = whl.read(\"pip/_vendor/vendor.txt\").decode()\n\n        # With this version regex we're assuming that pip isn't using pre-releases.\n        # If any version doesn't match we get a failure below, so we're safe doing this.\n        version_pin_re = re.compile(r\"^([a-zA-Z0-9_.-]+)==([0-9.]*[0-9])$\")\n        sbom_pip_dependency_spdx_ids = set()\n        for line in vendor_txt_data.splitlines():\n            line = line.partition(\"#\")[0].strip()  # Strip comments and whitespace.\n            if not line:  # Skip empty lines.\n                continue\n\n            # Non-empty lines we must be able to match.\n            match = version_pin_re.match(line)\n            assert (\n                match is not None\n            ), f\"Unparseable line in vendor.txt: {line!r}\"  # Make mypy happy.\n\n            # Parse out and normalize the project name.\n            project_name, project_version = match.groups()\n            project_name = project_name.lower()\n\n            # Fetch the metadata from PyPI\n            project_download_url, project_checksum_sha256 = (\n                
fetch_package_metadata_from_pypi(project_name, project_version)\n            )\n\n            # Update our SBOM data with what we received from PyPI.\n            sbom_project_spdx_id = spdx_id(f\"SPDXRef-PACKAGE-{project_name}\")\n            sbom_pip_dependency_spdx_ids.add(sbom_project_spdx_id)\n            sbom_data[\"packages\"].append(\n                {\n                    \"SPDXID\": sbom_project_spdx_id,\n                    \"name\": project_name,\n                    \"versionInfo\": project_version,\n                    \"downloadLocation\": project_download_url,\n                    \"checksums\": [\n                        {\n                            \"algorithm\": \"SHA256\",\n                            \"checksumValue\": project_checksum_sha256,\n                        }\n                    ],\n                    \"externalRefs\": [\n                        {\n                            \"referenceCategory\": \"PACKAGE_MANAGER\",\n                            \"referenceLocator\": f\"pkg:pypi/{project_name}@{project_version}\",\n                            \"referenceType\": \"purl\",\n                        },\n                    ],\n                    \"primaryPackagePurpose\": \"SOURCE\",\n                    \"licenseConcluded\": \"NOASSERTION\",\n                }\n            )\n\n    # Now we add pip to the SBOM and dependency relationships\n    sbom_pip_spdx_id = spdx_id(\"SPDXRef-PACKAGE-pip\")\n    sbom_data[\"packages\"].append(\n        {\n            \"SPDXID\": sbom_pip_spdx_id,\n            \"name\": \"pip\",\n            \"versionInfo\": pip_version,\n            \"originator\": \"Organization: Python Packaging Authority\",\n            \"licenseConcluded\": \"NOASSERTION\",\n            \"downloadLocation\": pip_download_url,\n            \"checksums\": [\n                {\"algorithm\": \"SHA256\", \"checksumValue\": pip_checksum_sha256}\n            ],\n            \"externalRefs\": [\n                {\n                
    \"referenceCategory\": \"SECURITY\",\n                    \"referenceLocator\": f\"cpe:2.3:a:pypa:pip:{pip_version}:*:*:*:*:*:*:*\",\n                    \"referenceType\": \"cpe23Type\",\n                },\n                {\n                    \"referenceCategory\": \"PACKAGE_MANAGER\",\n                    \"referenceLocator\": f\"pkg:pypi/pip@{pip_version}\",\n                    \"referenceType\": \"purl\",\n                },\n            ],\n            \"primaryPackagePurpose\": \"SOURCE\",\n        }\n    )\n    for sbom_dep_spdx_id in sorted(sbom_pip_dependency_spdx_ids):\n        sbom_data[\"relationships\"].append(\n            {\n                \"spdxElementId\": sbom_pip_spdx_id,\n                \"relatedSpdxElement\": sbom_dep_spdx_id,\n                \"relationshipType\": \"DEPENDS_ON\",\n            }\n        )\n\n    # Finally, CPython depends on pip.\n    sbom_data[\"relationships\"].append(\n        {\n            \"spdxElementId\": \"SPDXRef-PACKAGE-cpython\",\n            \"relatedSpdxElement\": sbom_pip_spdx_id,\n            \"relationshipType\": \"DEPENDS_ON\",\n        }\n    )\n\n\ndef create_cpython_sbom(\n    sbom_data: SBOM,\n    cpython_version: str,\n    artifact_path: str,\n) -> None:\n    \"\"\"Creates the top-level SBOM metadata and the CPython SBOM package.\"\"\"\n\n    if m := re.match(pat := r\"^([0-9.]+)\", cpython_version):\n        cpython_version_without_suffix = m.group(1)\n    else:\n        raise ValueError(f\"Invalid {cpython_version=}, expected {pat!r}\")\n    artifact_name = os.path.basename(artifact_path)\n    artifact_download_location = f\"https://www.python.org/ftp/python/{cpython_version_without_suffix}/{artifact_name}\"\n\n    # Take a hash of the artifact\n    with open(artifact_path, mode=\"rb\") as f:\n        artifact_checksum_sha256 = hashlib.sha256(f.read()).hexdigest()\n\n    sbom_data.update(\n        {\n            \"SPDXID\": \"SPDXRef-DOCUMENT\",\n            \"spdxVersion\": \"SPDX-2.3\",\n  
          \"name\": \"CPython SBOM\",\n            \"dataLicense\": \"CC0-1.0\",\n            # Naming done according to OpenSSF SBOM WG recommendations.\n            # See: https://github.com/ossf/sbom-everywhere/blob/main/reference/sbom_naming.md\n            \"documentNamespace\": f\"{artifact_download_location}.spdx.json\",\n            \"creationInfo\": {\n                \"created\": (\n                    datetime.datetime.now(tz=datetime.timezone.utc).strftime(\n                        \"%Y-%m-%dT%H:%M:%SZ\"\n                    )\n                ),\n                \"creators\": [\n                    \"Person: Python Release Managers\",\n                    f\"Tool: ReleaseTools-{get_release_tools_commit_sha()}\",\n                ],\n                # Version of the SPDX License ID list.\n                # This shouldn't need to be updated often, if ever.\n                \"licenseListVersion\": \"3.22\",\n            },\n        }\n    )\n\n    # Create the SBOM entry for the CPython package. 
We use\n    # the SPDXID later on for creating relationships to files.\n    sbom_cpython_package: Package = {\n        \"SPDXID\": \"SPDXRef-PACKAGE-cpython\",\n        \"name\": \"CPython\",\n        \"versionInfo\": cpython_version,\n        \"licenseConcluded\": \"PSF-2.0\",\n        \"originator\": \"Organization: Python Software Foundation\",\n        \"supplier\": \"Organization: Python Software Foundation\",\n        \"packageFileName\": artifact_name,\n        \"externalRefs\": [\n            {\n                \"referenceCategory\": \"SECURITY\",\n                \"referenceLocator\": f\"cpe:2.3:a:python:python:{cpython_version}:*:*:*:*:*:*:*\",\n                \"referenceType\": \"cpe23Type\",\n            }\n        ],\n        \"primaryPackagePurpose\": \"SOURCE\",\n        \"downloadLocation\": artifact_download_location,\n        \"checksums\": [\n            {\"algorithm\": \"SHA256\", \"checksumValue\": artifact_checksum_sha256}\n        ],\n    }\n\n    # The top-level CPython package depends on every vendored sub-package.\n    for sbom_package in sbom_data[\"packages\"]:\n        sbom_data[\"relationships\"].append(\n            {\n                \"spdxElementId\": sbom_cpython_package[\"SPDXID\"],\n                \"relatedSpdxElement\": sbom_package[\"SPDXID\"],\n                \"relationshipType\": \"DEPENDS_ON\",\n            }\n        )\n\n    sbom_data[\"packages\"].append(sbom_cpython_package)\n\n\ndef create_sbom_for_source_tarball(tarball_path: str) -> SBOM:\n    \"\"\"Stitches together an SBOM for a source tarball\"\"\"\n    tarball_name = os.path.basename(tarball_path)\n\n    # Open the tarball with known compression settings.\n    if tarball_name.endswith(\".tgz\"):\n        tarball = tarfile.open(tarball_path, mode=\"r:gz\")\n    elif tarball_name.endswith(\".tar.xz\"):\n        tarball = tarfile.open(tarball_path, mode=\"r:xz\")\n    else:\n        raise ValueError(f\"Unknown tarball format: '{tarball_name}'\")\n\n    # Parse the 
CPython version from the tarball.\n    # Calculate the download locations from the CPython version and tarball name.\n\n    if m := re.match(pat := r\"^Python-([0-9abrc.]+)\\.t\", tarball_name):\n        cpython_version = m.group(1)\n    else:\n        raise ValueError(f\"Invalid {tarball_name=}, expected {pat!r}\")\n\n    # There should be an SBOM included in the tarball.\n    # If there's not we can't create an SBOM.\n    try:\n        sbom_tarball_member = tarball.getmember(\n            f\"Python-{cpython_version}/Misc/sbom.spdx.json\"\n        )\n    except KeyError:\n        raise ValueError(\n            \"Tarball doesn't contain an SBOM at 'Misc/sbom.spdx.json'\"\n        ) from None\n    reader = tarball.extractfile(sbom_tarball_member)\n    assert reader, f\"{sbom_tarball_member} is not a file in {tarball_path}\"\n    sbom_bytes = reader.read()\n    sbom_data: SBOM = json.loads(sbom_bytes)\n\n    create_cpython_sbom(\n        sbom_data, cpython_version=cpython_version, artifact_path=tarball_path\n    )\n    sbom_cpython_package_spdx_id = spdx_id(\"SPDXRef-PACKAGE-cpython\")\n\n    # Find the pip wheel in ensurepip in the tarball\n    for member in tarball.getmembers():\n        if match := re.match(\n            rf\"^Python-{cpython_version}/Lib/ensurepip/_bundled/(pip-.*\\.whl)$\",\n            member.name,\n        ):\n            pip_wheel_filename = match.group(1)\n            reader = tarball.extractfile(member)\n            assert reader, f\"{member} is not a file in {tarball_path}\"\n            pip_wheel_bytes = reader.read()\n            break\n    else:\n        raise ValueError(\"Could not find pip wheel in 'Lib/ensurepip/_bundled/...'\")\n\n    # Now add pip to the SBOM. 
We do this after the above step to avoid\n    # CPython being dependent on packages that pip is dependent on.\n    create_pip_sbom_from_wheel(\n        sbom_data=sbom_data,\n        pip_wheel_filename=pip_wheel_filename,\n        pip_wheel_bytes=pip_wheel_bytes,\n    )\n\n    # Extract all currently known files from the SBOM with their checksums.\n    known_sbom_files = {}\n    for sbom_file in sbom_data[\"files\"]:\n        sbom_filename = sbom_file[\"fileName\"]\n\n        # Look for the expected SHA256 checksum.\n        for sbom_file_checksum in sbom_file[\"checksums\"]:\n            if sbom_file_checksum[\"algorithm\"] == \"SHA256\":\n                known_sbom_files[sbom_filename] = sbom_file_checksum[\"checksumValue\"]\n                break\n        else:\n            raise ValueError(\n                f\"Couldn't find expected SHA256 checksum in SBOM for file '{sbom_filename}'\"\n            )\n\n    # Now we walk the tarball and compare known files to our expected checksums in the SBOM.\n    # All files that aren't already in the SBOM can be added as \"CPython\" files.\n    for member in tarball.getmembers():\n        if not member.isfile():  # Only keep files (no symlinks)\n            continue\n\n        # Get the member from the tarball. CPython prefixes all of its\n        # source code with 'Python-{version}/...'.\n        assert member.name.startswith(f\"Python-{cpython_version}/\")\n\n        # Calculate the hashes, either for comparison with a known value\n        # or to embed in the SBOM as a new file. SHA1 is only used because\n        # SPDX requires it for all file entries.\n        reader = tarball.extractfile(member)\n        assert reader, f\"{member} is not a file in {tarball_path}\"\n        file_bytes = reader.read()\n        actual_file_checksum_sha1 = hashlib.sha1(file_bytes).hexdigest()\n        actual_file_checksum_sha256 = hashlib.sha256(file_bytes).hexdigest()\n\n        # Remove the 'Python-{version}/...' 
prefix for the SPDXID and fileName.\n        member_name_no_prefix = member.name.split(\"/\", 1)[1]\n\n        # We've already seen this file, so we check it hasn't been modified and continue on.\n        if member_name_no_prefix in known_sbom_files:\n            # If there's a hash mismatch we raise an error, something isn't right!\n            expected_file_checksum_sha256 = known_sbom_files.pop(member_name_no_prefix)\n            if expected_file_checksum_sha256 != actual_file_checksum_sha256:\n                raise ValueError(\n                    f\"Mismatched checksum for file '{member_name_no_prefix}'\"\n                )\n\n        # If this is a new file, then it's a part of the 'CPython' SBOM package.\n        else:\n            sbom_file_spdx_id = spdx_id(f\"SPDXRef-FILE-{member_name_no_prefix}\")\n            sbom_data[\"files\"].append(\n                {\n                    \"SPDXID\": sbom_file_spdx_id,\n                    \"fileName\": member_name_no_prefix,\n                    \"checksums\": [\n                        {\n                            \"algorithm\": \"SHA1\",\n                            \"checksumValue\": actual_file_checksum_sha1,\n                        },\n                        {\n                            \"algorithm\": \"SHA256\",\n                            \"checksumValue\": actual_file_checksum_sha256,\n                        },\n                    ],\n                }\n            )\n            sbom_data[\"relationships\"].append(\n                {\n                    \"spdxElementId\": sbom_cpython_package_spdx_id,\n                    \"relatedSpdxElement\": sbom_file_spdx_id,\n                    \"relationshipType\": \"CONTAINS\",\n                }\n            )\n\n    # If there are any known files that weren't found in the\n    # source tarball we want to raise an error.\n    if known_sbom_files:\n        raise ValueError(\n            f\"Some files from source SBOM aren't accounted for \"\n            
f\"in source tarball: {sorted(known_sbom_files)!r}\"\n        )\n\n    # Final relationship, this SBOM describes the CPython package.\n    sbom_data[\"relationships\"].append(\n        {\n            \"spdxElementId\": \"SPDXRef-DOCUMENT\",\n            \"relatedSpdxElement\": sbom_cpython_package_spdx_id,\n            \"relationshipType\": \"DESCRIBES\",\n        }\n    )\n\n    # Apply the 'supplier' tag to every package since we're shipping\n    # the package in the tarball itself. Originator field is used for maintainers.\n    for sbom_package in sbom_data[\"packages\"]:\n        sbom_package[\"supplier\"] = \"Organization: Python Software Foundation\"\n        sbom_package[\"filesAnalyzed\"] = True\n\n    # Calculate the 'packageVerificationCode' values for files in packages.\n    calculate_package_verification_codes(sbom_data)\n\n    return sbom_data\n\n\ndef create_sbom_for_windows_artifact(\n    artifact_path: str, cpython_source_dir: Path | str\n) -> SBOM:\n    artifact_name = os.path.basename(artifact_path)\n    if m := re.match(pat := r\"^python-([0-9abrc.]+)t?(?:-|\\.exe|\\.zip)\", artifact_name):\n        cpython_version = m.group(1)\n    else:\n        raise ValueError(f\"Invalid {artifact_name=}, expected {pat!r}\")\n\n    if not cpython_source_dir:\n        raise ValueError(\"Must specify --cpython-source-dir for Windows artifacts\")\n    cpython_source_dir = Path(cpython_source_dir)\n\n    # Start with the CPython source SBOM as a base\n    with (cpython_source_dir / \"Misc/externals.spdx.json\").open() as f:\n        sbom_data: SBOM = json.loads(f.read())\n\n    sbom_data[\"relationships\"] = []\n    sbom_data[\"files\"] = []\n\n    # Add all the packages from the source SBOM\n    # We want to skip the file information because\n    # the files aren't available in Windows artifacts.\n    with (cpython_source_dir / \"Misc/sbom.spdx.json\").open() as f:\n        source_sbom_data = json.loads(f.read())\n        for sbom_package in 
source_sbom_data[\"packages\"]:\n            # Update the SPDX ID to avoid collisions with\n            # the 'externals' SBOM.\n            sbom_package[\"SPDXID\"] = spdx_id(\n                f\"SPDXRef-PACKAGE-{sbom_package['name']}-{sbom_package['versionInfo']}\"\n            )\n            sbom_data[\"packages\"].append(sbom_package)\n\n    create_cpython_sbom(\n        sbom_data, cpython_version=cpython_version, artifact_path=artifact_path\n    )\n    sbom_cpython_package_spdx_id = spdx_id(\"SPDXRef-PACKAGE-cpython\")\n\n    # The Windows embed artifacts don't contain pip/ensurepip,\n    # but the others do.\n    if \"-embed\" not in artifact_name:\n\n        # Find the pip wheel in ensurepip in the source code\n        for pathname in os.listdir(cpython_source_dir / \"Lib/ensurepip/_bundled\"):\n            if pathname.startswith(\"pip-\") and pathname.endswith(\".whl\"):\n                pip_wheel_filename = pathname\n                pip_wheel_bytes = (\n                    cpython_source_dir / f\"Lib/ensurepip/_bundled/{pathname}\"\n                ).read_bytes()\n                break\n        else:\n            raise ValueError(\"Could not find pip wheel in 'Lib/ensurepip/_bundled/...'\")\n\n        create_pip_sbom_from_wheel(\n            sbom_data,\n            pip_wheel_filename=pip_wheel_filename,\n            pip_wheel_bytes=pip_wheel_bytes,\n        )\n\n    # Final relationship, this SBOM describes the CPython package.\n    sbom_data[\"relationships\"].append(\n        {\n            \"spdxElementId\": \"SPDXRef-DOCUMENT\",\n            \"relatedSpdxElement\": sbom_cpython_package_spdx_id,\n            \"relationshipType\": \"DESCRIBES\",\n        }\n    )\n\n    # Apply the 'supplier' tag to every package since we're shipping\n    # the package in the artifact itself. 
Originator field is used for maintainers.\n    for sbom_package in sbom_data[\"packages\"]:\n        sbom_package[\"supplier\"] = \"Organization: Python Software Foundation\"\n        # Source packages have been compiled.\n        if sbom_package[\"primaryPackagePurpose\"] == \"SOURCE\":\n            sbom_package[\"primaryPackagePurpose\"] = \"LIBRARY\"\n\n    return sbom_data\n\n\ndef main() -> None:\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\"--cpython-source-dir\", default=None)\n    parser.add_argument(\"artifacts\", nargs=\"+\")\n    parsed_args = parser.parse_args(sys.argv[1:])\n\n    artifact_paths = parsed_args.artifacts\n    cpython_source_dir = parsed_args.cpython_source_dir\n\n    for artifact_path in artifact_paths:\n        # Windows MSI and Embed artifacts\n        if artifact_path.endswith(\".exe\") or artifact_path.endswith(\".zip\"):\n            sbom_data = create_sbom_for_windows_artifact(\n                artifact_path, cpython_source_dir=cpython_source_dir\n            )\n        # Source artifacts\n        else:\n            sbom_data = create_sbom_for_source_tarball(artifact_path)\n\n        # Normalize SBOM data for reproducibility.\n        normalize_sbom_data(sbom_data)\n\n        # Check SBOM for validity.\n        check_sbom_data(sbom_data)\n\n        with open(artifact_path + \".spdx.json\", mode=\"w\") as f:\n            f.truncate()\n            f.write(json.dumps(sbom_data, indent=2, sort_keys=True))\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "select_jobs.py",
    "content": "#!/usr/bin/env python3\n\nimport argparse\n\nfrom release import Tag\n\n\ndef output(key: str, value: bool) -> None:\n    print(f\"{key}={str(value).lower()}\")\n\n\ndef main() -> None:\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\"version\", type=Tag)\n    parser.add_argument(\n        \"--test\",\n        action=\"store_true\",\n        help=\"Enable all jobs for testing\",\n    )\n    args = parser.parse_args()\n    version = args.version\n\n    if args.test:\n        # When testing the workflow itself (push/PR),\n        # enable all jobs for full coverage.\n        output(\"docs\", True)\n        output(\"android\", True)\n        output(\"ios\", True)\n        return\n\n    # Docs are only built for stable releases or release candidates.\n    output(\"docs\", version.level in [\"rc\", \"f\"])\n\n    # Android binary releases began in Python 3.14.\n    output(\"android\", version.as_tuple() >= (3, 14))\n\n    # iOS binary releases began in Python 3.15.\n    output(\"ios\", version.as_tuple() >= (3, 15))\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "tests/README.rst",
    "content": "This is Python version 3.14.0 alpha 3\n=====================================\n\n.. image:: https://github.com/python/cpython/actions/workflows/build.yml/badge.svg?branch=main&event=push\n   :alt: CPython build status on GitHub Actions\n   :target: https://github.com/python/cpython/actions\n\n.. image:: https://dev.azure.com/python/cpython/_apis/build/status/Azure%20Pipelines%20CI?branchName=main\n   :alt: CPython build status on Azure DevOps\n   :target: https://dev.azure.com/python/cpython/_build/latest?definitionId=4&branchName=main\n\n.. image:: https://img.shields.io/badge/discourse-join_chat-brightgreen.svg\n   :alt: Python Discourse chat\n   :target: https://discuss.python.org/\n\n\nCopyright © 2001 Python Software Foundation.  All rights reserved.\n\nSee the end of this file for further copyright and license information.\n\n.. contents::\n\nGeneral Information\n-------------------\n\n- Website: https://www.python.org\n- Source code: https://github.com/python/cpython\n- Issue tracker: https://github.com/python/cpython/issues\n- Documentation: https://docs.python.org\n- Developer's Guide: https://devguide.python.org/\n\nContributing to CPython\n-----------------------\n\nFor more complete instructions on contributing to CPython development,\nsee the `Developer Guide`_.\n\n.. _Developer Guide: https://devguide.python.org/\n\nUsing Python\n------------\n\nInstallable Python kits, and information about using Python, are available at\n`python.org`_.\n\n.. _python.org: https://www.python.org/\n\nBuild Instructions\n------------------\n\nOn Unix, Linux, BSD, macOS, and Cygwin::\n\n    ./configure\n    make\n    make test\n    sudo make install\n\nThis will install Python as ``python3``.\n\nYou can pass many options to the configure script; run ``./configure --help``\nto find out more.  
On macOS case-insensitive file systems and on Cygwin,\nthe executable is called ``python.exe``; elsewhere it's just ``python``.\n\nBuilding a complete Python installation requires the use of various\nadditional third-party libraries, depending on your build platform and\nconfigure options.  Not all standard library modules are buildable or\nusable on all platforms.  Refer to the\n`Install dependencies <https://devguide.python.org/getting-started/setup-building.html#build-dependencies>`_\nsection of the `Developer Guide`_ for current detailed information on\ndependencies for various Linux distributions and macOS.\n\nOn macOS, there are additional configure and build options related\nto macOS framework and universal builds.  Refer to `Mac/README.rst\n<https://github.com/python/cpython/blob/main/Mac/README.rst>`_.\n\nOn Windows, see `PCbuild/readme.txt\n<https://github.com/python/cpython/blob/main/PCbuild/readme.txt>`_.\n\nTo build Windows installer, see `Tools/msi/README.txt\n<https://github.com/python/cpython/blob/main/Tools/msi/README.txt>`_.\n\nIf you wish, you can create a subdirectory and invoke configure from there.\nFor example::\n\n    mkdir debug\n    cd debug\n    ../configure --with-pydebug\n    make\n    make test\n\n(This will fail if you *also* built at the top-level directory.  You should do\na ``make clean`` at the top-level first.)\n\nTo get an optimized build of Python, ``configure --enable-optimizations``\nbefore you run ``make``.  This sets the default make targets up to enable\nProfile Guided Optimization (PGO) and may be used to auto-enable Link Time\nOptimization (LTO) on some platforms.  For more details, see the sections\nbelow.\n\nProfile Guided Optimization\n^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\nPGO takes advantage of recent versions of the GCC or Clang compilers.  
If used,\neither via ``configure --enable-optimizations`` or by manually running\n``make profile-opt`` regardless of configure flags, the optimized build\nprocess will perform the following steps:\n\nThe entire Python directory is cleaned of temporary files that may have\nresulted from a previous compilation.\n\nAn instrumented version of the interpreter is built, using suitable compiler\nflags for each flavor. Note that this is just an intermediary step.  The\nbinary resulting from this step is not good for real-life workloads as it has\nprofiling instructions embedded inside.\n\nAfter the instrumented interpreter is built, the Makefile will run a training\nworkload.  This is necessary in order to profile the interpreter's execution.\nNote also that any output, both stdout and stderr, that may appear at this step\nis suppressed.\n\nThe final step is to build the actual interpreter, using the information\ncollected from the instrumented one.  The end result will be a Python binary\nthat is optimized; suitable for distribution or production installation.\n\n\nLink Time Optimization\n^^^^^^^^^^^^^^^^^^^^^^\n\nEnabled via configure's ``--with-lto`` flag.  LTO takes advantage of the\nability of recent compiler toolchains to optimize across the otherwise\narbitrary ``.o`` file boundary when building final executables or shared\nlibraries for additional performance gains.\n\n\nWhat's New\n----------\n\nWe have a comprehensive overview of the changes in the `What's New in Python\n3.14 <https://docs.python.org/3.14/whatsnew/3.14.html>`_ document.  
For a more\ndetailed change log, read `Misc/NEWS\n<https://github.com/python/cpython/tree/main/Misc/NEWS.d>`_, but a full\naccounting of changes can only be gleaned from the `commit history\n<https://github.com/python/cpython/commits/main>`_.\n\nIf you want to install multiple versions of Python, see the section below\nentitled \"Installing multiple versions\".\n\n\nDocumentation\n-------------\n\n`Documentation for Python 3.14 <https://docs.python.org/3.14/>`_ is online,\nupdated daily.\n\nIt can also be downloaded in many formats for faster access.  The documentation\nis downloadable in HTML, EPUB, and reStructuredText formats; the latter version\nis primarily for documentation authors, translators, and people with special\nformatting requirements.\n\nFor information about building Python's documentation, refer to `Doc/README.rst\n<https://github.com/python/cpython/blob/main/Doc/README.rst>`_.\n\n\nTesting\n-------\n\nTo test the interpreter, type ``make test`` in the top-level directory.  The\ntest set produces some output.  You can generally ignore the messages about\nskipped tests due to optional features which can't be imported.  If a message\nis printed about a failed test or a traceback or core dump is produced,\nsomething is wrong.\n\nBy default, tests are prevented from overusing resources like disk space and\nmemory.  To enable these tests, run ``make buildbottest``.\n\nIf any tests fail, you can re-run the failing test(s) in verbose mode.  
For\nexample, if ``test_os`` and ``test_gdb`` failed, you can run::\n\n    make test TESTOPTS=\"-v test_os test_gdb\"\n\nIf the failure persists and appears to be a problem with Python rather than\nyour environment, you can `file a bug report\n<https://github.com/python/cpython/issues>`_ and include relevant output from\nthat command to show the issue.\n\nSee `Running & Writing Tests <https://devguide.python.org/testing/run-write-tests.html>`_\nfor more on running tests.\n\nInstalling multiple versions\n----------------------------\n\nOn Unix and Mac systems if you intend to install multiple versions of Python\nusing the same installation prefix (``--prefix`` argument to the configure\nscript) you must take care that your primary python executable is not\noverwritten by the installation of a different version.  All files and\ndirectories installed using ``make altinstall`` contain the major and minor\nversion and can thus live side-by-side.  ``make install`` also creates\n``${prefix}/bin/python3`` which refers to ``${prefix}/bin/python3.X``.  If you\nintend to install multiple versions using the same prefix you must decide which\nversion (if any) is your \"primary\" version.  Install that version using\n``make install``.  Install all other versions using ``make altinstall``.\n\nFor example, if you want to install Python 2.7, 3.6, and 3.14 with 3.14 being the\nprimary version, you would execute ``make install`` in your 3.14 build directory\nand ``make altinstall`` in the others.\n\n\nRelease Schedule\n----------------\n\nSee `PEP 745 <https://peps.python.org/pep-0745/>`__ for Python 3.14 release details.\n\n\nCopyright and License Information\n---------------------------------\n\n\nCopyright © 2001 Python Software Foundation.  All rights reserved.\n\nCopyright © 2000 BeOpen.com.  All rights reserved.\n\nCopyright © 1995-2001 Corporation for National Research Initiatives.  All\nrights reserved.\n\nCopyright © 1991-1995 Stichting Mathematisch Centrum.  
All rights reserved.\n\nSee the `LICENSE <https://github.com/python/cpython/blob/main/LICENSE>`_ for\ninformation on the history of this software, terms & conditions for usage, and a\nDISCLAIMER OF ALL WARRANTIES.\n\nThis Python distribution contains *no* GNU General Public License (GPL) code,\nso it may be used in proprietary projects.  There are interfaces to some GNU\ncode but these are entirely optional.\n\nAll trademarks referenced herein are property of their respective holders.\n"
  },
  {
    "path": "tests/__init__.py",
    "content": ""
  },
  {
    "path": "tests/buildbotapi/builders.json",
    "content": "{\n    \"builders\": [\n        {\n            \"builderid\": 3,\n            \"description\": null,\n            \"description_format\": null,\n            \"description_html\": null,\n            \"masterids\": [\n                1\n            ],\n            \"name\": \"AMD64 RHEL8 LTO 3.13\",\n            \"projectid\": null,\n            \"tags\": [\n                \"3.13\",\n                \"stable\",\n                \"lto\",\n                \"nondebug\",\n                \"tier-1\"\n            ]\n        },\n        {\n            \"builderid\": 1623,\n            \"description\": null,\n            \"description_format\": null,\n            \"description_html\": null,\n            \"masterids\": [\n                1\n            ],\n            \"name\": \"AMD64 Windows PGO NoGIL PR\",\n            \"projectid\": null,\n            \"tags\": [\n                \"PullRequest\",\n                \"unstable\",\n                \"win64\",\n                \"nogil\",\n                \"nondebug\",\n                \"pgo\",\n                \"tier-1\"\n            ]\n        }\n    ],\n    \"meta\": {\n        \"total\": 2\n    }\n}\n"
  },
  {
    "path": "tests/buildbotapi/failure.json",
    "content": "{\n  \"builds\": [\n    {\n      \"builderid\": 3,\n      \"buildid\": 1732278,\n      \"buildrequestid\": 2341889,\n      \"complete\": true,\n      \"complete_at\": 1734198808,\n      \"locks_duration_s\": 0,\n      \"masterid\": 1,\n      \"number\": 228,\n      \"properties\": {},\n      \"results\": 2,\n      \"started_at\": 1734197714,\n      \"state_string\": \"failed test (failure)\",\n      \"workerid\": 28\n    }\n  ],\n  \"meta\": {\n    \"total\": 1\n  }\n}\n"
  },
  {
    "path": "tests/buildbotapi/no-builds.json",
    "content": "{\n  \"builds\": [\n  ],\n  \"meta\": {\n    \"total\": 0\n  }\n}\n"
  },
  {
    "path": "tests/buildbotapi/success.json",
    "content": "{\n  \"builds\": [\n    {\n      \"builderid\": 3,\n      \"buildid\": 1645411,\n      \"buildrequestid\": 2211085,\n      \"complete\": true,\n      \"complete_at\": 1728312495,\n      \"locks_duration_s\": 531,\n      \"masterid\": 1,\n      \"number\": 6844,\n      \"properties\": {},\n      \"results\": 0,\n      \"started_at\": 1728311538,\n      \"state_string\": \"build successful\",\n      \"workerid\": 27\n    }\n  ],\n  \"meta\": {\n    \"total\": 1\n  }\n}\n"
  },
  {
    "path": "tests/fake-artifact.txt",
    "content": ""
  },
  {
    "path": "tests/fake-ftp-files.txt",
    "content": "# Test data only, doesn't need to be updated for each release\nPython-3.14.0a1.tar.xz\nPython-3.14.0a1.tar.xz.crt\nPython-3.14.0a1.tar.xz.sig\nPython-3.14.0a1.tar.xz.sigstore\nPython-3.14.0a1.tar.xz.spdx.json\nPython-3.14.0a1.tgz\nPython-3.14.0a1.tgz.crt\nPython-3.14.0a1.tgz.sig\nPython-3.14.0a1.tgz.sigstore\nPython-3.14.0a1.tgz.spdx.json\nPython-3.14.0a2.tar.xz\nPython-3.14.0a2.tar.xz.crt\nPython-3.14.0a2.tar.xz.sig\nPython-3.14.0a2.tar.xz.sigstore\nPython-3.14.0a2.tar.xz.spdx.json\nPython-3.14.0a2.tgz\nPython-3.14.0a2.tgz.crt\nPython-3.14.0a2.tgz.sig\nPython-3.14.0a2.tgz.sigstore\nPython-3.14.0a2.tgz.spdx.json\nPython-3.14.0a3.tar.xz\nPython-3.14.0a3.tar.xz.crt\nPython-3.14.0a3.tar.xz.sig\nPython-3.14.0a3.tar.xz.sigstore\nPython-3.14.0a3.tar.xz.spdx.json\nPython-3.14.0a3.tgz\nPython-3.14.0a3.tgz.crt\nPython-3.14.0a3.tgz.sig\nPython-3.14.0a3.tgz.sigstore\nPython-3.14.0a3.tgz.spdx.json\nPython-3.14.0a4.tar.xz\nPython-3.14.0a4.tar.xz.crt\nPython-3.14.0a4.tar.xz.sig\nPython-3.14.0a4.tar.xz.sigstore\nPython-3.14.0a4.tar.xz.spdx.json\nPython-3.14.0a4.tgz\nPython-3.14.0a4.tgz.crt\nPython-3.14.0a4.tgz.sig\nPython-3.14.0a4.tgz.sigstore\nPython-3.14.0a4.tgz.spdx.json\nPython-3.14.0a5.tar.xz\nPython-3.14.0a5.tar.xz.crt\nPython-3.14.0a5.tar.xz.sig\nPython-3.14.0a5.tar.xz.sigstore\nPython-3.14.0a5.tar.xz.spdx.json\nPython-3.14.0a5.tgz\nPython-3.14.0a5.tgz.crt\nPython-3.14.0a5.tgz.sig\nPython-3.14.0a5.tgz.sigstore\nPython-3.14.0a5.tgz.spdx.json\nPython-3.14.0a6.tar.xz\nPython-3.14.0a6.tar.xz.crt\nPython-3.14.0a6.tar.xz.sig\nPython-3.14.0a6.tar.xz.sigstore\nPython-3.14.0a6.tar.xz.spdx.json\nPython-3.14.0a6.tgz\nPython-3.14.0a6.tgz.crt\nPython-3.14.0a6.tgz.sig\nPython-3.14.0a6.tgz.sigstore\nPython-3.14.0a6.tgz.spdx.json\nPython-3.14.0a7.tar.xz\nPython-3.14.0a7.tar.xz.crt\nPython-3.14.0a7.tar.xz.sig\nPython-3.14.0a7.tar.xz.sigstore\nPython-3.14.0a7.tar.xz.spdx.json\nPython-3.14.0a7.tgz\nPython-3.14.0a7.tgz.crt\nPython-3.14.0a7.tgz.sig\nPython-3.14.0a7.tgz.sigstore\
nPython-3.14.0a7.tgz.spdx.json\nPython-3.14.0b1.tar.xz\nPython-3.14.0b1.tar.xz.crt\nPython-3.14.0b1.tar.xz.sig\nPython-3.14.0b1.tar.xz.sigstore\nPython-3.14.0b1.tar.xz.spdx.json\nPython-3.14.0b1.tgz\nPython-3.14.0b1.tgz.crt\nPython-3.14.0b1.tgz.sig\nPython-3.14.0b1.tgz.sigstore\nPython-3.14.0b1.tgz.spdx.json\nPython-3.14.0b2.tar.xz\nPython-3.14.0b2.tar.xz.crt\nPython-3.14.0b2.tar.xz.sig\nPython-3.14.0b2.tar.xz.sigstore\nPython-3.14.0b2.tar.xz.spdx.json\nPython-3.14.0b2.tgz\nPython-3.14.0b2.tgz.crt\nPython-3.14.0b2.tgz.sig\nPython-3.14.0b2.tgz.sigstore\nPython-3.14.0b2.tgz.spdx.json\nPython-3.14.0b3.tar.xz\nPython-3.14.0b3.tar.xz.crt\nPython-3.14.0b3.tar.xz.sig\nPython-3.14.0b3.tar.xz.sigstore\nPython-3.14.0b3.tar.xz.spdx.json\nPython-3.14.0b3.tgz\nPython-3.14.0b3.tgz.crt\nPython-3.14.0b3.tgz.sig\nPython-3.14.0b3.tgz.sigstore\nPython-3.14.0b3.tgz.spdx.json\namd64a1/\namd64a2/\namd64a3/\namd64a4/\namd64a5/\namd64a6/\namd64a7/\namd64b1/\namd64b2/\namd64b3/\narm64a1/\narm64a2/\narm64a3/\narm64a4/\narm64a5/\narm64a6/\narm64a7/\narm64b1/\narm64b2/\narm64b3/\npython-3.14.0a1-amd64.exe\npython-3.14.0a1-amd64.exe.crt\npython-3.14.0a1-amd64.exe.sig\npython-3.14.0a1-amd64.exe.sigstore\npython-3.14.0a1-amd64.exe.spdx.json\npython-3.14.0a1-amd64.zip\npython-3.14.0a1-arm64.exe\npython-3.14.0a1-arm64.exe.crt\npython-3.14.0a1-arm64.exe.sig\npython-3.14.0a1-arm64.exe.sigstore\npython-3.14.0a1-arm64.exe.spdx.json\npython-3.14.0a1-arm64.zip\npython-3.14.0a1-embed-amd64.zip\npython-3.14.0a1-embed-amd64.zip.crt\npython-3.14.0a1-embed-amd64.zip.sig\npython-3.14.0a1-embed-amd64.zip.sigstore\npython-3.14.0a1-embed-amd64.zip.spdx.json\npython-3.14.0a1-embed-arm64.zip\npython-3.14.0a1-embed-arm64.zip.crt\npython-3.14.0a1-embed-arm64.zip.sig\npython-3.14.0a1-embed-arm64.zip.sigstore\npython-3.14.0a1-embed-arm64.zip.spdx.json\npython-3.14.0a1-embed-win32.zip\npython-3.14.0a1-embed-win32.zip.crt\npython-3.14.0a1-embed-win32.zip.sig\npython-3.14.0a1-embed-win32.zip.sigstore\npython-3.14.0a1-embe
d-win32.zip.spdx.json\npython-3.14.0a1-embeddable-amd64.zip\npython-3.14.0a1-embeddable-arm64.zip\npython-3.14.0a1-embeddable-win32.zip\npython-3.14.0a1-macos11.pkg\npython-3.14.0a1-macos11.pkg.crt\npython-3.14.0a1-macos11.pkg.sig\npython-3.14.0a1-macos11.pkg.sigstore\npython-3.14.0a1-test-amd64.zip\npython-3.14.0a1-test-arm64.zip\npython-3.14.0a1-test-win32.zip\npython-3.14.0a1-win32.zip\npython-3.14.0a1.exe\npython-3.14.0a1.exe.crt\npython-3.14.0a1.exe.sig\npython-3.14.0a1.exe.sigstore\npython-3.14.0a1.exe.spdx.json\npython-3.14.0a1t-amd64.zip\npython-3.14.0a1t-arm64.zip\npython-3.14.0a1t-win32.zip\npython-3.14.0a2-amd64.exe\npython-3.14.0a2-amd64.exe.crt\npython-3.14.0a2-amd64.exe.sig\npython-3.14.0a2-amd64.exe.sigstore\npython-3.14.0a2-amd64.exe.spdx.json\npython-3.14.0a2-amd64.zip\npython-3.14.0a2-arm64.exe\npython-3.14.0a2-arm64.exe.crt\npython-3.14.0a2-arm64.exe.sig\npython-3.14.0a2-arm64.exe.sigstore\npython-3.14.0a2-arm64.exe.spdx.json\npython-3.14.0a2-arm64.zip\npython-3.14.0a2-embed-amd64.zip\npython-3.14.0a2-embed-amd64.zip.crt\npython-3.14.0a2-embed-amd64.zip.sig\npython-3.14.0a2-embed-amd64.zip.sigstore\npython-3.14.0a2-embed-amd64.zip.spdx.json\npython-3.14.0a2-embed-arm64.zip\npython-3.14.0a2-embed-arm64.zip.crt\npython-3.14.0a2-embed-arm64.zip.sig\npython-3.14.0a2-embed-arm64.zip.sigstore\npython-3.14.0a2-embed-arm64.zip.spdx.json\npython-3.14.0a2-embed-win32.zip\npython-3.14.0a2-embed-win32.zip.crt\npython-3.14.0a2-embed-win32.zip.sig\npython-3.14.0a2-embed-win32.zip.sigstore\npython-3.14.0a2-embed-win32.zip.spdx.json\npython-3.14.0a2-embeddable-amd64.zip\npython-3.14.0a2-embeddable-arm64.zip\npython-3.14.0a2-embeddable-win32.zip\npython-3.14.0a2-macos11.pkg\npython-3.14.0a2-macos11.pkg.crt\npython-3.14.0a2-macos11.pkg.sig\npython-3.14.0a2-macos11.pkg.sigstore\npython-3.14.0a2-test-amd64.zip\npython-3.14.0a2-test-arm64.zip\npython-3.14.0a2-test-win32.zip\npython-3.14.0a2-win32.zip\npython-3.14.0a2.exe\npython-3.14.0a2.exe.crt\npython-3.14.0a2.exe.s
ig\npython-3.14.0a2.exe.sigstore\npython-3.14.0a2.exe.spdx.json\npython-3.14.0a2t-amd64.zip\npython-3.14.0a2t-arm64.zip\npython-3.14.0a2t-win32.zip\npython-3.14.0a3-amd64.exe\npython-3.14.0a3-amd64.exe.crt\npython-3.14.0a3-amd64.exe.sig\npython-3.14.0a3-amd64.exe.sigstore\npython-3.14.0a3-amd64.exe.spdx.json\npython-3.14.0a3-amd64.zip\npython-3.14.0a3-arm64.exe\npython-3.14.0a3-arm64.exe.crt\npython-3.14.0a3-arm64.exe.sig\npython-3.14.0a3-arm64.exe.sigstore\npython-3.14.0a3-arm64.exe.spdx.json\npython-3.14.0a3-arm64.zip\npython-3.14.0a3-embed-amd64.zip\npython-3.14.0a3-embed-amd64.zip.crt\npython-3.14.0a3-embed-amd64.zip.sig\npython-3.14.0a3-embed-amd64.zip.sigstore\npython-3.14.0a3-embed-amd64.zip.spdx.json\npython-3.14.0a3-embed-arm64.zip\npython-3.14.0a3-embed-arm64.zip.crt\npython-3.14.0a3-embed-arm64.zip.sig\npython-3.14.0a3-embed-arm64.zip.sigstore\npython-3.14.0a3-embed-arm64.zip.spdx.json\npython-3.14.0a3-embed-win32.zip\npython-3.14.0a3-embed-win32.zip.crt\npython-3.14.0a3-embed-win32.zip.sig\npython-3.14.0a3-embed-win32.zip.sigstore\npython-3.14.0a3-embed-win32.zip.spdx.json\npython-3.14.0a3-embeddable-amd64.zip\npython-3.14.0a3-embeddable-arm64.zip\npython-3.14.0a3-embeddable-win32.zip\npython-3.14.0a3-macos11.pkg\npython-3.14.0a3-macos11.pkg.crt\npython-3.14.0a3-macos11.pkg.sig\npython-3.14.0a3-macos11.pkg.sigstore\npython-3.14.0a3-test-amd64.zip\npython-3.14.0a3-test-arm64.zip\npython-3.14.0a3-test-win32.zip\npython-3.14.0a3-win32.zip\npython-3.14.0a3.exe\npython-3.14.0a3.exe.crt\npython-3.14.0a3.exe.sig\npython-3.14.0a3.exe.sigstore\npython-3.14.0a3.exe.spdx.json\npython-3.14.0a3t-amd64.zip\npython-3.14.0a3t-arm64.zip\npython-3.14.0a3t-win32.zip\npython-3.14.0a4-amd64.exe\npython-3.14.0a4-amd64.exe.crt\npython-3.14.0a4-amd64.exe.sig\npython-3.14.0a4-amd64.exe.sigstore\npython-3.14.0a4-amd64.exe.spdx.json\npython-3.14.0a4-amd64.zip\npython-3.14.0a4-arm64.exe\npython-3.14.0a4-arm64.exe.crt\npython-3.14.0a4-arm64.exe.sig\npython-3.14.0a4-arm64.exe.sigstor
e\npython-3.14.0a4-arm64.exe.spdx.json\npython-3.14.0a4-arm64.zip\npython-3.14.0a4-embed-amd64.zip\npython-3.14.0a4-embed-amd64.zip.crt\npython-3.14.0a4-embed-amd64.zip.sig\npython-3.14.0a4-embed-amd64.zip.sigstore\npython-3.14.0a4-embed-amd64.zip.spdx.json\npython-3.14.0a4-embed-arm64.zip\npython-3.14.0a4-embed-arm64.zip.crt\npython-3.14.0a4-embed-arm64.zip.sig\npython-3.14.0a4-embed-arm64.zip.sigstore\npython-3.14.0a4-embed-arm64.zip.spdx.json\npython-3.14.0a4-embed-win32.zip\npython-3.14.0a4-embed-win32.zip.crt\npython-3.14.0a4-embed-win32.zip.sig\npython-3.14.0a4-embed-win32.zip.sigstore\npython-3.14.0a4-embed-win32.zip.spdx.json\npython-3.14.0a4-embeddable-amd64.zip\npython-3.14.0a4-embeddable-arm64.zip\npython-3.14.0a4-embeddable-win32.zip\npython-3.14.0a4-macos11.pkg\npython-3.14.0a4-macos11.pkg.crt\npython-3.14.0a4-macos11.pkg.sig\npython-3.14.0a4-macos11.pkg.sigstore\npython-3.14.0a4-test-amd64.zip\npython-3.14.0a4-test-arm64.zip\npython-3.14.0a4-test-win32.zip\npython-3.14.0a4-win32.zip\npython-3.14.0a4.exe\npython-3.14.0a4.exe.crt\npython-3.14.0a4.exe.sig\npython-3.14.0a4.exe.sigstore\npython-3.14.0a4.exe.spdx.json\npython-3.14.0a4t-amd64.zip\npython-3.14.0a4t-arm64.zip\npython-3.14.0a4t-win32.zip\npython-3.14.0a5-amd64.exe\npython-3.14.0a5-amd64.exe.crt\npython-3.14.0a5-amd64.exe.sig\npython-3.14.0a5-amd64.exe.sigstore\npython-3.14.0a5-amd64.exe.spdx.json\npython-3.14.0a5-arm64.exe\npython-3.14.0a5-arm64.exe.crt\npython-3.14.0a5-arm64.exe.sig\npython-3.14.0a5-arm64.exe.sigstore\npython-3.14.0a5-arm64.exe.spdx.json\npython-3.14.0a5-embed-amd64.zip\npython-3.14.0a5-embed-amd64.zip.crt\npython-3.14.0a5-embed-amd64.zip.sig\npython-3.14.0a5-embed-amd64.zip.sigstore\npython-3.14.0a5-embed-amd64.zip.spdx.json\npython-3.14.0a5-embed-arm64.zip\npython-3.14.0a5-embed-arm64.zip.crt\npython-3.14.0a5-embed-arm64.zip.sig\npython-3.14.0a5-embed-arm64.zip.sigstore\npython-3.14.0a5-embed-arm64.zip.spdx.json\npython-3.14.0a5-embed-win32.zip\npython-3.14.0a5-embed-win32.zi
p.crt\npython-3.14.0a5-embed-win32.zip.sig\npython-3.14.0a5-embed-win32.zip.sigstore\npython-3.14.0a5-embed-win32.zip.spdx.json\npython-3.14.0a5-macos11.pkg\npython-3.14.0a5-macos11.pkg.crt\npython-3.14.0a5-macos11.pkg.sig\npython-3.14.0a5-macos11.pkg.sigstore\npython-3.14.0a5.exe\npython-3.14.0a5.exe.crt\npython-3.14.0a5.exe.sig\npython-3.14.0a5.exe.sigstore\npython-3.14.0a5.exe.spdx.json\npython-3.14.0a6-amd64.exe\npython-3.14.0a6-amd64.exe.crt\npython-3.14.0a6-amd64.exe.sig\npython-3.14.0a6-amd64.exe.sigstore\npython-3.14.0a6-amd64.exe.spdx.json\npython-3.14.0a6-amd64.zip\npython-3.14.0a6-arm64.exe\npython-3.14.0a6-arm64.exe.crt\npython-3.14.0a6-arm64.exe.sig\npython-3.14.0a6-arm64.exe.sigstore\npython-3.14.0a6-arm64.exe.spdx.json\npython-3.14.0a6-arm64.zip\npython-3.14.0a6-embed-amd64.zip\npython-3.14.0a6-embed-amd64.zip.crt\npython-3.14.0a6-embed-amd64.zip.sig\npython-3.14.0a6-embed-amd64.zip.sigstore\npython-3.14.0a6-embed-amd64.zip.spdx.json\npython-3.14.0a6-embed-arm64.zip\npython-3.14.0a6-embed-arm64.zip.crt\npython-3.14.0a6-embed-arm64.zip.sig\npython-3.14.0a6-embed-arm64.zip.sigstore\npython-3.14.0a6-embed-arm64.zip.spdx.json\npython-3.14.0a6-embed-win32.zip\npython-3.14.0a6-embed-win32.zip.crt\npython-3.14.0a6-embed-win32.zip.sig\npython-3.14.0a6-embed-win32.zip.sigstore\npython-3.14.0a6-embed-win32.zip.spdx.json\npython-3.14.0a6-embeddable-amd64.zip\npython-3.14.0a6-embeddable-arm64.zip\npython-3.14.0a6-embeddable-win32.zip\npython-3.14.0a6-macos11.pkg\npython-3.14.0a6-macos11.pkg.crt\npython-3.14.0a6-macos11.pkg.sig\npython-3.14.0a6-macos11.pkg.sigstore\npython-3.14.0a6-test-amd64.zip\npython-3.14.0a6-test-arm64.zip\npython-3.14.0a6-test-win32.zip\npython-3.14.0a6-win32.zip\npython-3.14.0a6.exe\npython-3.14.0a6.exe.crt\npython-3.14.0a6.exe.sig\npython-3.14.0a6.exe.sigstore\npython-3.14.0a6.exe.spdx.json\npython-3.14.0a6t-amd64.zip\npython-3.14.0a6t-arm64.zip\npython-3.14.0a6t-win32.zip\npython-3.14.0a7-amd64.exe\npython-3.14.0a7-amd64.exe.crt\npython-3
.14.0a7-amd64.exe.sig\npython-3.14.0a7-amd64.exe.sigstore\npython-3.14.0a7-amd64.exe.spdx.json\npython-3.14.0a7-amd64.zip\npython-3.14.0a7-arm64.exe\npython-3.14.0a7-arm64.exe.crt\npython-3.14.0a7-arm64.exe.sig\npython-3.14.0a7-arm64.exe.sigstore\npython-3.14.0a7-arm64.exe.spdx.json\npython-3.14.0a7-arm64.zip\npython-3.14.0a7-embed-amd64.zip\npython-3.14.0a7-embed-amd64.zip.crt\npython-3.14.0a7-embed-amd64.zip.sig\npython-3.14.0a7-embed-amd64.zip.sigstore\npython-3.14.0a7-embed-amd64.zip.spdx.json\npython-3.14.0a7-embed-arm64.zip\npython-3.14.0a7-embed-arm64.zip.crt\npython-3.14.0a7-embed-arm64.zip.sig\npython-3.14.0a7-embed-arm64.zip.sigstore\npython-3.14.0a7-embed-arm64.zip.spdx.json\npython-3.14.0a7-embed-win32.zip\npython-3.14.0a7-embed-win32.zip.crt\npython-3.14.0a7-embed-win32.zip.sig\npython-3.14.0a7-embed-win32.zip.sigstore\npython-3.14.0a7-embed-win32.zip.spdx.json\npython-3.14.0a7-embeddable-amd64.zip\npython-3.14.0a7-embeddable-arm64.zip\npython-3.14.0a7-embeddable-win32.zip\npython-3.14.0a7-macos11.pkg\npython-3.14.0a7-macos11.pkg.crt\npython-3.14.0a7-macos11.pkg.sig\npython-3.14.0a7-macos11.pkg.sigstore\npython-3.14.0a7-test-amd64.zip\npython-3.14.0a7-test-arm64.zip\npython-3.14.0a7-test-win32.zip\npython-3.14.0a7-win32.zip\npython-3.14.0a7.exe\npython-3.14.0a7.exe.crt\npython-3.14.0a7.exe.sig\npython-3.14.0a7.exe.sigstore\npython-3.14.0a7.exe.spdx.json\npython-3.14.0a7t-amd64.zip\npython-3.14.0a7t-arm64.zip\npython-3.14.0a7t-win32.zip\npython-3.14.0b1-amd64.exe\npython-3.14.0b1-amd64.exe.crt\npython-3.14.0b1-amd64.exe.sig\npython-3.14.0b1-amd64.exe.sigstore\npython-3.14.0b1-amd64.exe.spdx.json\npython-3.14.0b1-amd64.zip\npython-3.14.0b1-arm64.exe\npython-3.14.0b1-arm64.exe.crt\npython-3.14.0b1-arm64.exe.sig\npython-3.14.0b1-arm64.exe.sigstore\npython-3.14.0b1-arm64.exe.spdx.json\npython-3.14.0b1-arm64.zip\npython-3.14.0b1-embed-amd64.zip\npython-3.14.0b1-embed-amd64.zip.crt\npython-3.14.0b1-embed-amd64.zip.sig\npython-3.14.0b1-embed-amd64.zip.sigstore\
npython-3.14.0b1-embed-amd64.zip.spdx.json\npython-3.14.0b1-embed-arm64.zip\npython-3.14.0b1-embed-arm64.zip.crt\npython-3.14.0b1-embed-arm64.zip.sig\npython-3.14.0b1-embed-arm64.zip.sigstore\npython-3.14.0b1-embed-arm64.zip.spdx.json\npython-3.14.0b1-embed-win32.zip\npython-3.14.0b1-embed-win32.zip.crt\npython-3.14.0b1-embed-win32.zip.sig\npython-3.14.0b1-embed-win32.zip.sigstore\npython-3.14.0b1-embed-win32.zip.spdx.json\npython-3.14.0b1-embeddable-amd64.zip\npython-3.14.0b1-embeddable-arm64.zip\npython-3.14.0b1-embeddable-win32.zip\npython-3.14.0b1-macos11.pkg\npython-3.14.0b1-macos11.pkg.crt\npython-3.14.0b1-macos11.pkg.sig\npython-3.14.0b1-macos11.pkg.sigstore\npython-3.14.0b1-test-amd64.zip\npython-3.14.0b1-test-arm64.zip\npython-3.14.0b1-test-win32.zip\npython-3.14.0b1-win32.zip\npython-3.14.0b1.exe\npython-3.14.0b1.exe.crt\npython-3.14.0b1.exe.sig\npython-3.14.0b1.exe.sigstore\npython-3.14.0b1.exe.spdx.json\npython-3.14.0b1t-amd64.zip\npython-3.14.0b1t-arm64.zip\npython-3.14.0b1t-win32.zip\npython-3.14.0b2-amd64.exe\npython-3.14.0b2-amd64.exe.crt\npython-3.14.0b2-amd64.exe.sig\npython-3.14.0b2-amd64.exe.sigstore\npython-3.14.0b2-amd64.exe.spdx.json\npython-3.14.0b2-amd64.zip\npython-3.14.0b2-arm64.exe\npython-3.14.0b2-arm64.exe.crt\npython-3.14.0b2-arm64.exe.sig\npython-3.14.0b2-arm64.exe.sigstore\npython-3.14.0b2-arm64.exe.spdx.json\npython-3.14.0b2-arm64.zip\npython-3.14.0b2-embed-amd64.zip\npython-3.14.0b2-embed-amd64.zip.crt\npython-3.14.0b2-embed-amd64.zip.sig\npython-3.14.0b2-embed-amd64.zip.sigstore\npython-3.14.0b2-embed-amd64.zip.spdx.json\npython-3.14.0b2-embed-arm64.zip\npython-3.14.0b2-embed-arm64.zip.crt\npython-3.14.0b2-embed-arm64.zip.sig\npython-3.14.0b2-embed-arm64.zip.sigstore\npython-3.14.0b2-embed-arm64.zip.spdx.json\npython-3.14.0b2-embed-win32.zip\npython-3.14.0b2-embed-win32.zip.crt\npython-3.14.0b2-embed-win32.zip.sig\npython-3.14.0b2-embed-win32.zip.sigstore\npython-3.14.0b2-embed-win32.zip.spdx.json\npython-3.14.0b2-embeddable-amd64
.zip\npython-3.14.0b2-embeddable-arm64.zip\npython-3.14.0b2-embeddable-win32.zip\npython-3.14.0b2-macos11.pkg\npython-3.14.0b2-macos11.pkg.crt\npython-3.14.0b2-macos11.pkg.sig\npython-3.14.0b2-macos11.pkg.sigstore\npython-3.14.0b2-test-amd64.zip\npython-3.14.0b2-test-arm64.zip\npython-3.14.0b2-test-win32.zip\npython-3.14.0b2-win32.zip\npython-3.14.0b2.exe\npython-3.14.0b2.exe.crt\npython-3.14.0b2.exe.sig\npython-3.14.0b2.exe.sigstore\npython-3.14.0b2.exe.spdx.json\npython-3.14.0b2t-amd64.zip\npython-3.14.0b2t-arm64.zip\npython-3.14.0b2t-win32.zip\npython-3.14.0b3-aarch64-linux-android.tar.gz\nPython-3.14.0b3-iOS-XCframework.tar.gz\npython-3.14.0b3-amd64.exe\npython-3.14.0b3-amd64.exe.crt\npython-3.14.0b3-amd64.exe.sig\npython-3.14.0b3-amd64.exe.sigstore\npython-3.14.0b3-amd64.exe.spdx.json\npython-3.14.0b3-amd64.zip\npython-3.14.0b3-arm64.exe\npython-3.14.0b3-arm64.exe.crt\npython-3.14.0b3-arm64.exe.sig\npython-3.14.0b3-arm64.exe.sigstore\npython-3.14.0b3-arm64.exe.spdx.json\npython-3.14.0b3-arm64.zip\npython-3.14.0b3-embed-amd64.zip\npython-3.14.0b3-embed-amd64.zip.crt\npython-3.14.0b3-embed-amd64.zip.sig\npython-3.14.0b3-embed-amd64.zip.sigstore\npython-3.14.0b3-embed-amd64.zip.spdx.json\npython-3.14.0b3-embed-arm64.zip\npython-3.14.0b3-embed-arm64.zip.crt\npython-3.14.0b3-embed-arm64.zip.sig\npython-3.14.0b3-embed-arm64.zip.sigstore\npython-3.14.0b3-embed-arm64.zip.spdx.json\npython-3.14.0b3-embed-win32.zip\npython-3.14.0b3-embed-win32.zip.crt\npython-3.14.0b3-embed-win32.zip.sig\npython-3.14.0b3-embed-win32.zip.sigstore\npython-3.14.0b3-embed-win32.zip.spdx.json\npython-3.14.0b3-embeddable-amd64.zip\npython-3.14.0b3-embeddable-arm64.zip\npython-3.14.0b3-embeddable-win32.zip\npython-3.14.0b3-macos11.pkg\npython-3.14.0b3-macos11.pkg.crt\npython-3.14.0b3-macos11.pkg.sig\npython-3.14.0b3-macos11.pkg.sigstore\npython-3.14.0b3-test-amd64.zip\npython-3.14.0b3-test-arm64.zip\npython-3.14.0b3-test-win32.zip\npython-3.14.0b3-win32.zip\npython-3.14.0b3-x86_64-linux-android
.tar.gz\npython-3.14.0b3.exe\npython-3.14.0b3.exe.crt\npython-3.14.0b3.exe.sig\npython-3.14.0b3.exe.sigstore\npython-3.14.0b3.exe.spdx.json\npython-3.14.0b3t-amd64.zip\npython-3.14.0b3t-arm64.zip\npython-3.14.0b3t-win32.zip\nwin32a1/\nwin32a2/\nwin32a3/\nwin32a4/\nwin32a5/\nwin32a6/\nwin32a7/\nwin32b1/\nwin32b2/\nwin32b3/\nwindows-3.14.0a1.json\nwindows-3.14.0a2.json\nwindows-3.14.0a3.json\nwindows-3.14.0a4.json\nwindows-3.14.0a6.json\nwindows-3.14.0a7.json\nwindows-3.14.0b1.json\nwindows-3.14.0b2.json\nwindows-3.14.0b3.json\n"
  },
  {
    "path": "tests/magicdata/Include/internal/pycore_magic_number.h",
    "content": "// copied from cpython bd3d31f380cd451a4ab6da5fbfde463fed95b5b5\n// ...\n\n#ifndef Py_INTERNAL_MAGIC_NUMBER_H\n#define Py_INTERNAL_MAGIC_NUMBER_H\n\n#define PYC_MAGIC_NUMBER 3603\n\n#endif  // !Py_INTERNAL_MAGIC_NUMBER_H\n\n// ...\n"
  },
  {
    "path": "tests/magicdata/Lib/test/test_importlib/test_util.py",
    "content": "# copied from cpython bd3d31f380cd451a4ab6da5fbfde463fed95b5b5\n\n\nclass SomeClass:\n    def some_method(self) -> None:\n        EXPECTED_MAGIC_NUMBER = 3495\n        print(EXPECTED_MAGIC_NUMBER)\n"
  },
  {
    "path": "tests/patchlevel.h",
    "content": "\n/* Python version identification scheme.\n\n   When the major or minor version changes, the VERSION variable in\n   configure.ac must also be changed.\n\n   There is also (independent) API version information in modsupport.h.\n*/\n\n/* Values for PY_RELEASE_LEVEL */\n#define PY_RELEASE_LEVEL_ALPHA  0xA\n#define PY_RELEASE_LEVEL_BETA   0xB\n#define PY_RELEASE_LEVEL_GAMMA  0xC     /* For release candidates */\n#define PY_RELEASE_LEVEL_FINAL  0xF     /* Serial should be 0 here */\n                                        /* Higher for patch releases */\n\n/* Version parsed out into numeric values */\n/*--start constants--*/\n#define PY_MAJOR_VERSION        3\n#define PY_MINOR_VERSION        14\n#define PY_MICRO_VERSION        0\n#define PY_RELEASE_LEVEL        PY_RELEASE_LEVEL_ALPHA\n#define PY_RELEASE_SERIAL       1\n\n/* Version as a string */\n#define PY_VERSION              \"3.14.0a1+\"\n/*--end constants--*/\n\n/* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2.\n   Use this for numeric comparisons, e.g. #if PY_VERSION_HEX >= ... */\n#define PY_VERSION_HEX ((PY_MAJOR_VERSION << 24) | \\\n                        (PY_MINOR_VERSION << 16) | \\\n                        (PY_MICRO_VERSION <<  8) | \\\n                        (PY_RELEASE_LEVEL <<  4) | \\\n                        (PY_RELEASE_SERIAL << 0))\n"
  },
  {
    "path": "tests/sbom/sbom-with-pip-removed.json",
    "content": "{\n  \"SPDXID\": \"SPDXRef-DOCUMENT\",\n  \"name\": \"CPython SBOM\",\n  \"spdxVersion\": \"SPDX-2.3\",\n  \"dataLicense\": \"CC0-1.0\",\n  \"documentNamespace\": \"https://www.python.org/ftp/python/3.13.0/fake-artifact.txt.spdx.json\",\n  \"creationInfo\": {\n    \"created\": \"2024-10-15T20:11:52Z\",\n    \"creators\": [],\n    \"licenseListVersion\": \"3.22\"\n  },\n  \"files\": [],\n  \"packages\": [],\n  \"relationships\": [\n    {\n      \"relatedSpdxElement\": \"SPDXRef-FILE-Modules-expat-COPYING\",\n      \"relationshipType\": \"CONTAINS\",\n      \"spdxElementId\": \"SPDXRef-PACKAGE-expat\"\n    }\n  ]\n}\n"
  },
  {
    "path": "tests/sbom/sbom-with-pip.json",
    "content": "{\n  \"SPDXID\": \"SPDXRef-DOCUMENT\",\n  \"name\": \"CPython SBOM\",\n  \"spdxVersion\": \"SPDX-2.3\",\n  \"dataLicense\": \"CC0-1.0\",\n  \"documentNamespace\": \"https://www.python.org/ftp/python/3.13.0/fake-artifact.txt.spdx.json\",\n  \"creationInfo\": {\n    \"created\": \"2024-10-15T20:11:52Z\",\n    \"creators\": [],\n    \"licenseListVersion\": \"3.22\"\n  },\n  \"files\": [],\n  \"packages\": [\n    {\n      \"SPDXID\": \"SPDXRef-PACKAGE-pip\",\n      \"name\": \"pip\",\n      \"versionInfo\": \"24.0\",\n      \"licenseConcluded\": \"MIT\",\n      \"originator\": \"Organization: Python Software Foundation\",\n      \"supplier\": \"Organization: Python Software Foundation\",\n      \"packageFileName\": \"pip-24.0-py3-none-any.whl\",\n      \"externalRefs\": [\n        {\n          \"referenceCategory\": \"SECURITY\",\n          \"referenceLocator\": \"cpe:2.3:a:pypa:pip:24.0:*:*:*:*:*:*:*\",\n          \"referenceType\": \"cpe23Type\"\n        }\n      ],\n      \"primaryPackagePurpose\": \"RUNTIME\",\n      \"downloadLocation\": \"https://files.pythonhosted.org/packages/.../pip-24.0-py3-none-any.whl\",\n      \"checksums\": [\n        {\n          \"algorithm\": \"SHA256\",\n          \"checksumValue\": \"ba0d021a166865d2265246961bec0152ff124de910c5cc39f1156ce3fa7c69dc\"\n        }\n      ]\n    }\n  ],\n  \"relationships\": [\n    {\n      \"relatedSpdxElement\": \"SPDXRef-FILE-Modules-expat-COPYING\",\n      \"relationshipType\": \"CONTAINS\",\n      \"spdxElementId\": \"SPDXRef-PACKAGE-expat\"\n    },\n    {\n      \"relatedSpdxElement\": \"SPDXRef-PACKAGE-urllib3\",\n      \"relationshipType\": \"DEPENDS_ON\",\n      \"spdxElementId\": \"SPDXRef-PACKAGE-pip\"\n    },\n    {\n      \"relatedSpdxElement\": \"SPDXRef-PACKAGE-pip\",\n      \"relationshipType\": \"DEPENDS_ON\",\n      \"spdxElementId\": \"SPDXRef-PACKAGE-cpython\"\n    }\n  ]\n}\n"
  },
  {
    "path": "tests/test_add_to_pydotorg.py",
    "content": "import os\nfrom pathlib import Path\n\nimport pytest\nfrom pyfakefs.fake_filesystem import FakeFilesystem\n\nos.environ[\"AUTH_INFO\"] = \"test_username:test_api_key\"\n\nimport add_to_pydotorg\n\n\n@pytest.mark.parametrize(\n    [\"release\", \"expected\"],\n    [\n        (\"3.9.0a0\", \"390-a0\"),\n        (\"3.10.0b3\", \"3100-b3\"),\n        (\"3.11.0rc2\", \"3110-rc2\"),\n        (\"3.12.15\", \"31215\"),\n    ],\n)\ndef test_slug_for(release: str, expected: str) -> None:\n    assert add_to_pydotorg.slug_for(release) == expected\n\n\ndef test_sigfile_for() -> None:\n    assert (\n        add_to_pydotorg.sigfile_for(\"3.14.0\", \"Python-3.13.0.tgz\")\n        == \"https://www.python.org/ftp/python/3.14.0/Python-3.13.0.tgz.asc\"\n    )\n\n\n@pytest.mark.parametrize(\n    [\"text\", \"expected\"],\n    [\n        (\"3.9.0a0\", \"390a0\"),\n        (\"3.10.0b3\", \"3100b3\"),\n        (\"3.11.0rc2\", \"3110rc2\"),\n        (\"3.12.15\", \"31215\"),\n        (\"Hello, world!\", \"Hello-world\"),\n    ],\n)\ndef test_make_slug(text: str, expected: str) -> None:\n    assert add_to_pydotorg.make_slug(text) == expected\n\n\ndef test_build_file_dict(tmp_path: Path) -> None:\n    release = \"3.14.0rc2\"\n    release_url = \"https://www.python.org/ftp/python/3.14.0\"\n    release_dir = tmp_path / \"3.14.0\"\n    release_dir.mkdir()\n\n    rfile = \"test-artifact.txt\"\n    (release_dir / rfile).write_text(\"Hello world\")\n    (release_dir / f\"{rfile}.sigstore\").touch()\n\n    assert add_to_pydotorg.build_file_dict(\n        str(tmp_path),\n        release,\n        rfile,\n        12,\n        \"Test artifact\",\n        34,\n        True,\n        \"Test description\",\n    ) == {\n        \"name\": \"Test artifact\",\n        \"slug\": \"3140-rc2-Test-artifact\",\n        \"os\": \"/api/v1/downloads/os/34/\",\n        \"release\": \"/api/v1/downloads/release/12/\",\n        \"description\": \"Test description\",\n        \"is_source\": False,\n       
 \"url\": f\"{release_url}/test-artifact.txt\",\n        \"sha256_sum\": \"64ec88ca00b268e5ba1a35678a1b5316d212f4f366b2477232534a8aeca37f3c\",\n        \"filesize\": 11,\n        \"download_button\": True,\n        \"sigstore_bundle_file\": f\"{release_url}/test-artifact.txt.sigstore\",\n    }\n\n\n@pytest.mark.parametrize(\n    [\"release\", \"expected\"],\n    [\n        (\"3.9.0a0\", \"3.9.0\"),\n        (\"3.10.0b3\", \"3.10.0\"),\n        (\"3.11.0rc2\", \"3.11.0\"),\n        (\"3.12.15\", \"3.12.15\"),\n    ],\n)\ndef test_base_version(release: str, expected: str) -> None:\n    assert add_to_pydotorg.base_version(release) == expected\n\n\n@pytest.mark.parametrize(\n    [\"release\", \"expected\"],\n    [\n        (\"3.9.0a0\", (3, 9, 0)),\n        (\"3.10.0b3\", (3, 10, 0)),\n        (\"3.11.0rc2\", (3, 11, 0)),\n        (\"3.12.15\", (3, 12, 15)),\n    ],\n)\ndef test_base_version_tuple(release: str, expected: tuple[int, int, int]) -> None:\n    assert add_to_pydotorg.base_version_tuple(release) == expected\n\n\n@pytest.mark.parametrize(\n    [\"release\", \"expected\"],\n    [\n        (\"3.9.0a0\", \"3.9\"),\n        (\"3.10.0b3\", \"3.10\"),\n        (\"3.11.0rc2\", \"3.11\"),\n        (\"3.12.15\", \"3.12\"),\n    ],\n)\ndef test_minor_version(release: str, expected: str) -> None:\n    assert add_to_pydotorg.minor_version(release) == expected\n\n\n@pytest.mark.parametrize(\n    [\"release\", \"expected\"],\n    [\n        (\"3.9.0a0\", (3, 9)),\n        (\"3.10.0b3\", (3, 10)),\n        (\"3.11.0rc2\", (3, 11)),\n        (\"3.12.15\", (3, 12)),\n    ],\n)\ndef test_minor_version_tuple(release: str, expected: tuple[int, int]) -> None:\n    assert add_to_pydotorg.minor_version_tuple(release) == expected\n\n\n@pytest.mark.parametrize(\n    [\"release\", \"expected\"],\n    [\n        ((3, 13, 0), \"for macOS 10.13 and later\"),\n        ((3, 14, 0), \"for macOS 10.15 and later\"),\n    ],\n)\ndef test_macos_description(release: tuple[int, int, int], 
expected: str) -> None:\n    assert add_to_pydotorg.macos_description(release) == expected\n\n\ndef test_list_files(fs: FakeFilesystem) -> None:\n    # Arrange\n    fake_ftp_root = \"/fake_ftp_root\"\n    fs.add_real_file(\"tests/fake-ftp-files.txt\")\n    fake_files = Path(\"tests/fake-ftp-files.txt\").read_text().splitlines()\n    for fn in fake_files:\n        if fn.startswith(\"#\"):  # comment\n            continue\n\n        file_path = Path(fake_ftp_root) / \"3.14.0\" / fn\n        if fn.endswith(\"/\"):\n            fs.create_dir(file_path)\n        else:\n            fs.create_file(file_path)\n\n    # Act\n    files = list(add_to_pydotorg.list_files(fake_ftp_root, \"3.14.0b3\"))\n\n    # Assert\n    assert files == [\n        (\n            \"Python-3.14.0b3-iOS-XCframework.tar.gz\",\n            \"iOS XCframework\",\n            \"ios\",\n            False,\n            \"\",\n        ),\n        (\"Python-3.14.0b3.tar.xz\", \"XZ compressed source tarball\", \"source\", True, \"\"),\n        (\"Python-3.14.0b3.tgz\", \"Gzipped source tarball\", \"source\", False, \"\"),\n        (\n            \"python-3.14.0b3-aarch64-linux-android.tar.gz\",\n            \"Android embeddable package (aarch64)\",\n            \"android\",\n            False,\n            \"\",\n        ),\n        (\n            \"python-3.14.0b3-amd64.exe\",\n            \"Windows installer (64-bit)\",\n            \"windows\",\n            True,\n            \"Recommended\",\n        ),\n        (\n            \"python-3.14.0b3-arm64.exe\",\n            \"Windows installer (ARM64)\",\n            \"windows\",\n            False,\n            \"Experimental\",\n        ),\n        (\n            \"python-3.14.0b3-embed-amd64.zip\",\n            \"Windows embeddable package (64-bit)\",\n            \"windows\",\n            False,\n            \"\",\n        ),\n        (\n            \"python-3.14.0b3-embed-arm64.zip\",\n            \"Windows embeddable package (ARM64)\",\n            
\"windows\",\n            False,\n            \"\",\n        ),\n        (\n            \"python-3.14.0b3-embed-win32.zip\",\n            \"Windows embeddable package (32-bit)\",\n            \"windows\",\n            False,\n            \"\",\n        ),\n        (\n            \"python-3.14.0b3-macos11.pkg\",\n            \"macOS installer\",\n            \"macos\",\n            True,\n            \"for macOS 10.15 and later\",\n        ),\n        (\n            \"python-3.14.0b3-x86_64-linux-android.tar.gz\",\n            \"Android embeddable package (x86_64)\",\n            \"android\",\n            False,\n            \"\",\n        ),\n        (\"python-3.14.0b3.exe\", \"Windows installer (32-bit)\", \"windows\", False, \"\"),\n        (\n            \"windows-3.14.0b3.json\",\n            \"Windows release manifest\",\n            \"windows\",\n            False,\n            \"Install with 'py install 3.14'\",\n        ),\n    ]\n"
  },
  {
    "path": "tests/test_buildbotapi.py",
    "content": "from functools import cache\nfrom unittest.mock import AsyncMock\n\nimport aiohttp\nimport pytest\n\nimport buildbotapi\n\n\ndef test_builder_class() -> None:\n    # Arrange / Act\n    builder = buildbotapi.Builder(\n        builderid=123,\n        description=\"my description\",\n        name=\"my name\",\n        tags=[\"tag1\", \"tag2\"],\n    )\n\n    # Assert\n    assert builder.builderid == 123\n    assert builder.description == \"my description\"\n    assert builder.name == \"my name\"\n    assert builder.tags == [\"tag1\", \"tag2\"]\n    assert hash(builder) == 123\n\n\n@cache\ndef load(filename: str) -> str:\n    with open(filename) as f:\n        return f.read()\n\n\n@pytest.mark.asyncio\nasync def test_buildbotapi_authenticate() -> None:\n    # Arrange\n    async with AsyncMock(aiohttp.ClientSession) as mock_session:\n        api = buildbotapi.BuildBotAPI(mock_session)\n\n        # Act\n        await api.authenticate(token=\"\")\n\n    # Assert\n    mock_session.get.assert_called_with(\n        \"https://buildbot.python.org/all/auth/login\", params={\"token\": \"\"}\n    )\n\n\n@pytest.mark.asyncio\nasync def test_buildbotapi_all_builders() -> None:\n    # Arrange\n    mock_session = AsyncMock(aiohttp.ClientSession)\n    mock_session.get.return_value.__aenter__.return_value.status = 200\n    mock_session.get.return_value.__aenter__.return_value.text.return_value = load(\n        \"tests/buildbotapi/builders.json\"\n    )\n    api = buildbotapi.BuildBotAPI(mock_session)\n\n    # Act\n    all_builders = await api.all_builders()\n\n    # Assert\n    mock_session.get.assert_called_with(\n        \"https://buildbot.python.org/all/api/v2/builders\"\n    )\n    assert len(all_builders) == 2\n    assert all_builders[3].name == \"AMD64 RHEL8 LTO 3.13\"\n    assert all_builders[1623].name == \"AMD64 Windows PGO NoGIL PR\"\n\n\n@pytest.mark.asyncio\nasync def test_buildbotapi_all_builders_with_branch() -> None:\n    # Arrange\n    mock_session = 
AsyncMock(aiohttp.ClientSession)\n    mock_session.get.return_value.__aenter__.return_value.status = 200\n    mock_session.get.return_value.__aenter__.return_value.text.return_value = load(\n        \"tests/buildbotapi/builders.json\"\n    )\n    api = buildbotapi.BuildBotAPI(mock_session)\n\n    # Act\n    await api.all_builders(branch=\"3.13\")\n\n    # Assert\n    mock_session.get.assert_called_with(\n        \"https://buildbot.python.org/all/api/v2/builders?tags__contains=3.13\"\n    )\n\n\n@pytest.mark.asyncio\nasync def test_buildbotapi_stable_builders() -> None:\n    # Arrange\n    mock_session = AsyncMock(aiohttp.ClientSession)\n    mock_session.get.return_value.__aenter__.return_value.status = 200\n    mock_session.get.return_value.__aenter__.return_value.text.return_value = load(\n        \"tests/buildbotapi/builders.json\"\n    )\n    api = buildbotapi.BuildBotAPI(mock_session)\n\n    # Act\n    all_builders = await api.stable_builders()\n\n    # Assert\n    mock_session.get.assert_called_with(\n        \"https://buildbot.python.org/all/api/v2/builders\"\n    )\n    assert len(all_builders) == 1\n    assert all_builders[3].name == \"AMD64 RHEL8 LTO 3.13\"\n    assert \"stable\" in all_builders[3].tags\n\n\n@pytest.mark.asyncio\n@pytest.mark.parametrize(\n    [\"json_data\", \"expected\"],\n    [\n        (\"tests/buildbotapi/success.json\", False),\n        (\"tests/buildbotapi/failure.json\", True),\n        (\"tests/buildbotapi/no-builds.json\", False),\n    ],\n)\nasync def test_buildbotapi_is_builder_failing_currently_yes(\n    json_data: str, expected: bool\n) -> None:\n    # Arrange\n    mock_session = AsyncMock(aiohttp.ClientSession)\n    mock_session.get.return_value.__aenter__.return_value.status = 200\n    mock_session.get.return_value.__aenter__.return_value.text.return_value = load(\n        json_data\n    )\n    api = buildbotapi.BuildBotAPI(mock_session)\n    builder = buildbotapi.Builder(builderid=3)\n\n    # Act\n    failing = await 
api.is_builder_failing_currently(builder=builder)\n\n    # Assert\n    mock_session.get.assert_called_with(\n        \"https://buildbot.python.org/all/api/v2/builds?complete__eq=true\"\n        \"&&builderid__eq=3&&order=-complete_at&&limit=1\"\n    )\n    assert failing is expected\n"
  },
  {
    "path": "tests/test_release.py",
    "content": "from pathlib import Path\nfrom typing import cast\n\nimport pytest\nfrom pytest_mock import MockerFixture\n\nimport release\n\n\n@pytest.mark.parametrize(\n    [\"test_editor\", \"expected\"],\n    [\n        (\"vim\", [\"vim\", \"README.rst\"]),\n        (\"bbedit --wait\", [\"bbedit\", \"--wait\", \"README.rst\"]),\n    ],\n)\ndef test_manual_edit(\n    mocker: MockerFixture,\n    monkeypatch: pytest.MonkeyPatch,\n    test_editor: str,\n    expected: list[str],\n) -> None:\n    # Arrange\n    monkeypatch.setenv(\"EDITOR\", test_editor)\n    mock_run_cmd = mocker.patch(\"release.run_cmd\")\n\n    # Act\n    release.manual_edit(\"README.rst\")\n\n    # Assert\n    mock_run_cmd.assert_called_once_with(expected)\n\n\ndef test_task(mocker: MockerFixture) -> None:\n    # Arrange\n    db = {\"mock\": \"mock\"}\n    my_task = mocker.Mock()\n    task = release.Task(my_task, \"My task\")\n\n    # Act\n    task(cast(release.ReleaseShelf, db))\n\n    # Assert\n    assert task.description == \"My task\"\n    assert task.function == my_task\n    my_task.assert_called_once_with(cast(release.ReleaseShelf, db))\n\n\n@pytest.mark.parametrize(\n    [\"test_inputs\", \"expected\"],\n    [\n        ([\"yes\"], True),\n        ([\"no\"], False),\n        ([\"maybe\", \"yes\"], True),\n        ([\"maybe\", \"no\"], False),\n        ([\"\", \"nope\", \"y\", \"yes\"], True),\n        ([\"\", \"nope\", \"n\", \"no\"], False),\n    ],\n)\ndef test_ask_question(\n    mocker: MockerFixture,\n    capsys: pytest.CaptureFixture[str],\n    test_inputs: list[str],\n    expected: bool,\n) -> None:\n    # Arrange\n    mocker.patch(\"release.input\", side_effect=test_inputs)\n\n    # Act\n    result = release.ask_question(\"Do you want to proceed?\")\n\n    # Assert\n    assert result is expected\n    captured = capsys.readouterr()\n    assert \"Do you want to proceed?\" in captured.out\n    # All inputs except the last are invalid\n    invalid_count = len(test_inputs) - 1\n    
assert captured.out.count(\"Please enter yes or no.\") == invalid_count\n\n\ndef test_tweak_patchlevel(tmp_path: Path) -> None:\n    # Arrange\n    tag = release.Tag(\"3.14.0b2\")\n\n    original_patchlevel_file = Path(__file__).parent / \"patchlevel.h\"\n    patchlevel_file = tmp_path / \"patchlevel.h\"\n    patchlevel_file.write_text(original_patchlevel_file.read_text())\n\n    # Act\n    release.tweak_patchlevel(tag, filename=str(patchlevel_file))\n\n    # Assert\n    new_contents = patchlevel_file.read_text()\n    for expected in (\n        \"#define PY_MAJOR_VERSION        3\",\n        \"#define PY_MINOR_VERSION        14\",\n        \"#define PY_MICRO_VERSION        0\",\n        \"#define PY_RELEASE_LEVEL        PY_RELEASE_LEVEL_BETA\",\n        \"#define PY_RELEASE_SERIAL       2\",\n        '#define PY_VERSION              \"3.14.0b2\"',\n    ):\n        assert expected in new_contents\n\n\n@pytest.mark.parametrize(\n    [\n        \"test_tag\",\n        \"expected_version\",\n        \"expected_underline\",\n        \"expected_whatsnew\",\n        \"expected_docs\",\n        \"expected_pep_line\",\n    ],\n    [\n        (\n            \"3.14.0a6\",\n            \"This is Python version 3.14.0 alpha 6\",\n            \"=====================================\",\n            \"3.14 <https://docs.python.org/3.14/whatsnew/3.14.html>`_\",\n            \"`Documentation for Python 3.14 <https://docs.python.org/3.14/>`_\",\n            \"`PEP 745 <https://peps.python.org/pep-0745/>`__ for Python 3.14\",\n        ),\n        (\n            \"3.14.0b2\",\n            \"This is Python version 3.14.0 beta 2\",\n            \"====================================\",\n            \"3.14 <https://docs.python.org/3.14/whatsnew/3.14.html>`_\",\n            \"`Documentation for Python 3.14 <https://docs.python.org/3.14/>`_\",\n            \"`PEP 745 <https://peps.python.org/pep-0745/>`__ for Python 3.14\",\n        ),\n        (\n            \"3.14.0rc2\",\n            
\"This is Python version 3.14.0 release candidate 2\",\n            \"=================================================\",\n            \"3.14 <https://docs.python.org/3.14/whatsnew/3.14.html>`_\",\n            \"`Documentation for Python 3.14 <https://docs.python.org/3.14/>`_\",\n            \"`PEP 745 <https://peps.python.org/pep-0745/>`__ for Python 3.14\",\n        ),\n        (\n            \"3.15.1\",\n            \"This is Python version 3.15.1\",\n            \"=============================\",\n            \"3.15 <https://docs.python.org/3.15/whatsnew/3.15.html>`_\",\n            \"`Documentation for Python 3.15 <https://docs.python.org/3.15/>`_\",\n            \"`PEP 790 <https://peps.python.org/pep-0790/>`__ for Python 3.15\",\n        ),\n    ],\n)\ndef test_tweak_readme(\n    tmp_path: Path,\n    test_tag: str,\n    expected_version: str,\n    expected_underline: str,\n    expected_whatsnew: str,\n    expected_docs: str,\n    expected_pep_line: str,\n) -> None:\n    # Arrange\n    tag = release.Tag(test_tag)\n\n    original_readme_file = Path(__file__).parent / \"README.rst\"\n    original_contents = original_readme_file.read_text()\n    readme_file = tmp_path / \"README.rst\"\n    readme_file.write_text(original_contents)\n\n    # Act\n    release.tweak_readme(tag, filename=str(readme_file))\n\n    # Assert\n    new_contents = readme_file.read_text()\n    new_lines = new_contents.split(\"\\n\")\n    assert new_lines[0] == expected_version\n    assert new_lines[1] == expected_underline\n    assert expected_whatsnew in new_contents\n    assert expected_docs in new_contents\n    assert expected_pep_line in new_contents\n    assert original_contents.endswith(\"\\n\")\n    assert new_contents.endswith(\"\\n\")\n"
  },
  {
    "path": "tests/test_release_tag.py",
    "content": "import io\nfrom subprocess import CompletedProcess\n\nimport pytest\nfrom pytest_mock import MockerFixture\n\nimport release\n\n\ndef test_tag() -> None:\n    # Arrange\n    tag_name = \"3.12.2\"\n\n    # Act\n    tag = release.Tag(tag_name)\n\n    # Assert\n    assert str(tag) == \"3.12.2\"\n    assert str(tag.next_minor_release()) == \"3.13.0a0\"\n    assert tag.as_tuple() == (3, 12, 2, \"f\", 0)\n    assert tag.branch == \"3.12\"\n    assert tag.gitname == \"v3.12.2\"\n    assert tag.long_name == \"3.12.2\"\n    assert tag.is_alpha_release is False\n    assert tag.is_feature_freeze_release is False\n    assert tag.is_release_candidate is False\n    assert tag.nickname == \"3122\"\n    assert tag.normalized() == \"3.12.2\"\n\n\ndef test_tag_phase() -> None:\n    # Arrange\n    alpha = release.Tag(\"3.13.0a7\")\n    beta1 = release.Tag(\"3.13.0b1\")\n    beta4 = release.Tag(\"3.13.0b4\")\n    rc = release.Tag(\"3.13.0rc3\")\n    final = release.Tag(\"3.13.0\")\n\n    # Act / Assert\n    assert alpha.is_alpha_release is True\n    assert alpha.is_feature_freeze_release is False\n    assert alpha.is_release_candidate is False\n    assert alpha.is_final is False\n    assert alpha.branch == \"main\"\n\n    assert beta1.is_alpha_release is False\n    assert beta1.is_feature_freeze_release is True\n    assert beta1.is_release_candidate is False\n    assert beta1.is_final is False\n    assert beta1.branch == \"main\"\n\n    assert beta4.is_alpha_release is False\n    assert beta4.is_feature_freeze_release is False\n    assert beta4.is_release_candidate is False\n    assert beta4.is_final is False\n    assert beta4.branch == \"3.13\"\n\n    assert rc.is_alpha_release is False\n    assert rc.is_feature_freeze_release is False\n    assert rc.is_release_candidate is True\n    assert rc.is_final is False\n    assert rc.branch == \"3.13\"\n\n    assert final.is_alpha_release is False\n    assert final.is_feature_freeze_release is False\n    assert 
final.is_release_candidate is False\n    assert final.is_final is True\n    assert final.branch == \"3.13\"\n\n\ndef test_tag_committed_at_not_found() -> None:\n    # Arrange\n    tag = release.Tag(\"3.12.2\")\n\n    # Act / Assert\n    with pytest.raises(SystemExit):\n        tag.committed_at()\n\n\ndef test_tag_committed(mocker: MockerFixture) -> None:\n    # Arrange\n    tag = release.Tag(\"3.12.2\")\n\n    proc = CompletedProcess([], 0)\n    proc.stdout = b\"1707250784\"\n    mocker.patch(\"subprocess.run\", return_value=proc)\n\n    # Act / Assert\n    assert str(tag.committed_at) == \"2024-02-06 20:19:44+00:00\"\n\n\ndef test_tag_dot(mocker: MockerFixture) -> None:\n    # Arrange\n    tag_name = \".\"\n    mocker.patch(\"os.getcwd\", return_value=\"/path/to/3.12.2\")\n\n    # Act\n    tag = release.Tag(tag_name)\n\n    # Assert\n    assert str(tag) == \"3.12.2\"\n\n\ndef test_tag_invalid() -> None:\n    # Arrange\n    tag_name = \"bleep\"\n\n    # Act / Assert\n    with pytest.raises(SystemExit):\n        release.Tag(tag_name)\n\n\ndef test_tag_docs_attributes() -> None:\n    # Arrange\n    alpha = release.Tag(\"3.13.0a7\")\n    beta = release.Tag(\"3.13.0b1\")\n    rc = release.Tag(\"3.13.0rc3\")\n    final_zero = release.Tag(\"3.13.0\")\n    final_3 = release.Tag(\"3.13.3\")\n\n    # Act / Assert\n    assert alpha.includes_docs is False\n    assert beta.includes_docs is False\n    assert rc.includes_docs is True\n    assert final_zero.includes_docs is True\n    assert final_3.includes_docs is True\n\n    assert alpha.doc_version == \"3.13\"\n    assert beta.doc_version == \"3.13\"\n    assert rc.doc_version == \"3.13\"\n    assert final_zero.doc_version == \"3.13\"\n    assert final_3.doc_version == \"3.13.3\"\n\n\ndef test_tag_long_name() -> None:\n    # Arrange\n    alpha = release.Tag(\"3.13.0a7\")\n    beta = release.Tag(\"3.13.0b1\")\n    rc = release.Tag(\"3.13.0rc3\")\n    final_zero = release.Tag(\"3.13.0\")\n    final_3 = 
release.Tag(\"3.13.3\")\n\n    # Act / Assert\n    assert alpha.long_name == \"3.13.0 alpha 7\"\n    assert beta.long_name == \"3.13.0 beta 1\"\n    assert rc.long_name == \"3.13.0 release candidate 3\"\n    assert final_zero.long_name == \"3.13.0\"\n    assert final_3.long_name == \"3.13.3\"\n\n\n@pytest.mark.parametrize(\n    [\"version\", \"expected\"],\n    [\n        (\"3.12.10\", True),\n        (\"3.13.3\", False),\n    ],\n)\ndef test_tag_is_security_release(\n    version: str, expected: str, mocker: MockerFixture\n) -> None:\n    # Arrange\n    mock_response = b\"\"\"\n    {\n        \"3.13\": {\n            \"status\": \"bugfix\"\n        },\n        \"3.12\": {\n            \"status\": \"security\"\n        }\n    }\n    \"\"\"\n    mocker.patch(\"urllib.request.urlopen\", return_value=io.BytesIO(mock_response))\n\n    # Act\n    tag = release.Tag(version)\n\n    # Assert\n    assert tag.is_security_release is expected\n"
  },
  {
    "path": "tests/test_run_release.py",
    "content": "import builtins\nimport contextlib\nimport io\nimport tarfile\nfrom contextlib import nullcontext as does_not_raise\nfrom pathlib import Path\nfrom typing import cast\n\nimport pytest\n\nimport run_release\nfrom release import ReleaseShelf, Tag\nfrom run_release import ReleaseException\n\n\n@pytest.mark.parametrize(\n    \"version\",\n    [\"sigstore 3.6.2\", \"sigstore 3.6.6\"],\n)\ndef test_check_sigstore_version_success(version) -> None:\n    # Verify runs with no exceptions\n    run_release.check_sigstore_version(version)\n\n\n@pytest.mark.parametrize(\n    \"version\",\n    [\"sigstore 3.4.0\", \"sigstore 3.6.0\", \"sigstore 4.0.0\", \"\"],\n)\ndef test_check_sigstore_version_exception(version) -> None:\n    with pytest.raises(\n        ReleaseException, match=\"Sigstore version not detected or not valid\"\n    ):\n        run_release.check_sigstore_version(version)\n\n\n@pytest.mark.parametrize(\n    [\"url\", \"expected\"],\n    [\n        (\"github.com/hugovk/cpython.git\", \"hugovk\"),\n        (\"git@github.com:hugovk/cpython.git\", \"hugovk\"),\n        (\"https://github.com/hugovk/cpython.git\", \"hugovk\"),\n    ],\n)\ndef test_extract_github_owner(url: str, expected: str) -> None:\n    assert run_release.extract_github_owner(url) == expected\n\n\ndef test_invalid_extract_github_owner() -> None:\n    with pytest.raises(\n        ReleaseException,\n        match=\"Could not parse GitHub owner from 'origin' remote URL: \"\n        \"https://example.com\",\n    ):\n        run_release.extract_github_owner(\"https://example.com\")\n\n\n@pytest.mark.parametrize(\n    [\"release_tag\", \"git_current_branch\", \"expectation\"],\n    [\n        # Success cases\n        (\"3.15.0rc1\", \"3.15\\n\", does_not_raise()),\n        (\"3.15.0b3\", \"3.15\\n\", does_not_raise()),\n        (\"3.15.0b2\", \"3.15\\n\", does_not_raise()),\n        (\"3.15.0b1\", \"main\\n\", does_not_raise()),\n        (\"3.15.0a6\", \"main\\n\", does_not_raise()),\n        
(\"3.14.3\", \"3.14\\n\", does_not_raise()),\n        (\"3.13.12\", \"3.13\\n\", does_not_raise()),\n        # Failure cases\n        (\n            \"3.15.0rc1\",\n            \"main\\n\",\n            pytest.raises(ReleaseException, match=\"on main branch, expected 3.15\"),\n        ),\n        (\n            \"3.15.0b1\",\n            \"3.15\\n\",\n            pytest.raises(ReleaseException, match=\"on 3.15 branch, expected main\"),\n        ),\n        (\n            \"3.15.0a6\",\n            \"3.14\\n\",\n            pytest.raises(ReleaseException, match=\"on 3.14 branch, expected main\"),\n        ),\n        (\n            \"3.14.3\",\n            \"main\\n\",\n            pytest.raises(ReleaseException, match=\"on main branch, expected 3.14\"),\n        ),\n    ],\n)\ndef test_check_cpython_repo_branch(\n    monkeypatch, release_tag: str, git_current_branch: str, expectation\n) -> None:\n    # Arrange\n    db = {\"release\": Tag(release_tag), \"git_repo\": \"/fake/repo\"}\n    monkeypatch.setattr(\n        run_release.subprocess,\n        \"check_output\",\n        lambda *args, **kwargs: git_current_branch,\n    )\n\n    # Act / Assert\n    with expectation:\n        run_release.check_cpython_repo_branch(cast(ReleaseShelf, db))\n\n\n@pytest.mark.parametrize(\n    [\"age_seconds\", \"user_continues\", \"expectation\"],\n    [\n        # Recent repo (< 1 day) - no question asked\n        (3600, None, does_not_raise()),\n        # Old repo (> 1 day) + user says yes\n        (90000, True, does_not_raise()),\n        # Old repo (> 1 day) + user says no\n        (90000, False, pytest.raises(ReleaseException, match=\"repository is old\")),\n    ],\n)\ndef test_check_cpython_repo_age(\n    monkeypatch, age_seconds: int, user_continues: bool | None, expectation\n) -> None:\n    # Arrange\n    db = {\"release\": Tag(\"3.15.0a6\"), \"git_repo\": \"/fake/repo\"}\n    current_time = 1700000000\n    commit_timestamp = current_time - age_seconds\n\n    def 
fake_check_output(cmd, **kwargs):\n        cmd_str = \" \".join(cmd)\n        if \"%ct\" in cmd_str:\n            return f\"{commit_timestamp}\\n\"\n        if \"%cr\" in cmd_str:\n            return \"some time ago\\n\"\n        return \"\"\n\n    monkeypatch.setattr(run_release.subprocess, \"check_output\", fake_check_output)\n    monkeypatch.setattr(run_release.time, \"time\", lambda: current_time)\n    if user_continues is not None:\n        monkeypatch.setattr(run_release, \"ask_question\", lambda _: user_continues)\n\n    # Act / Assert\n    with expectation:\n        run_release.check_cpython_repo_age(cast(ReleaseShelf, db))\n\n\ndef test_check_magic_number() -> None:\n    db = {\n        \"release\": Tag(\"3.14.0rc1\"),\n        \"git_repo\": str(Path(__file__).parent / \"magicdata\"),\n    }\n    with pytest.raises(ReleaseException, match=\"Magic numbers in .* don't match\"):\n        run_release.check_magic_number(cast(ReleaseShelf, db))\n\n\ndef prepare_fake_docs(tmp_path: Path, content: str) -> None:\n    docs_path = tmp_path / \"3.13.0rc1/docs\"\n    docs_path.mkdir(parents=True)\n    tarball = tarfile.open(docs_path / \"python-3.13.0rc1-docs-html.tar.bz2\", \"w:bz2\")\n    with tarball:\n        tarinfo = tarfile.TarInfo(\"index.html\")\n        tarinfo.size = len(content)\n        tarball.addfile(tarinfo, io.BytesIO(content.encode()))\n\n\n@contextlib.contextmanager\ndef fake_answers(monkeypatch: pytest.MonkeyPatch, answers: list[str]) -> None:\n    \"\"\"Monkey-patch input() to give the given answers. 
All must be consumed.\"\"\"\n\n    answers_left = list(answers)\n\n    def fake_input(question):\n        print(question, \"--\", answers_left[0])\n        return answers_left.pop(0)\n\n    with monkeypatch.context() as ctx:\n        ctx.setattr(builtins, \"input\", fake_input)\n        yield\n    assert answers_left == []\n\n\ndef test_check_doc_unreleased_version_no_file(tmp_path: Path) -> None:\n    db = {\n        \"release\": Tag(\"3.13.0rc1\"),\n        \"git_repo\": str(tmp_path),\n    }\n    with pytest.raises(AssertionError):\n        # There should be a docs artefact available\n        run_release.check_doc_unreleased_version(cast(ReleaseShelf, db))\n\n\ndef test_check_doc_unreleased_version_no_file_alpha(tmp_path: Path) -> None:\n    db = {\n        \"release\": Tag(\"3.13.0a1\"),\n        \"git_repo\": str(tmp_path),\n    }\n    # No docs artefact needed for alphas\n    run_release.check_doc_unreleased_version(cast(ReleaseShelf, db))\n\n\ndef test_check_doc_unreleased_version_ok(tmp_path: Path) -> None:\n    prepare_fake_docs(\n        tmp_path,\n        \"<div>New in 3.13</div>\",\n    )\n    db = {\n        \"release\": Tag(\"3.13.0rc1\"),\n        \"git_repo\": str(tmp_path),\n    }\n    run_release.check_doc_unreleased_version(cast(ReleaseShelf, db))\n\n\ndef test_check_doc_unreleased_version_not_ok(monkeypatch, tmp_path: Path) -> None:\n    prepare_fake_docs(\n        tmp_path,\n        \"<div>New in 3.13.0rc1 (unreleased)</div>\",\n    )\n    db = {\n        \"release\": Tag(\"3.13.0rc1\"),\n        \"git_repo\": str(tmp_path),\n    }\n    with fake_answers(monkeypatch, [\"no\"]), pytest.raises(AssertionError):\n        run_release.check_doc_unreleased_version(cast(ReleaseShelf, db))\n\n\ndef test_check_doc_unreleased_version_waived(monkeypatch, tmp_path: Path) -> None:\n    prepare_fake_docs(\n        tmp_path,\n        \"<div>New in 3.13.0rc1 (unreleased)</div>\",\n    )\n    db = {\n        \"release\": Tag(\"3.13.0rc1\"),\n        
\"git_repo\": str(tmp_path),\n    }\n    with fake_answers(monkeypatch, [\"yes\"]):\n        run_release.check_doc_unreleased_version(cast(ReleaseShelf, db))\n\n\ndef test_update_whatsnew_toctree(tmp_path: Path) -> None:\n    # Arrange\n    # Only first beta triggers update\n    db = {\"release\": Tag(\"3.14.0b1\")}\n\n    original_toctree_file = Path(__file__).parent / \"whatsnew_index.rst\"\n    toctree__file = tmp_path / \"patchlevel.h\"\n    toctree__file.write_text(original_toctree_file.read_text())\n\n    # Act\n    run_release.update_whatsnew_toctree(cast(ReleaseShelf, db), str(toctree__file))\n\n    # Assert\n    new_contents = toctree__file.read_text()\n    assert \"   3.15.rst\\n   3.14.rst\\n\" in new_contents\n"
  },
  {
    "path": "tests/test_sbom.py",
    "content": "import hashlib\nimport json\nimport pathlib\nimport random\nimport re\nimport unittest.mock\nfrom pathlib import Path\n\nimport pytest\n\nimport sbom\n\n\n@pytest.mark.parametrize(\n    [\"value\", \"expected\"],\n    [\n        (\"abc\", \"abc\"),\n        (\"path/name\", \"path-name\"),\n        (\"SPDXRef-PACKAGE-pip\", \"SPDXRef-PACKAGE-pip\"),\n        (\"SPDXRef-PACKAGE-cpython\", \"SPDXRef-PACKAGE-cpython\"),\n        (\"SPDXRef-PACKAGE-urllib3\", \"SPDXRef-PACKAGE-urllib3\"),\n    ],\n)\ndef test_spdx_id(value: str, expected: str) -> None:\n    assert sbom.spdx_id(value) == expected\n    # Check we get the same value next time\n    assert sbom.spdx_id(value) == expected\n\n\ndef test_spdx_id_collisions():\n    sbom._SPDX_IDS_TO_VALUES = {}  # Reset the cache.\n    assert (\n        sbom.spdx_id(\"SPDXRef-FILE-Lib/collections.py\")\n        == \"SPDXRef-FILE-Lib-collections.py\"\n    )\n    assert (\n        sbom.spdx_id(\"SPDXRef-FILE-Lib/_collections.py\")\n        == \"SPDXRef-FILE-Lib-collections.py-fc43043d\"\n    )\n\n\n@pytest.mark.parametrize(\n    [\"package_sha1s\", \"package_verification_code\"],\n    [\n        # No files -> empty SHA1\n        ([], hashlib.sha1().hexdigest()),\n        # One file -> SHA1(SHA1(file))\n        ([\"F\" * 40], hashlib.sha1(b\"f\" * 40).hexdigest()),\n        # Tests ordering and lowercasing of SHA1s\n        (\n            [\"0\" * 40, \"e\" * 40, \"F\" * 40],\n            hashlib.sha1((b\"0\" * 40) + (b\"e\" * 40) + (b\"f\" * 40)).hexdigest(),\n        ),\n    ],\n)\ndef test_calculate_package_verification_code(package_sha1s, package_verification_code):\n    # Randomize because PackageVerificationCode is deterministic.\n    random.shuffle(package_sha1s)\n\n    input_sbom = {\n        \"files\": [\n            {\n                \"SPDXID\": f\"SPDXRef-FILE-{package_sha1}\",\n                \"checksums\": [{\"algorithm\": \"SHA1\", \"checksumValue\": package_sha1}],\n            }\n            for 
package_sha1 in package_sha1s\n        ],\n        \"packages\": [{\"SPDXID\": \"SPDXRef-PACKAGE\", \"filesAnalyzed\": True}],\n        \"relationships\": [\n            {\n                \"spdxElementId\": \"SPDXRef-PACKAGE\",\n                \"relatedSpdxElement\": f\"SPDXRef-FILE-{package_sha1}\",\n                \"relationshipType\": \"CONTAINS\",\n            }\n            for package_sha1 in package_sha1s\n        ],\n    }\n\n    sbom.calculate_package_verification_codes(input_sbom)\n\n    assert input_sbom[\"packages\"][0][\"packageVerificationCode\"] == {\n        \"packageVerificationCodeValue\": package_verification_code\n    }\n\n\ndef test_normalization():\n    # Test that arbitrary JSON data can be normalized.\n    # Normalization doesn't have to make too much sense,\n    # only needs to be reproducible.\n    data = {\n        \"a\": [1, 2, 3, {\"b\": [4, \"c\", [7, True, \"2\", {}]]}],\n        # This line tests that inner structures are sorted first.\n        \"b\": [[1, 2, \"b\"], [2, 1, \"a\"]],\n    }\n    sbom.normalize_sbom_data(data)\n    assert data == {\n        \"a\": [1, 2, 3, {\"b\": [\"c\", 4, [\"2\", 7, True, {}]]}],\n        \"b\": [[\"a\", 1, 2], [\"b\", 1, 2]],\n    }\n\n\ndef test_fetch_project_metadata_from_pypi(mocker):\n    mock_urlopen = mocker.patch(\"sbom.urlopen\")\n    mock_urlopen.return_value = unittest.mock.Mock()\n\n    # This is only a partial response using the information\n    # that this function uses.\n    mock_urlopen.return_value.read.return_value = json.dumps(\n        {\n            \"urls\": [\n                {\n                    \"digests\": {\n                        \"blake2b_256\": \"94596638090c25e9bc4ce0c42817b5a234e183872a1129735a9330c472cc2056\",\n                        \"sha256\": \"ea9bd1a847e8c5774a5777bb398c19e80bcd4e2aa16a4b301b718fe6f593aba2\",\n                    },\n                    \"filename\": \"pip-24.0.tar.gz\",\n                    \"packagetype\": \"sdist\",\n                  
  \"url\": \"https://files.pythonhosted.org/packages/.../pip-24.0.tar.gz\",\n                },\n                {\n                    \"digests\": {\n                        \"blake2b_256\": \"8a6a19e9fe04fca059ccf770861c7d5721ab4c2aebc539889e97c7977528a53b\",\n                        \"sha256\": \"ba0d021a166865d2265246961bec0152ff124de910c5cc39f1156ce3fa7c69dc\",\n                    },\n                    \"filename\": \"pip-24.0-py3-none-any.whl\",\n                    \"packagetype\": \"bdist_wheel\",\n                    \"url\": \"https://files.pythonhosted.org/packages/.../pip-24.0-py3-none-any.whl\",\n                },\n            ]\n        }\n    ).encode()\n\n    # Default filename is the wheel\n    download_url, checksum_sha256 = sbom.fetch_package_metadata_from_pypi(\n        project=\"pip\",\n        version=\"24.0\",\n    )\n\n    mock_urlopen.assert_called_once_with(\"https://pypi.org/pypi/pip/24.0/json\")\n    assert (\n        download_url\n        == \"https://files.pythonhosted.org/packages/.../pip-24.0-py3-none-any.whl\"\n    )\n    assert (\n        checksum_sha256\n        == \"ba0d021a166865d2265246961bec0152ff124de910c5cc39f1156ce3fa7c69dc\"\n    )\n\n    # If we ask for the sdist (which we don't do normally)\n    # then it'll be returned instead.\n    download_url, checksum_sha256 = sbom.fetch_package_metadata_from_pypi(\n        project=\"pip\", version=\"24.0\", filename=\"pip-24.0.tar.gz\"\n    )\n\n    assert download_url == \"https://files.pythonhosted.org/packages/.../pip-24.0.tar.gz\"\n    assert (\n        checksum_sha256\n        == \"ea9bd1a847e8c5774a5777bb398c19e80bcd4e2aa16a4b301b718fe6f593aba2\"\n    )\n\n\ndef test_remove_pip_from_sbom() -> None:\n    # Arrange\n    with (Path(__file__).parent / \"sbom\" / \"sbom-with-pip.json\").open() as f:\n        sbom_data = json.load(f)\n    with (Path(__file__).parent / \"sbom\" / \"sbom-with-pip-removed.json\").open() as f:\n        expected = json.load(f)\n\n    # Act\n    
sbom.remove_pip_from_sbom(sbom_data)\n\n    # Assert\n    assert sbom_data == expected\n\n\ndef test_create_cpython_sbom():\n    sbom_data = {\"packages\": []}\n\n    artifact_path = str(pathlib.Path(__file__).parent / \"fake-artifact.txt\")\n    sbom.create_cpython_sbom(\n        sbom_data, cpython_version=\"3.13.0\", artifact_path=artifact_path\n    )\n\n    assert re.fullmatch(\n        r\"^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$\",\n        sbom_data[\"creationInfo\"].pop(\"created\"),\n    )\n    assert re.fullmatch(\n        r\"^Tool: ReleaseTools-[a-f0-9]+$\", sbom_data[\"creationInfo\"][\"creators\"].pop(1)\n    )\n\n    assert sbom_data == {\n        \"packages\": [\n            {\n                \"SPDXID\": \"SPDXRef-PACKAGE-cpython\",\n                \"name\": \"CPython\",\n                \"versionInfo\": \"3.13.0\",\n                \"licenseConcluded\": \"PSF-2.0\",\n                \"originator\": \"Organization: Python Software Foundation\",\n                \"supplier\": \"Organization: Python Software Foundation\",\n                \"packageFileName\": \"fake-artifact.txt\",\n                \"externalRefs\": [\n                    {\n                        \"referenceCategory\": \"SECURITY\",\n                        \"referenceLocator\": \"cpe:2.3:a:python:python:3.13.0:*:*:*:*:*:*:*\",\n                        \"referenceType\": \"cpe23Type\",\n                    }\n                ],\n                \"primaryPackagePurpose\": \"SOURCE\",\n                \"downloadLocation\": \"https://www.python.org/ftp/python/3.13.0/fake-artifact.txt\",\n                \"checksums\": [\n                    {\n                        \"algorithm\": \"SHA256\",\n                        \"checksumValue\": \"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\",\n                    }\n                ],\n            }\n        ],\n        \"SPDXID\": \"SPDXRef-DOCUMENT\",\n        \"spdxVersion\": \"SPDX-2.3\",\n        
\"name\": \"CPython SBOM\",\n        \"dataLicense\": \"CC0-1.0\",\n        \"documentNamespace\": \"https://www.python.org/ftp/python/3.13.0/fake-artifact.txt.spdx.json\",\n        \"creationInfo\": {\n            \"creators\": [\n                \"Person: Python Release Managers\",\n            ],\n            \"licenseListVersion\": \"3.22\",\n        },\n    }\n\n\n@pytest.mark.parametrize(\n    [\"cpython_version\", \"download_location\"],\n    [\n        (\"3.13.0\", \"https://www.python.org/ftp/python/3.13.0/fake-artifact.txt\"),\n        (\"3.11.0a1\", \"https://www.python.org/ftp/python/3.11.0/fake-artifact.txt\"),\n        (\"3.12.0b2\", \"https://www.python.org/ftp/python/3.12.0/fake-artifact.txt\"),\n        (\"3.13.0rc3\", \"https://www.python.org/ftp/python/3.13.0/fake-artifact.txt\"),\n    ],\n)\ndef test_create_cpython_sbom_pre_release_download_location(\n    cpython_version, download_location\n):\n    sbom_data = {\"packages\": []}\n\n    artifact_path = str(pathlib.Path(__file__).parent / \"fake-artifact.txt\")\n    sbom.create_cpython_sbom(\n        sbom_data, cpython_version=cpython_version, artifact_path=artifact_path\n    )\n\n    assert sbom_data[\"packages\"][0][\"downloadLocation\"] == download_location\n"
  },
  {
    "path": "tests/test_select_jobs.py",
    "content": "import sys\nfrom textwrap import dedent\n\nimport pytest\n\nimport select_jobs\n\n\n@pytest.mark.parametrize(\n    (\"version\", \"docs\", \"android\", \"ios\"),\n    [\n        (\"3.13.0a1\", \"false\", \"false\", \"false\"),\n        (\"3.13.0rc1\", \"true\", \"false\", \"false\"),\n        (\"3.13.0\", \"true\", \"false\", \"false\"),\n        (\"3.13.1\", \"true\", \"false\", \"false\"),\n        (\"3.14.0b2\", \"false\", \"true\", \"false\"),\n        (\"3.14.0rc1\", \"true\", \"true\", \"false\"),\n        (\"3.14.0\", \"true\", \"true\", \"false\"),\n        (\"3.14.1\", \"true\", \"true\", \"false\"),\n        (\"3.15.0a1\", \"false\", \"true\", \"true\"),\n        (\"3.15.0\", \"true\", \"true\", \"true\"),\n    ],\n)\ndef test_select_jobs(\n    version: str,\n    docs: str,\n    android: str,\n    ios: str,\n    monkeypatch: pytest.MonkeyPatch,\n    capsys: pytest.CaptureFixture[str],\n) -> None:\n    monkeypatch.setattr(sys, \"argv\", [\"select_jobs.py\", version])\n    select_jobs.main()\n    assert capsys.readouterr().out == dedent(\n        f\"\"\"\\\n            docs={docs}\n            android={android}\n            ios={ios}\n        \"\"\"\n    )\n\n\n@pytest.mark.parametrize(\n    \"version\",\n    [\n        \"3.13.0a1\",\n        \"3.13.0\",\n        \"3.14.0b2\",\n        \"3.15.0a1\",\n    ],\n)\ndef test_select_jobs_test_mode(\n    version: str,\n    monkeypatch: pytest.MonkeyPatch,\n    capsys: pytest.CaptureFixture[str],\n) -> None:\n    monkeypatch.setattr(sys, \"argv\", [\"select_jobs.py\", \"--test\", version])\n    select_jobs.main()\n    assert capsys.readouterr().out == dedent(\n        \"\"\"\\\n            docs=true\n            android=true\n            ios=true\n        \"\"\"\n    )\n"
  },
  {
    "path": "tests/test_update_version_next.py",
    "content": "\"\"\"Tests for the update_version_next tool.\"\"\"\n\nfrom pathlib import Path\n\nimport update_version_next\n\nTO_CHANGE = \"\"\"\nDirectives to change\n--------------------\n\nHere, all occurrences of NEXT (lowercase) should be changed:\n\n.. versionadded:: next\n\n.. versionchanged:: next\n\n.. deprecated:: next\n\n.. deprecated-removed:: next 4.0\n\nwhitespace:\n\n..   versionchanged:: next\n\n.. versionchanged  :: next\n\n    .. versionadded:: next\n\narguments:\n\n.. versionadded:: next\n    Foo bar\n\n.. versionadded:: next as ``previousname``\n\"\"\"\n\nUNCHANGED = \"\"\"\nUnchanged\n---------\n\nHere, the word \"next\" should NOT be changed:\n\n.. versionchanged:: NEXT\n\n..versionchanged:: NEXT\n\n... versionchanged:: next\n\nfoo .. versionchanged:: next\n\n.. otherdirective:: next\n\n.. VERSIONCHANGED: next\n\n.. deprecated-removed: 3.0 next\n\"\"\"\n\nEXPECTED_CHANGED = TO_CHANGE.replace(\"next\", \"VER\")\n\n\ndef test_freeze_simple_script(tmp_path: Path) -> None:\n    p = tmp_path.joinpath\n\n    p(\"source.rst\").write_text(TO_CHANGE + UNCHANGED)\n    p(\"subdir\").mkdir()\n    p(\"subdir/change.rst\").write_text(\".. versionadded:: next\")\n    p(\"subdir/keep.not-rst\").write_text(\".. versionadded:: next\")\n    p(\"subdir/keep.rst\").write_text(\"nothing to see here\")\n    args = [\"VER\", str(tmp_path)]\n    update_version_next.main(args)\n    assert p(\"source.rst\").read_text() == EXPECTED_CHANGED + UNCHANGED\n    assert p(\"subdir/change.rst\").read_text() == \".. versionadded:: VER\"\n    assert p(\"subdir/keep.not-rst\").read_text() == \".. versionadded:: next\"\n    assert p(\"subdir/keep.rst\").read_text() == \"nothing to see here\"\n"
  },
  {
    "path": "tests/whatsnew_index.rst",
    "content": ".. _whatsnew-index:\n\n######################\n What's New in Python\n######################\n\nThe \"What's New in Python\" series of essays takes tours through the most\nimportant changes between major Python versions.  They are a \"must read\" for\nanyone wishing to stay up-to-date after a new release.\n\n.. toctree::\n   :maxdepth: 2\n\n   3.14.rst\n   3.13.rst\n   3.12.rst\n   3.11.rst\n   3.10.rst\n   3.9.rst\n   3.8.rst\n   3.7.rst\n   3.6.rst\n   3.5.rst\n   3.4.rst\n   3.3.rst\n   3.2.rst\n   3.1.rst\n   3.0.rst\n   2.7.rst\n   2.6.rst\n   2.5.rst\n   2.4.rst\n   2.3.rst\n   2.2.rst\n   2.1.rst\n   2.0.rst\n\nThe \"Changelog\" is an HTML version of the :pypi:`file built<blurb>`\nfrom the contents of the\n:source:`Misc/NEWS.d` directory tree, which contains *all* nontrivial changes\nto Python for the current version.\n\n.. toctree::\n   :maxdepth: 2\n\n   changelog.rst\n"
  },
  {
    "path": "tox.ini",
    "content": "[tox]\nrequires =\n    tox>=4.2\nenv_list =\n    lint\n    py{314, 313, 312}\n\n[testenv]\nskip_install = true\ndeps =\n    -r dev-requirements.txt\n    -r requirements.txt\ncommands =\n    {envpython} -m pytest -vv \\\n      tests/ \\\n      --cov . \\\n      --cov tests \\\n      --cov-report html \\\n      --cov-report term \\\n      --cov-report xml \\\n      {posargs}\n\n[testenv:lint]\nskip_install = true\ndeps =\n    pre-commit\npass_env =\n    PRE_COMMIT_COLOR\ncommands =\n    pre-commit run --all-files --show-diff-on-failure\n\n[testenv:mypy]\nskip_install = true\ndeps =\n    -r mypy-requirements.txt\ncommands =\n    mypy . {posargs}\n"
  },
  {
    "path": "update_version_next.py",
    "content": "#!/usr/bin/env python3\n\"\"\"\nReplace `.. versionchanged:: next` lines in docs files by the given version.\n\nRun this at release time to replace `next` with the just-released version\nin the sources.\n\nNo backups are made; add/commit to Git before running the script.\n\nApplies to all the VersionChange directives. For deprecated-removed, only\nhandle the first argument (deprecation version, not the removal version).\n\n\"\"\"\n\nimport argparse\nimport re\nimport sys\nfrom pathlib import Path\n\nDIRECTIVE_RE = re.compile(\n    r\"\"\"\n        (?P<before>\n            \\s*\\.\\.\\s+\n            (version(added|changed|removed)|deprecated(-removed)?)\n            \\s*::\\s*\n        )\n        next\n        (?P<after>\n            .*\n        )\n    \"\"\",\n    re.VERBOSE | re.DOTALL,\n)\n\nparser = argparse.ArgumentParser(\n    description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter\n)\nparser.add_argument(\n    \"version\",\n    help='String to replace \"next\" with. Usually `x.y`, but can be anything.',\n)\nparser.add_argument(\n    \"directory\",\n    type=Path,\n    help=\"Directory to process\",\n)\nparser.add_argument(\n    \"--verbose\",\n    \"-v\",\n    action=\"count\",\n    default=0,\n    help=\"Increase verbosity. 
Can be repeated (`-vv`).\",\n)\n\n\ndef main(argv: list[str]) -> None:\n    args = parser.parse_args(argv)\n    version = args.version\n    if args.verbose:\n        print(\n            f'Updating \"next\" versions in {args.directory} to {version!r}',\n            file=sys.stderr,\n        )\n    for path in Path(args.directory).glob(\"**/*.rst\"):\n        num_changed_lines = 0\n        lines = []\n        with open(path, encoding=\"utf-8\") as file:\n            for lineno, line in enumerate(file, start=1):\n                try:\n                    if match := DIRECTIVE_RE.fullmatch(line):\n                        line = match[\"before\"] + version + match[\"after\"]\n                        num_changed_lines += 1\n                    lines.append(line)\n                except Exception as exc:\n                    exc.add_note(f\"processing line {path}:{lineno}\")\n                    raise\n        if num_changed_lines:\n            if args.verbose:\n                s = \"\" if num_changed_lines == 1 else \"s\"\n                print(\n                    f\"Updating file {path} ({num_changed_lines} change{s})\",\n                    file=sys.stderr,\n                )\n            with open(path, \"w\", encoding=\"utf-8\") as file:\n                file.writelines(lines)\n        else:\n            if args.verbose > 1:\n                print(f\"Unchanged file {path}\", file=sys.stderr)\n\n\nif __name__ == \"__main__\":\n    main(sys.argv[1:])\n"
  },
  {
    "path": "windows-release/README.md",
    "content": "# Windows Releases\n\nThis build script is used for official releases of CPython on Windows.\nIt is somewhat more complex than `Tools/msi/buildrelease.bat` because it uses additional parallelism\nand uses our official code signing certificate.\n\nThis script is designed to be run on Azure Pipelines.\nInformation about the syntax can be found at https://docs.microsoft.com/azure/devops/pipelines/\n\nThe current deployment is at https://dev.azure.com/Python/cpython/_build?definitionId=21\nChances are you don't have permission to do anything other than view builds. Access is controlled by the release team.\n\nIf you do have permission, you can launch a release build by selecting **Run pipeline**,\nspecify the desired **Git remote** and **Git tag**, enable **Publish release**,\ntoggle any version specific options, and click **Run**.\n\nThe version specific options are required due to changes in our build that require modifications\nto the publish pipeline. For example, whether to publish ARM64 binaries.\n\nWhen signing is enabled (any value besides \"Unsigned\"), authorised approvers will be notified and\nwill need to approve each stage that requires the signing certificate (typically three).\nThis helps prevent \"surprise\" builds from using the official certificate.\n\nSome additional points to be aware of:\n\n* packages are not automatically published to the Microsoft Store\n* successful builds should be retained by selecting \"Retain\" under the \"...\" menu in the top-right\n\nThe `msixupload` artifacts should be uploaded to the Microsoft Store at\nhttps://partner.microsoft.com/en-us/dashboard/apps-and-games/overview.\nAccess to this site is very limited.\nWe also usually update the screenshots so that the version information they show matches the release.\n\nAzure DevOps no longer has a per-pipeline option for retention,\nand so the only way to permanently retain a build is to manually select the \"Retain\" option.\nWithout this, the build records 
will be lost after 30 days.\n\n## Finding/updating certificates\n\nFor code signing, we use [Azure Trusted Signing](https://learn.microsoft.com/en-us/azure/trusted-signing/overview).\nThis service belongs to the PSF's Azure subscription and is paid for on a monthly basis.\nWhen we send files for signing, it uploads a manifest (hash) of the file rather than the file itself,\nand then receives a signature that can be embedded into the target file.\n\nAuthentication to Azure currently uses an [Entra app registration](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app)\nrather than OIDC (which is available, and may be switched to in future).\nThe authentication details are stored as private variables in a Variable group called CPythonSign.\nReferencing this variable group is what triggers approvals during the build.\nThe group is at https://dev.azure.com/Python/cpython/_library?itemType=VariableGroups&view=VariableGroupView&variableGroupId=1&path=CPythonSign\n\nThe six variables in the Variable Group identify the Entra ID\nwith access,\nand the name of the certificate to use.\n\n* `TrustedSigningClientId` - the \"Application (client) ID\" of the App registration\n* `TrustedSigningTenantId` - the \"Directory (tenant) ID\" of the App registration\n* `TrustedSigningSecret` - the current \"Client secret\" of the App registration\n* `TrustedSigningUri` - the endpoint of the Trusted Signing service (provided by Azure)\n* `TrustedSigningAccount` - the name of our Trusted Signing account, \"pythondev\". This is not a secret\n* `TrustedSigningCertificateName` - the name of our certificate profile. 
This is not a secret\n\nCertificates are renewed daily,\nand as such it is no longer useful to reference the \"thumbprint\" (SHA1 hash) of the certificate.\nInstead, to trust all of our releases in restricted scenarios,\nyou need to first trust one of the certificates in the certification path\nand then check for EKU `1.3.6.1.4.1.311.97.608394634.79987812.305991749.578777327`,\nwhich represents our signing account,\nor Subject `CN=Python Software Foundation,O=Python Software Foundation,L=Beaverton,S=Oregon,C=US`.\n\nTODO: Reference/link to documentation on verifying certificates with tools.\n\nNote that regular signing checks (such as `signtool.exe verify /pa python.exe`)\nand malware scans will treat the files as correctly signed.\nIt's only more complicated to verify that it was signed _specifically_ with our cert.\n\n(Further documentation to be added as we find out what ought to be documented.)\n"
  },
  {
    "path": "windows-release/acquire-vcruntime.yml",
    "content": "parameters:\n  Remote: https://github.com/python/cpython-bin-deps\n  Ref: vcruntime\n\nsteps:\n- powershell: |\n    git clone --progress -v --depth 1 --branch ${{ parameters.Ref }} --single-branch ${{ parameters.Remote }} vcruntime\n    $files = (dir \"vcruntime\\$(arch)\\*.dll\").FullName -join \";\"\n    \"##vso[task.setvariable variable=VCRuntimeDLL]$files\"\n  displayName: 'Import VC redist'\n"
  },
  {
    "path": "windows-release/azure-pipelines.yml",
    "content": "name: Release_$(SourceTag)_$(Date:yyyyMMdd)$(Rev:.rr)\n\nparameters:\n- name: GitRemote\n  displayName: \"Git remote\"\n  type: string\n  default: python\n  values:\n  - 'python'\n  - 'savannahostrowski'\n  - 'hugovk'\n  - 'Yhg1s'\n  - 'pablogsal'\n  - 'ambv'\n  - 'zooba'\n  - '(Other)'\n- name: GitRemote_Other\n  displayName: \"If Other, specify Git remote\"\n  type: string\n  default: 'python'\n- name: SourceTag\n  displayName: \"Git tag\"\n  type: string\n  default: main\n- name: SourceCommit\n  displayName: \"Git commit ('empty' to disable commit SHA check)\"\n  type: string\n  default: 'empty'\n- name: DoPublish\n  displayName: \"Publish release\"\n  type: boolean\n  default: false\n- name: SigningCertificate\n  displayName: \"Code signing certificate\"\n  type: string\n  default: 'PythonSoftwareFoundation'\n  values:\n  - 'PythonSoftwareFoundation'\n  - 'TestSign'\n  - 'Unsigned'\n- name: SigningDescription\n  displayName: \"Signature description\"\n  type: string\n  default: '(default)'\n- name: Post315OutputDir\n  displayName: \"Separate free-threaded outputs (3.15 and later)\"\n  type: boolean\n  default: false\n- name: DoTailCalling\n  displayName: \"Build with tail-calling support (3.15 and later)\"\n  type: boolean\n  default: false\n- name: DoJIT\n  displayName: \"Build the JIT compiler (3.14 and later)\"\n  type: boolean\n  default: true\n- name: DoGPG\n  displayName: \"Include GPG signatures (3.13 and earlier)\"\n  type: boolean\n  default: false\n- name: DoFreethreaded\n  displayName: \"Include free-threaded builds\"\n  type: boolean\n  default: true\n- name: DoARM64\n  displayName: \"Publish ARM64 build\"\n  type: boolean\n  default: true\n- name: DoPGO\n  displayName: \"Run PGO\"\n  type: boolean\n  default: true\n- name: DoPGOARM64\n  displayName: \"Run ARM64 PGO\"\n  type: boolean\n  default: true\n- name: DoLayout\n  displayName: \"Produce full layout artifact\"\n  type: boolean\n  default: true\n- name: DoMSIX\n  displayName: 
\"Produce Store packages (3.13 and earlier)\"\n  type: boolean\n  default: false\n- name: DoNuget\n  displayName: \"Produce Nuget packages\"\n  type: boolean\n  default: true\n- name: DoEmbed\n  displayName: \"Produce embeddable package (w/ PyManager or MSI options)\"\n  type: boolean\n  default: true\n- name: DoMSI\n  displayName: \"Produce EXE/MSI installer\"\n  type: boolean\n  default: true\n- name: TestMSI\n  displayName: \"Run EXE/MSI installer tests\"\n  type: boolean\n  default: true\n- name: DoPyManager\n  displayName: \"Produce PyManager package\"\n  type: boolean\n  default: true\n- name: BuildToPublish\n  displayName: \"Republish a build (select in Resources)\"\n  type: string\n  default: current\n  values: ['current', 'build_to_publish']\n- name: BuildToPackage\n  displayName: \"Repackage and publish a build (select in Resources)\"\n  type: string\n  default: current\n  values: ['current', 'build_to_package']\n- name: SignNuget\n  displayName: \"Enable Nuget signing (not recommended right now)\"\n  type: boolean\n  default: false\n- name: DoJITEnabled\n  displayName: \"Enable the JIT compiler by default (not used yet)\"\n  type: boolean\n  default: false\n- name: DoJITFreethreaded\n  displayName: \"Build the JIT compiler for free-threaded builds (not used yet)\"\n  type: boolean\n  default: false\n- name: vmImage\n  displayName: \"VM Image\"\n  type: string\n  default: windows-2025\n\nresources:\n  pipelines:\n  - pipeline: build_to_publish\n    source: 'Windows-Release'\n  - pipeline: build_to_package\n    source: 'Windows-Release'\n\nvariables:\n  ${{ if ne(parameters.GitRemote, '(Other)') }}:\n    GitRemote: ${{ parameters.GitRemote }}\n  ${{ else }}:\n    GitRemote: ${{ parameters.GitRemote_Other }}\n  SourceTag: ${{ parameters.SourceTag }}\n  ${{ if ne(parameters.SourceCommit, 'empty') }}:\n    SourceCommit: ${{ parameters.SourceCommit }}\n  ${{ if eq(parameters.SigningCertificate, 'PythonSoftwareFoundation') }}:\n    IsRealSigned: true\n  ${{ 
else }}:\n    IsRealSigned: false\n  ${{ if ne(parameters.SigningDescription, '(default)') }}:\n    SigningDescription: ${{ parameters.SigningDescription }}\n  ${{ else }}:\n    SigningDescription: ''\n  PublishARM64: ${{ parameters.DoARM64 }}\n# QUEUE TIME VARIABLES\n#  OverrideNugetVersion: ''\n#  PyManagerIndexFilename: ''\n#  SkipNugetPublish: ''\n#  SkipPipTests: ''\n#  SkipPythonOrgPublish: ''\n#  SkipSBOM: ''\n#  SkipTests: ''\n#  SkipTkTests: ''\n\ntrigger: none\npr: none\n\nstages:\n- ${{ if and(eq(parameters.BuildToPublish, 'current'), eq(parameters.BuildToPackage, 'current')) }}:\n  - stage: Build\n    displayName: Build binaries\n    pool:\n      vmImage: ${{ iif(eq(parameters.DoTailCalling, 'true'), 'windows-2025-vs2026', parameters.vmImage) }}\n    jobs:\n    - template: start-arm64vm.yml\n      parameters:\n        DoARM64: ${{ parameters.DoARM64 }}\n        DoPGOARM64: ${{ parameters.DoPGOARM64 }}\n    - template: stage-build.yml\n      parameters:\n        DoFreethreaded: ${{ parameters.DoFreethreaded }}\n        DoPGO: ${{ parameters.DoPGO }}\n        DoPGOARM64: ${{ parameters.DoPGOARM64 }}\n        ${{ if and(parameters.SigningCertificate, ne(parameters.SigningCertificate, 'Unsigned')) }}:\n          ToBeSigned: true\n        ${{ if ne(parameters.DoJIT, 'true') }}:\n          ExtraOptions: ''\n        ${{ elseif ne(parameters.DoJITEnabled, 'true') }}:\n          ExtraOptions: '--experimental-jit-off'\n        ${{ else }}:\n          ExtraOptions: '--experimental-jit'\n        ${{ if or(ne(parameters.DoJIT, 'true'), ne(parameters.DoJITFreethreaded, 'true')) }}:\n          ExtraOptionsFreethreaded: '--disable-gil'\n        ${{ elseif ne(parameters.DoJITEnabled, 'true') }}:\n          ExtraOptionsFreethreaded: '--disable-gil --experimental-jit-off'\n        ${{ else }}:\n          ExtraOptionsFreethreaded: '--disable-gil --experimental-jit'\n        ${{ if ne(parameters.DoTailCalling, 'true') }}:\n          TailCallingOption: ''\n        ${{ else 
}}:\n          TailCallingOption: '--tail-call-interp'\n        Post315OutputDir: ${{ parameters.Post315OutputDir }}\n\n  - stage: Sign\n    displayName: Sign binaries\n    dependsOn: Build\n    pool:\n      vmImage: ${{ parameters.vmImage }}\n    jobs:\n    - template: stage-sign.yml\n      parameters:\n        SigningCertificate: ${{ parameters.SigningCertificate }}\n        DoFreethreaded: ${{ parameters.DoFreethreaded }}\n\n- ${{ if eq(parameters.BuildToPublish, 'current') }}:\n  - stage: Layout\n    ${{ if eq(parameters.BuildToPackage, 'current') }}:\n      displayName: Generate layouts\n      dependsOn: Sign\n    ${{ else }}:\n      displayName: Generate layouts from prior build\n      dependsOn: []\n    pool:\n      vmImage: ${{ parameters.vmImage }}\n    jobs:\n    - template: stage-layout-full.yml\n      parameters:\n        BuildToPackage: ${{ parameters.BuildToPackage }}\n        DoFreethreaded: ${{ parameters.DoFreethreaded }}\n        SigningCertificate: ${{ iif(eq(parameters.SigningCertificate, 'Unsigned'), '', parameters.SigningCertificate) }}\n    - template: stage-layout-symbols.yml\n      parameters:\n        BuildToPackage: ${{ parameters.BuildToPackage }}\n        DoFreethreaded: ${{ parameters.DoFreethreaded }}\n        SigningCertificate: ${{ iif(eq(parameters.SigningCertificate, 'Unsigned'), '', parameters.SigningCertificate) }}\n    - ${{ if eq(parameters.DoEmbed, 'true') }}:\n      - template: stage-layout-embed.yml\n        parameters:\n          BuildToPackage: ${{ parameters.BuildToPackage }}\n          SigningCertificate: ${{ iif(eq(parameters.SigningCertificate, 'Unsigned'), '', parameters.SigningCertificate) }}\n    - ${{ if eq(parameters.DoNuget, 'true') }}:\n      - template: stage-layout-nuget.yml\n        parameters:\n          BuildToPackage: ${{ parameters.BuildToPackage }}\n          DoFreethreaded: ${{ parameters.DoFreethreaded }}\n          SigningCertificate: ${{ iif(eq(parameters.SigningCertificate, 'Unsigned'), '', 
parameters.SigningCertificate) }}\n    - ${{ if eq(parameters.DoMSIX, 'true') }}:\n      - template: stage-layout-msix.yml\n        parameters:\n          BuildToPackage: ${{ parameters.BuildToPackage }}\n          SigningCertificate: ${{ iif(eq(parameters.SigningCertificate, 'Unsigned'), '', parameters.SigningCertificate) }}\n    - ${{ if eq(parameters.DoPyManager, 'true') }}:\n      - template: stage-layout-pymanager.yml\n        parameters:\n          BuildToPackage: ${{ parameters.BuildToPackage }}\n          DoFreethreaded: ${{ parameters.DoFreethreaded }}\n          DoEmbed: ${{ parameters.DoEmbed }}\n          SigningCertificate: ${{ iif(eq(parameters.SigningCertificate, 'Unsigned'), '', parameters.SigningCertificate) }}\n\n  - stage: Pack\n    dependsOn: Layout\n    displayName: Pack\n    pool:\n      vmImage: ${{ parameters.vmImage }}\n    jobs:\n    #- ${{ if eq(parameters.DoEmbed, 'true') }}:\n    #  - template: stage-pack-embed.yml\n    #    parameters:\n    #      SigningCertificate: ${{ iif(eq(parameters.SigningCertificate, 'Unsigned'), '', parameters.SigningCertificate) }}\n    - ${{ if eq(parameters.DoMSI, 'true') }}:\n      - template: stage-msi.yml\n        parameters:\n          BuildToPackage: ${{ parameters.BuildToPackage }}\n          DoARM64: ${{ parameters.DoARM64}}\n          DoFreethreaded: ${{ parameters.DoFreethreaded }}\n          SigningCertificate: ${{ iif(eq(parameters.SigningCertificate, 'Unsigned'), '', parameters.SigningCertificate) }}\n          Post315OutputDir: ${{ parameters.Post315OutputDir }}\n    - ${{ if eq(parameters.DoMSIX, 'true') }}:\n      - template: stage-pack-msix.yml\n        parameters:\n          SigningCertificate: ${{ iif(eq(parameters.SigningCertificate, 'Unsigned'), '', parameters.SigningCertificate) }}\n    - ${{ if eq(parameters.DoNuget, 'true') }}:\n      - template: stage-pack-nuget.yml\n        parameters:\n          ${{ if eq(parameters.SignNuget, 'true') }}:\n            SigningCertificate: ${{ 
iif(eq(parameters.SigningCertificate, 'Unsigned'), '', parameters.SigningCertificate) }}\n          DoFreethreaded: ${{ parameters.DoFreethreaded }}\n    - ${{ if eq(parameters.DoPyManager, 'true') }}:\n      - template: stage-pack-pymanager.yml\n        parameters:\n          DoFreethreaded: ${{ parameters.DoFreethreaded }}\n          DoEmbed: ${{ parameters.DoEmbed }}\n          SigningCertificate: ${{ iif(eq(parameters.SigningCertificate, 'Unsigned'), '', parameters.SigningCertificate) }}\n\n  - stage: Test\n    dependsOn: Pack\n    pool:\n      vmImage: ${{ parameters.vmImage }}\n    jobs:\n    - ${{ if eq(parameters.DoEmbed, 'true') }}:\n      - template: stage-test-embed.yml\n    - ${{ if and(eq(parameters.DoMSI, 'true'), eq(parameters.TestMSI, 'true')) }}:\n      - template: stage-test-msi.yml\n        parameters:\n          DoFreethreaded: ${{ parameters.DoFreethreaded }}\n    - ${{ if eq(parameters.DoNuget, 'true') }}:\n      - template: stage-test-nuget.yml\n        parameters:\n          DoFreethreaded: ${{ parameters.DoFreethreaded }}\n    - ${{ if eq(parameters.DoPyManager, 'true') }}:\n      - template: stage-test-pymanager.yml\n        parameters:\n          DoEmbed: ${{ parameters.DoEmbed }}\n          DoFreethreaded: ${{ parameters.DoFreethreaded }}\n\n- ${{ if eq(parameters.DoPublish, 'true') }}:\n  - stage: Publish\n    displayName: Publish\n    dependsOn:\n    - ${{ if eq(parameters.BuildToPublish, 'current') }}:\n      - Test\n    pool:\n      vmImage: ${{ parameters.vmImage }}\n    jobs:\n    - ${{ if eq(parameters.DoPyManager, 'true') }}:\n      - template: stage-publish-pymanager.yml\n        parameters:\n          BuildToPublish: ${{ parameters.BuildToPublish }}\n          DoEmbed: ${{ parameters.DoEmbed }}\n          DoFreethreaded: ${{ parameters.DoFreethreaded }}\n          SigningCertificate: ${{ iif(eq(parameters.SigningCertificate, 'Unsigned'), '', parameters.SigningCertificate) }}\n    - ${{ if eq(parameters.DoMSI, 'true') }}:\n      
- template: stage-publish-pythonorg.yml\n        parameters:\n          BuildToPublish: ${{ parameters.BuildToPublish }}\n          DoEmbed: ${{ parameters.DoEmbed }}\n          IncludeGPG: ${{ parameters.DoGPG }}\n    - ${{ if eq(parameters.DoNuget, 'true') }}:\n      - template: stage-publish-nugetorg.yml\n        parameters:\n          BuildToPublish: ${{ parameters.BuildToPublish }}\n"
  },
  {
    "path": "windows-release/build-steps-pgo.yml",
    "content": "parameters:\n  PGInstrument: false\n  PGRun: false\n  PGUpdate: false\n\nsteps:\n- template: ./checkout.yml\n\n- ${{ if or(eq(parameters.PGInstrument, 'true'), eq(parameters.PGUpdate, 'true')) }}:\n  - template: ./acquire-vcruntime.yml\n\n  - powershell: |\n      $d = (.\\PCbuild\\build.bat -V) | %{ if($_ -match '\\s+(\\w+):\\s*(.+)\\s*$') { @{$Matches[1] = $Matches[2];} }};\n      Write-Host \"##vso[task.setvariable variable=VersionText]$($d.PythonVersion)\"\n      Write-Host \"##vso[task.setvariable variable=VersionNumber]$($d.PythonVersionNumber)\"\n      Write-Host \"##vso[task.setvariable variable=VersionHex]$($d.PythonVersionHex)\"\n      Write-Host \"##vso[task.setvariable variable=VersionUnique]$($d.PythonVersionUnique)\"\n      Write-Host \"##vso[build.addbuildtag]$($d.PythonVersion)\"\n      Write-Host \"##vso[build.addbuildtag]$($d.PythonVersion)-$(Name)\"\n    displayName: 'Extract version numbers'\n\n- ${{ if eq(parameters.PGInstrument, 'true') }}:\n  - powershell: |\n      $env:SigningCertificate = $null\n      .\\PCbuild\\build.bat -v -p $(Platform) -c PGInstrument $(ExtraOptions)\n    displayName: 'Run build'\n    env:\n      IncludeUwp: true\n      Py_OutDir: '$(Build.BinariesDirectory)\\bin'\n\n  - ${{ if ne(parameters.PGRun, 'true') }}:\n    # Not running in this job, so we publish our entire build and object files\n\n    - publish: '$(Build.BinariesDirectory)\\bin\\$(ArchDir)\\instrumented'\n      artifact: instrumented_bin_$(Name)\n      displayName: 'Publish binaries for profiling'\n\n    - powershell: |\n        if ((Test-Path Python\\frozen_modules) -and (Test-Path Python\\deepfreeze)) {\n            move Python\\frozen_modules, Python\\deepfreeze PCbuild\\obj\\\n        }\n      displayName: 'Preserve frozen_modules'\n\n    - publish: '$(Build.SourcesDirectory)\\PCbuild\\obj'\n      artifact: instrumented_obj_$(Name)\n      displayName: 'Download artifact: instrumented_obj_$(Name)'\n\n\n- ${{ if eq(parameters.PGRun, 'true') 
}}:\n  - ${{ if ne(parameters.PGInstrument, 'true') }}:\n    # Didn't build in this job, so download the required binaries\n    - task: DownloadPipelineArtifact@2\n      displayName: 'Download artifact: instrumented_bin_$(Name)'\n      inputs:\n        buildType: current\n        artifact: instrumented_bin_$(Name)\n        targetPath: '$(Build.BinariesDirectory)\\bin\\$(ArchDir)\\instrumented'\n\n  - powershell: |\n      $exe = (gci \"$(Build.BinariesDirectory)\\bin\\$(ArchDir)\\instrumented\\$(PythonExePattern)\" | select -First 1)\n      & $exe -m test --pgo\n    ignoreLASTEXITCODE: true\n    displayName: 'Collect profile'\n    env:\n      PYTHONHOME: '$(Build.SourcesDirectory)'\n\n\n  - ${{ if ne(parameters.PGUpdate, 'true') }}:\n    # Not finishing in this job, so publish the binaries\n    - publish: '$(Build.BinariesDirectory)\\bin\\$(ArchDir)\\instrumented'\n      artifact: profile_bin_$(Name)\n      displayName: 'Publish collected data and binaries'\n\n\n- ${{ if eq(parameters.PGUpdate, 'true') }}:\n  - ${{ if ne(parameters.PGRun, 'true') }}:\n    # Didn't run/build in this job, so download files\n    - task: DownloadPipelineArtifact@2\n      displayName: 'Download artifact: profile_bin_$(Name)'\n      inputs:\n        buildType: current\n        artifact: profile_bin_$(Name)\n        targetPath: '$(Build.BinariesDirectory)\\bin\\$(ArchDir)\\instrumented'\n\n    - task: DownloadPipelineArtifact@2\n      displayName: 'Download artifact: instrumented_obj_$(Name)'\n      inputs:\n        buildType: current\n        artifact: instrumented_obj_$(Name)\n        targetPath: '$(Build.SourcesDirectory)\\PCbuild\\obj'\n\n    - powershell: |\n        if (Test-Path PCbuild\\obj\\frozen_modules) {\n            copy -force -r PCbuild\\obj\\frozen_modules\\* (mkdir -Force Python\\frozen_modules)\n        }\n        if (Test-Path PCbuild\\obj\\deepfreeze) {\n            copy -force -r PCbuild\\obj\\deepfreeze\\* (mkdir -Force Python\\deepfreeze)\n        }\n      
displayName: 'Restore frozen_modules'\n\n  - powershell: |\n      $env:SigningCertificate = $null\n      .\\PCbuild\\build.bat -v -p $(Platform) -c PGUpdate $(ExtraOptions)\n    displayName: 'Run build with PGO'\n    env:\n      IncludeUwp: true\n      Py_OutDir: '$(Build.BinariesDirectory)\\bin'\n\n  - powershell: |\n      $kitroot = (gp 'HKLM:\\SOFTWARE\\Microsoft\\Windows Kits\\Installed Roots\\').KitsRoot10\n      $tool = (gci -r \"$kitroot\\Bin\\*\\x64\\signtool.exe\" | sort FullName -Desc | select -First 1)\n      if (-not $tool) {\n        throw \"SDK is not available\"\n      }\n      Write-Host \"##vso[task.prependpath]$($tool.Directory)\"\n    displayName: 'Add WinSDK tools to path'\n\n  - powershell: |\n      $env:SigningCertificate = $null\n      $(_HostPython) PC\\layout -vv -b \"$(Build.BinariesDirectory)\\bin\" -t \"$(Build.BinariesDirectory)\\catalog\" --catalog \"${env:CAT}.cdf\" --preset-default --arch $(Arch)\n      makecat \"${env:CAT}.cdf\"\n      del \"${env:CAT}.cdf\"\n      if (-not (Test-Path \"${env:CAT}.cat\")) {\n        throw \"Failed to build catalog file\"\n      }\n    displayName: 'Generate catalog'\n    env:\n      CAT: $(Build.BinariesDirectory)\\bin\\$(ArchDir)\\python\n      PYTHON_HEXVERSION: $(VersionHex)\n\n  - powershell: |\n      del instrumented -r -EA 0\n      del *.pgc, *.pgd, *.exp\n    displayName: 'Cleanup build'\n    workingDirectory: '$(Build.BinariesDirectory)\\bin\\$(ArchDir)'\n\n  - powershell: |\n      copy \"$(Build.SourcesDirectory)\\Lib\\venv\\scripts\\common\\Activate.ps1\" .\n    displayName: 'Copy Powershell scripts from source'\n    workingDirectory: '$(Build.BinariesDirectory)\\bin\\$(ArchDir)'\n\n  - publish: '$(Build.BinariesDirectory)\\bin\\$(ArchDir)'\n    artifact: $(Artifact)\n    displayName: 'Publish binaries'\n"
  },
  {
    "path": "windows-release/build-steps.yml",
    "content": "parameters:\n  ShouldPGO: false\n\nsteps:\n- template: ./checkout.yml\n- template: ./acquire-vcruntime.yml\n\n- powershell: |\n    $d = (.\\PCbuild\\build.bat -V) | %{ if($_ -match '\\s+(\\w+):\\s*(.+)\\s*$') { @{$Matches[1] = $Matches[2];} }};\n    Write-Host \"##vso[task.setvariable variable=VersionText]$($d.PythonVersion)\"\n    Write-Host \"##vso[task.setvariable variable=VersionNumber]$($d.PythonVersionNumber)\"\n    Write-Host \"##vso[task.setvariable variable=VersionHex]$($d.PythonVersionHex)\"\n    Write-Host \"##vso[task.setvariable variable=VersionUnique]$($d.PythonVersionUnique)\"\n    Write-Host \"##vso[build.addbuildtag]$($d.PythonVersion)\"\n    Write-Host \"##vso[build.addbuildtag]$($d.PythonVersion)-$(Name)\"\n  displayName: 'Extract version numbers'\n\n- ${{ if eq(parameters.ShouldPGO, 'false') }}:\n  - powershell: |\n      $env:SigningCertificate = $null\n      .\\PCbuild\\build.bat -v -p $(Platform) -c $(Configuration) $(ExtraOptions)\n    displayName: 'Run build'\n    env:\n      IncludeUwp: true\n      Py_OutDir: '$(Build.BinariesDirectory)\\bin'\n\n- ${{ else }}:\n  - powershell: |\n      $env:SigningCertificate = $null\n      .\\PCbuild\\build.bat -v -p $(Platform) --pgo $(ExtraOptions)\n    displayName: 'Run build with PGO'\n    env:\n      IncludeUwp: true\n      Py_OutDir: '$(Build.BinariesDirectory)\\bin'\n\n- powershell: |\n    $kitroot = (gp 'HKLM:\\SOFTWARE\\Microsoft\\Windows Kits\\Installed Roots\\').KitsRoot10\n    $tool = (gci -r \"$kitroot\\Bin\\*\\x64\\signtool.exe\" | sort FullName -Desc | select -First 1)\n    if (-not $tool) {\n      throw \"SDK is not available\"\n    }\n    Write-Host \"##vso[task.prependpath]$($tool.Directory)\"\n  displayName: 'Add WinSDK tools to path'\n\n- powershell: |\n    $env:SigningCertificate = $null\n    $(_HostPython) PC\\layout -vv -b \"$(Build.BinariesDirectory)\\bin\" -t \"$(Build.BinariesDirectory)\\catalog\" --catalog \"${env:CAT}.cdf\" --preset-default --arch $(Arch)\n    
makecat \"${env:CAT}.cdf\"\n    del \"${env:CAT}.cdf\"\n    if (-not (Test-Path \"${env:CAT}.cat\")) {\n      throw \"Failed to build catalog file\"\n    }\n  displayName: 'Generate catalog'\n  env:\n    CAT: $(Build.BinariesDirectory)\\bin\\$(ArchDir)\\python\n    PYTHON_HEXVERSION: $(VersionHex)\n\n- powershell: |\n    del *.pgc, *.pgd, *.exp\n  displayName: 'Cleanup binaries'\n  workingDirectory: '$(Build.BinariesDirectory)\\bin\\$(ArchDir)'\n\n- powershell: |\n    copy \"$(Build.SourcesDirectory)\\Lib\\venv\\scripts\\common\\Activate.ps1\" .\n  displayName: 'Copy Powershell scripts from source'\n  workingDirectory: '$(Build.BinariesDirectory)\\bin\\$(ArchDir)'\n\n- publish: '$(Build.BinariesDirectory)\\bin\\$(ArchDir)'\n  artifact: $(Artifact)\n  displayName: 'Publish binaries'\n"
  },
  {
    "path": "windows-release/checkout.yml",
    "content": "parameters:\n  depth: 3\n  IncludeSelf: false\n  Path: .\n\nsteps:\n- ${{ if eq(parameters.IncludeSelf, 'true') }}:\n  - checkout: self\n    path: release-tools\n- ${{ else }}:\n  - checkout: none\n\n- script: git clone --progress -v --depth ${{ parameters.depth }} --branch $(SourceTag) --single-branch https://github.com/$(GitRemote)/cpython.git ${{ parameters.Path }}\n  displayName: 'git clone ($(GitRemote)/$(SourceTag))'\n  condition: and(succeeded(), and(variables['GitRemote'], variables['SourceTag']))\n\n- script: git clone --progress -v --depth ${{ parameters.depth }} --branch $(SourceTag) --single-branch $(Build.Repository.Uri) ${{ parameters.Path }}\n  displayName: 'git clone (<default>/$(SourceTag))'\n  condition: and(succeeded(), and(not(variables['GitRemote']), variables['SourceTag']))\n\n- script: git clone --progress -v --depth ${{ parameters.depth }} --branch $(Build.SourceBranchName) --single-branch https://github.com/$(GitRemote)/cpython.git ${{ parameters.Path }}\n  displayName: 'git clone ($(GitRemote)/<default>)'\n  condition: and(succeeded(), and(variables['GitRemote'], not(variables['SourceTag'])))\n\n- script: git clone --progress -v --depth ${{ parameters.depth }} --branch $(Build.SourceBranchName) --single-branch $(Build.Repository.Uri) ${{ parameters.Path }}\n  displayName: 'git clone'\n  condition: and(succeeded(), and(not(variables['GitRemote']), not(variables['SourceTag'])))\n\n- powershell: |\n    $checkout_commit = (git rev-parse HEAD)\n    if ($checkout_commit -ne '$(SourceCommit)') {\n        throw \"Expected git commit '$(SourceCommit)' didn't match tagged commit '$checkout_commit'\"\n    }\n  displayName: \"Verify CPython commit matches tag\"\n  ${{ if and(parameters.Path, ne(parameters.Path, '.')) }}:\n    workingDirectory: ${{ parameters.Path }}\n  condition: and(succeeded(), variables['SourceCommit'])\n\n- powershell: |\n    if (-not (Test-Path \"Misc\\externals.spdx.json\")) {\n      \"externals.spdx.json is 
missing - skipping SBOM\"\n      Write-Host \"##vso[task.setvariable variable=SkipSBOM]1\"\n    }\n  displayName: 'Checking for SBOM inputs'\n  ${{ if and(parameters.Path, ne(parameters.Path, '.')) }}:\n    workingDirectory: ${{ parameters.Path }}\n  condition: and(succeeded(), not(variables['SkipSBOM']))\n"
  },
  {
    "path": "windows-release/find-sdk.yml",
    "content": "# Locate the Windows SDK and add its binaries directory to PATH\n#\n# `toolname` can be overridden to use a different marker file.\n\nparameters:\n  toolname: signtool.exe\n\nsteps:\n  - powershell: |\n      $kitroot = (gp 'HKLM:\\SOFTWARE\\Microsoft\\Windows Kits\\Installed Roots\\').KitsRoot10\n      $tool = (gci -r \"$kitroot\\Bin\\*\\${{ parameters.toolname }}\" | sort FullName -Desc | select -First 1)\n      if (-not $tool) {\n          throw \"SDK is not available\"\n      }\n      Write-Host \"##vso[task.prependpath]$($tool.Directory)\"\n      Write-Host \"Adding $($tool.Directory) to PATH\"\n    displayName: 'Add WinSDK tools to path'\n"
  },
  {
    "path": "windows-release/find-tools.yml",
    "content": "# Locate a set of the tools used for builds\n\nsteps:\n  - template: ./find-sdk.yml\n    parameters:\n      toolname: 'signtool.exe'\n\n  - powershell: |\n      $vcvarsall = (& \"${env:ProgramFiles(x86)}\\Microsoft Visual Studio\\Installer\\vswhere.exe\" `\n          -prerelease `\n          -latest `\n          -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 `\n          -find VC\\Auxiliary\\Build\\vcvarsall.bat)\n      Write-Host \"Found vcvarsall at $vcvarsall\"\n      Write-Host \"##vso[task.setVariable variable=vcvarsall]$vcvarsall\"\n    displayName: 'Find vcvarsall.bat'\n\n  - powershell: |\n      $msbuild = (& \"${env:ProgramFiles(x86)}\\Microsoft Visual Studio\\Installer\\vswhere.exe\" `\n          -prerelease `\n          -latest `\n          -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 `\n          -find MSBuild\\Current\\Bin\\msbuild.exe)\n      Write-Host \"Found MSBuild at $msbuild\"\n      Write-Host \"##vso[task.setVariable variable=msbuild]$msbuild\"\n    displayName: 'Find MSBuild'\n"
  },
  {
    "path": "windows-release/layout-command.yml",
    "content": "parameters:\n  Binaries: $(Pipeline.Workspace)\\bin_$(Name)\n  Sources: $(Build.SourcesDirectory)\n  Temp: $(Build.BinariesDirectory)\\layout-temp\n  Docs: $(Build.BinariesDirectory)\\doc\n  LayoutSources:\n  BuildToPackage: current\n\nsteps:\n- task: DownloadPipelineArtifact@2\n  displayName: 'Download artifact: bin_$(HostArch)'\n  condition: and(succeeded(), variables['HostArch'])\n  inputs:\n    ${{ if eq(parameters.BuildToPackage, 'current') }}:\n      buildType: current\n    ${{ else }}:\n      buildType: specific\n      buildVersionToDownload: specific\n      project: $(resources.pipeline.build_to_package.projectId)\n      pipeline: $(resources.pipeline.build_to_package.pipelineId)\n      runId: $(resources.pipeline.build_to_package.runID)\n    artifact: bin_$(HostArch)\n    targetPath: $(Pipeline.Workspace)\\bin_$(HostArch)\n\n- powershell: |\n    Write-Host \"##vso[task.setvariable variable=Python]${{ parameters.Binaries }}\\python.exe\"\n  condition: and(succeeded(), not(variables['HostArch']))\n  displayName: 'Set Python command'\n\n- powershell: |\n    Write-Host \"##vso[task.setvariable variable=Python]$(Pipeline.Workspace)\\bin_$(HostArch)\\python.exe\"\n  condition: and(succeeded(), variables['HostArch'])\n  displayName: 'Set Python command'\n\n- powershell: >\n    $layout_cmd = '&\n    \"$(Python)\"\n    \"{4}\\PC\\layout\"\n    -vv\n    --source \"{1}\"\n    --build \"{0}\"\n    --arch \"$(Arch)\"\n    --temp \"{2}\"\n    --include-cat \"{0}\\python.cat\"\n    --doc-build \"{3}\"'\n    -f (\n    \"${{ parameters.Binaries }}\",\n    \"${{ parameters.Sources }}\",\n    \"${{ parameters.Temp }}\",\n    \"${{ parameters.Docs}}\",\n    \"${{ coalesce(parameters.LayoutSources, parameters.Sources) }}\");\n    Write-Host \"##vso[task.setvariable variable=LayoutCmd]$layout_cmd\";\n    Write-Host \"Setting LayoutCmd=$layout_cmd\"\n  displayName: 'Set LayoutCmd'\n"
  },
  {
    "path": "windows-release/libffi-build.yml",
    "content": "parameters:\n- name: SourceTag\n  displayName: 'LibFFI Source Tag'\n  type: string\n- name: SigningCertificate\n  displayName: \"Code signing certificate\"\n  type: string\n  default: 'PythonSoftwareFoundation'\n  values:\n  - 'PythonSoftwareFoundation'\n  - 'TestSign'\n  - 'Unsigned'\n- name: SourcesRepo\n  displayName: 'Sources Repository'\n  type: string\n  default: 'https://github.com/python/cpython-source-deps'\n- name: LibFFIBuildScript\n  displayName: 'Build script'\n  type: string\n  default: 'https://github.com/python/cpython/raw/main/PCbuild/prepare_libffi.bat'\n\n\nname: ${{ parameters.SourceTag }}_$(Date:yyyyMMdd)$(Rev:.rr)\n\n\nvariables:\n- ${{ if eq(parameters.SigningCertificate, 'PythonSoftwareFoundation') }}:\n  - group: CPythonSign\n- ${{ if eq(parameters.SigningCertificate, 'TestSign') }}:\n  - group: CPythonTestSign\n- name: IntDir\n  value: '$(Build.BinariesDirectory)'\n- name: OutDir\n  value: '$(Build.ArtifactStagingDirectory)'\n- name: SigningDescription\n  value: 'LibFFI for Python (${{ parameters.SourceTag }})'\n\n\njobs:\n- job: Build_LibFFI\n  displayName: LibFFI\n  pool:\n    vmImage: windows-latest\n\n  workspace:\n    clean: all\n\n  steps:\n    - checkout: none\n\n    - template: ./find-tools.yml\n\n    - powershell: |\n       mkdir -Force \"$(IntDir)\\script\"\n       iwr \"${{ parameters.LibFFIBuildScript }}\" -outfile \"$(IntDir)\\script\\prepare_libffi.bat\"\n      displayName: 'Download build script'\n\n    - powershell: |\n        git clone ${{ parameters.SourcesRepo }} -b ${{ parameters.SourceTag }} --depth 1 -c core.autocrlf=false -c core.eol=lf .\n      displayName: 'Check out LibFFI sources'\n\n    - script: 'prepare_libffi.bat --install-cygwin'\n      workingDirectory: '$(IntDir)\\script'\n      displayName: 'Install Cygwin and build'\n      env:\n        VCVARSALL: '$(vcvarsall)'\n        LIBFFI_SOURCE: '$(Build.SourcesDirectory)'\n        LIBFFI_OUT: '$(OutDir)'\n\n    - powershell: |\n       if ((gci 
*\\*.dll).Count -lt 3) {\n           Write-Error \"Did not generate enough DLL files\"\n       }\n       if ((gci *\\Include\\ffi.h).Count -lt 3) {\n           Write-Error \"Did not generate enough include files\"\n       }\n      failOnStderr: true\n      workingDirectory: '$(OutDir)'\n      displayName: 'Verify files were created'\n\n    - publish: '$(OutDir)'\n      artifact: 'unsigned'\n      displayName: 'Publish unsigned build'\n\n\n- ${{ if ne(parameters.SigningCertificate, 'Unsigned') }}:\n  - job: Sign_LibFFI\n    displayName: Sign LibFFI\n    dependsOn: Build_LibFFI\n    pool:\n      vmImage: windows-latest\n\n    workspace:\n      clean: all\n\n    steps:\n      - checkout: none\n      - download: current\n        artifact: unsigned\n\n      - template: sign-files.yml\n        parameters:\n          Include: '-r *.dll'\n          WorkingDir: '$(Pipeline.Workspace)\\unsigned'\n          SigningCertificate: ${{ parameters.SigningCertificate }}\n\n      - publish: '$(Pipeline.Workspace)\\unsigned'\n        artifact: 'libffi'\n        displayName: 'Publish libffi'\n"
  },
  {
    "path": "windows-release/merge-and-upload.py",
    "content": "import hashlib\nimport json\nimport os\nimport re\nimport subprocess\nimport sys\nfrom pathlib import Path\nfrom urllib.parse import urlparse\nfrom urllib.request import Request, urlopen\n\nUPLOAD_URL_PREFIX = os.getenv(\"UPLOAD_URL_PREFIX\", \"https://www.python.org/ftp/\")\nUPLOAD_PATH_PREFIX = os.getenv(\"UPLOAD_PATH_PREFIX\", \"/srv/www.python.org/ftp/\")\nINDEX_URL = os.getenv(\"INDEX_URL\", UPLOAD_URL_PREFIX + \"python/index-windows.json\")\nINDEX_FILE = os.getenv(\"INDEX_FILE\")\n# A version will be inserted before the extension later on\nMANIFEST_FILE = os.getenv(\"MANIFEST_FILE\")\nUPLOAD_HOST = os.getenv(\"UPLOAD_HOST\", \"\")\nUPLOAD_HOST_KEY = os.getenv(\"UPLOAD_HOST_KEY\", \"\")\nUPLOAD_KEYFILE = os.getenv(\"UPLOAD_KEYFILE\", \"\")\nUPLOAD_USER = os.getenv(\"UPLOAD_USER\", \"\")\nNO_UPLOAD = os.getenv(\"NO_UPLOAD\", \"no\")[:1].lower() in \"yt1\"\nLOCAL_INDEX = os.getenv(\"LOCAL_INDEX\", \"no\")[:1].lower() in \"yt1\"\nSIGN_COMMAND = os.getenv(\"SIGN_COMMAND\", \"\")\n\n\ndef find_cmd(env, exe):\n    cmd = os.getenv(env)\n    if cmd:\n        cmd = Path(cmd)\n        if not cmd.is_file():\n            raise RuntimeError(\n                f\"Could not find {cmd} to perform upload. Incorrect %{env}% setting.\"\n            )\n        return cmd\n    for p in os.getenv(\"PATH\", \"\").split(\";\"):\n        if p:\n            cmd = Path(p) / exe\n            if cmd.is_file():\n                return cmd\n    if UPLOAD_HOST:\n        raise RuntimeError(\n            f\"Could not find {exe} to perform upload. 
Try setting %{env}% or %PATH%\"\n        )\n    print(f\"Did not find {exe}, but not uploading anyway.\")\n\n\nPLINK = find_cmd(\"PLINK\", \"plink.exe\")\nPSCP = find_cmd(\"PSCP\", \"pscp.exe\")\nMAKECAT = find_cmd(\"MAKECAT\", \"makecat.exe\")\n\n\ndef _std_args(cmd):\n    if not cmd:\n        raise RuntimeError(\"Cannot upload because command is missing\")\n    all_args = [cmd, \"-batch\"]\n    if UPLOAD_HOST_KEY:\n        all_args.append(\"-hostkey\")\n        all_args.append(UPLOAD_HOST_KEY)\n    if UPLOAD_KEYFILE:\n        all_args.append(\"-noagent\")\n        all_args.append(\"-i\")\n        all_args.append(UPLOAD_KEYFILE)\n    return all_args\n\n\nclass RunError(Exception):\n    pass\n\n\ndef _run(*args, single_cmd=False):\n    if single_cmd:\n        args = args[0]\n    with subprocess.Popen(\n        args,\n        stdout=subprocess.PIPE,\n        stderr=subprocess.STDOUT,\n        encoding=\"ascii\",\n        errors=\"replace\",\n    ) as p:\n        out, _ = p.communicate(None)\n        if out:\n            print(out.encode(\"ascii\", \"replace\").decode(\"ascii\"))\n        if p.returncode:\n            raise RunError(p.returncode, out)\n        return out\n\n\ndef call_ssh(*args, allow_fail=True):\n    if not UPLOAD_HOST or NO_UPLOAD or LOCAL_INDEX:\n        print(\"Skipping\", args, \"because UPLOAD_HOST is missing\")\n        return \"\"\n    try:\n        return _run(*_std_args(PLINK), f\"{UPLOAD_USER}@{UPLOAD_HOST}\", *args)\n    except RunError as ex:\n        if not allow_fail:\n            raise\n        return ex.args[1]\n\n\ndef upload_ssh(source, dest):\n    if not UPLOAD_HOST or NO_UPLOAD or LOCAL_INDEX:\n        print(\"Skipping upload of\", source, \"because UPLOAD_HOST is missing\")\n        return\n    _run(*_std_args(PSCP), source, f\"{UPLOAD_USER}@{UPLOAD_HOST}:{dest}\")\n    call_ssh(f\"chgrp downloads {dest} && chmod g-x,o+r {dest}\")\n\n\ndef download_ssh(source, dest):\n    if not UPLOAD_HOST:\n        print(\"Skipping download 
of\", source, \"because UPLOAD_HOST is missing\")\n        return\n    Path(dest).parent.mkdir(exist_ok=True, parents=True)\n    _run(*_std_args(PSCP), f\"{UPLOAD_USER}@{UPLOAD_HOST}:{source}\", dest)\n\n\ndef ls_ssh(dest):\n    if not UPLOAD_HOST or LOCAL_INDEX:\n        print(\"Skipping ls of\", dest, \"because UPLOAD_HOST is missing\")\n        return\n    try:\n        _run(*_std_args(PSCP), \"-ls\", f\"{UPLOAD_USER}@{UPLOAD_HOST}:{dest}\")\n    except RunError as ex:\n        if not ex.args[1].rstrip().endswith(\"No such file or directory\"):\n            raise\n        print(dest, \"was not found\")\n\n\ndef url2path(url):\n    if not UPLOAD_URL_PREFIX:\n        raise ValueError(\"%UPLOAD_URL_PREFIX% was not set\")\n    if not url:\n        raise ValueError(\"Unexpected empty URL\")\n    if not url.startswith(UPLOAD_URL_PREFIX):\n        if LOCAL_INDEX:\n            return url\n        raise ValueError(f\"Unexpected URL: {url}\")\n    return UPLOAD_PATH_PREFIX + url[len(UPLOAD_URL_PREFIX) :]\n\n\ndef get_hashes(src):\n    h = hashlib.sha256()\n    with open(src, \"rb\") as f:\n        chunk = f.read(1024 * 1024)\n        while chunk:\n            h.update(chunk)\n            chunk = f.read(1024 * 1024)\n    return {\"sha256\": h.hexdigest()}\n\n\ndef trim_install(install):\n    return {\n        k: v for k, v in install.items() if k not in (\"aliases\", \"run-for\", \"shortcuts\")\n    }\n\n\ndef validate_new_installs(installs):\n    ids = [i[\"id\"] for i in installs]\n    id_set = set(ids)\n    if len(id_set) < len(ids):\n        for i in id_set:\n            ids.remove(i)\n        print(\"WARNING: Duplicate id fields:\", *sorted(set(ids)))\n\n\ndef purge(url):\n    if not UPLOAD_HOST or NO_UPLOAD:\n        print(\"Skipping purge of\", url, \"because UPLOAD_HOST is missing\")\n        return\n    print(\"Purging\", url)\n    with urlopen(Request(url, method=\"PURGE\", headers={\"Fastly-Soft-Purge\": 1})) as r:\n        r.read()\n\n\ndef 
calculate_uploads():\n    cwd = Path.cwd()\n    for p in sorted(\n        [\n            *cwd.glob(\"__install__.*.json\"),\n            *[p / \"__install__.json\" for p in cwd.iterdir()],\n        ]\n    ):\n        if not p.is_file():\n            continue\n        print(\"Processing\", p)\n        i = json.loads(p.read_bytes())\n        u = urlparse(i[\"url\"])\n        src = p.parent / u.path.rpartition(\"/\")[-1]\n        dest = url2path(i[\"url\"])\n        if LOCAL_INDEX:\n            i[\"url\"] = str(src.relative_to(Path.cwd())).replace(\"\\\\\", \"/\")\n        sbom = src.with_suffix(\".spdx.json\")\n        sbom_dest = dest.rpartition(\"/\")[0] + sbom.name\n        if not sbom.is_file():\n            sbom = None\n            sbom_dest = None\n        yield (\n            i,\n            src,\n            url2path(i[\"url\"]),\n            sbom,\n            sbom_dest,\n        )\n\n\ndef sign_json(cat_file, *files):\n    if not MAKECAT:\n        if not UPLOAD_HOST or NO_UPLOAD:\n            print(\"makecat.exe not found, but not uploading, so skip signing.\")\n            return\n        raise RuntimeError(\"No makecat.exe found\")\n    if not SIGN_COMMAND:\n        if not UPLOAD_HOST or NO_UPLOAD:\n            print(\"No signing command set, but not uploading, so skip signing.\")\n            return\n        raise RuntimeError(\"No SIGN_COMMAND set\")\n\n    cat = Path(cat_file).absolute()\n    cdf = cat.with_suffix(\".cdf\")\n    cdf.parent.mkdir(parents=True, exist_ok=True)\n\n    with open(cdf, \"w\", encoding=\"ansi\") as f:\n        print(\"[CatalogHeader]\", file=f)\n        print(\"Name=\", cat.name, sep=\"\", file=f)\n        print(\"ResultDir=\", cat.parent, sep=\"\", file=f)\n        print(\"PublicVersion=0x00000001\", file=f)\n        print(\"CatalogVersion=2\", file=f)\n        print(\"HashAlgorithms=SHA256\", file=f)\n        print(\"EncodingType=\", file=f)\n        print(file=f)\n        print(\"[CatalogFiles]\", file=f)\n        for a in 
map(Path, files):\n            print(\"<HASH>\", a.name, \"=\", a.absolute(), sep=\"\", file=f)\n\n    _run(MAKECAT, \"-v\", cdf)\n    if not cat.is_file():\n        raise FileNotFoundError(cat)\n    # Pass as a single arg because the command variable has its own arguments\n    _run(f'{SIGN_COMMAND} \"{cat}\"', single_cmd=True)\n    cdf.unlink()\n\n\ndef remove_and_insert(index, new_installs):\n    new = {(i[\"id\"].casefold(), i[\"sort-version\"].casefold()) for i in new_installs}\n    to_remove = [\n        x\n        for x, i in enumerate(index)\n        if (i[\"id\"].casefold(), i[\"sort-version\"].casefold()) in new\n    ]\n    for i in reversed(to_remove):\n        del index[i]\n    index[:0] = new_installs\n    print(\"Added\", len(new_installs), \"entries:\")\n    for i in new_installs:\n        print(\"-\", i[\"id\"], i[\"sort-version\"])\n    print(\"Replaced\", len(to_remove), \"existing entries\")\n    print()\n\n\ndef hash_packages(uploads):\n    for i, src, *_ in uploads:\n        i[\"hash\"] = get_hashes(src)\n\n\ndef number_sortkey(n):\n    try:\n        return f\"{int(n):020}\"\n    except ValueError:\n        return n\n\n\ndef install_sortkey(install):\n    key = re.split(r\"(\\d+)\", install[\"id\"])\n    ver = re.split(r\"(\\d+)\", install[\"sort-version\"])\n    return (\n        tuple(number_sortkey(k) for k in key),\n        tuple(number_sortkey(k) for k in ver),\n    )\n\n\ndef find_missing_from_index(url, installs):\n    if not UPLOAD_HOST:\n        print(\"Skipping check for upload race because UPLOAD_HOST is missing\")\n        return []\n    if NO_UPLOAD:\n        print(\"Skipping check for upload race because NO_UPLOAD is set\")\n        return []\n    with urlopen(url) as r:\n        x = {install_sortkey(i) for i in json.load(r)[\"versions\"]}\n    y = {install_sortkey(i) for i in installs} - x\n    return [i for i in installs if install_sortkey(i) in y]\n\n\nUPLOADS = list(calculate_uploads())\n\nif not UPLOADS:\n    print(\"No files 
to upload!\")\n    sys.exit(1)\n\n\nhash_packages(UPLOADS)\n\n\nindex = {\"versions\": []}\n\nINDEX_MTIME = 0\n\nif INDEX_FILE:\n    INDEX_PATH = url2path(INDEX_URL)\n\n    try:\n        INDEX_MTIME = int(call_ssh(\"stat\", \"-c\", \"%Y\", INDEX_PATH) or \"0\")\n    except ValueError:\n        pass\n\n    try:\n        if not LOCAL_INDEX:\n            download_ssh(INDEX_PATH, INDEX_FILE)\n    except RunError as ex:\n        err = ex.args[1]\n        if not err.rstrip().endswith(\"no such file or directory\"):\n            raise\n    else:\n        try:\n            with open(INDEX_FILE, encoding=\"utf-8\") as f:\n                index = json.load(f)\n        except FileNotFoundError:\n            pass\n\n\nprint(INDEX_PATH, \"mtime =\", INDEX_MTIME)\n\n\nnew_installs = [trim_install(i) for i, *_ in UPLOADS]\nvalidate_new_installs(new_installs)\nnew_installs = sorted(new_installs, key=install_sortkey)\nremove_and_insert(index[\"versions\"], new_installs)\n\nif INDEX_FILE:\n    INDEX_FILE = Path(INDEX_FILE).absolute()\n    INDEX_CAT_FILE = INDEX_FILE.with_name(f\"{INDEX_FILE.name}.cat\")\n    INDEX_FILE.parent.mkdir(parents=True, exist_ok=True)\n    with open(INDEX_FILE, \"w\", encoding=\"utf-8\") as f:\n        json.dump(index, f)\n\n    sign_json(INDEX_CAT_FILE, INDEX_FILE)\n    INDEX_CAT_URL = f\"{INDEX_URL}.cat\"\n    INDEX_CAT_PATH = f\"{INDEX_PATH}.cat\"\nelse:\n    INDEX_CAT_FILE = None\n    INDEX_CAT_URL = None\n    INDEX_CAT_PATH = None\n\n\nif MANIFEST_FILE:\n    # Use the sort-version so that the manifest name includes prerelease marks\n    MANIFEST_FILE = Path(MANIFEST_FILE).absolute()\n    name = f\"{MANIFEST_FILE.stem}-{new_installs[0]['sort-version']}.json\"\n    MANIFEST_FILE = MANIFEST_FILE.with_name(name)\n    MANIFEST_URL = new_installs[0][\"url\"].rpartition(\"/\")[0] + \"/\" + name\n    MANIFEST_PATH = url2path(MANIFEST_URL)\n\n    with open(MANIFEST_FILE, \"w\", encoding=\"utf-8\") as f:\n        # Include an indent for readability. 
The release manifest is\n        # far more likely to be read by humans than the index.\n        json.dump({\"versions\": new_installs}, f, indent=2)\n\n\n# Upload last to ensure we've got a valid index first\nfor i, src, dest, sbom, sbom_dest in UPLOADS:\n    print(\"Uploading\", src, \"to\", dest)\n    destdir = dest.rpartition(\"/\")[0]\n    call_ssh(f\"mkdir {destdir} && chgrp downloads {destdir} && chmod a+rx {destdir}\")\n    upload_ssh(src, dest)\n    if sbom and sbom_dest:\n        upload_ssh(sbom, sbom_dest)\n\n\n# Check that nobody else has published while we were uploading\nif INDEX_FILE and INDEX_MTIME:\n    try:\n        mtime = int(call_ssh(\"stat\", \"-c\", \"%Y\", INDEX_PATH) or \"0\")\n    except ValueError:\n        mtime = 0\n    if mtime > INDEX_MTIME:\n        print(\"##[error]Lost a race with another publish step!\")\n        print(\"Expecting mtime\", INDEX_MTIME, \"but saw\", mtime)\n        sys.exit(1)\n\nTO_PURGE = [i[\"url\"] for i, *_ in UPLOADS]\n\nif MANIFEST_FILE:\n    print(\"Uploading\", MANIFEST_FILE, \"to\", MANIFEST_URL)\n    upload_ssh(MANIFEST_FILE, MANIFEST_PATH)\n    TO_PURGE.append(MANIFEST_URL)\n\nif INDEX_FILE:\n    print(\"Uploading\", INDEX_FILE, \"to\", INDEX_URL)\n    upload_ssh(INDEX_FILE, INDEX_PATH)\n    TO_PURGE.append(INDEX_URL)\n\nif INDEX_CAT_FILE:\n    print(\"Uploading\", INDEX_CAT_FILE, \"to\", INDEX_CAT_URL)\n    upload_ssh(INDEX_CAT_FILE, INDEX_CAT_PATH)\n    TO_PURGE.append(INDEX_CAT_URL)\n\n# Calculate directory parents for all files\nTO_PURGE.extend({i.rpartition(\"/\")[0] + \"/\" for i in TO_PURGE})\n\nprint(\"Purging\", len(TO_PURGE), \"uploaded files, indexes and directories\")\n\nfor i in TO_PURGE:\n    purge(i)\n\nif INDEX_URL:\n    missing = find_missing_from_index(INDEX_URL, [i for i, *_ in UPLOADS])\n    if missing:\n        print(\"##[error]Lost a race with another publish step!\")\n        print(\"Index at\", INDEX_URL, \"does not contain installs:\")\n        for m in missing:\n            
print(m[\"id\"], m[\"sort-version\"])\n        sys.exit(1)\n"
  },
  {
    "path": "windows-release/msi-steps.yml",
    "content": "parameters:\n  BuildToPackage: current\n  DoFreethreaded: false\n  SigningCertificate: ''\n  Artifacts: []\n\nsteps:\n  - template: ./checkout.yml\n\n  - powershell: |\n      $d = (.\\PCbuild\\build.bat -V) | %{ if($_ -match '\\s+(\\w+):\\s*(.+)\\s*$') { @{$Matches[1] = $Matches[2];} }};\n      Write-Host \"##vso[task.setvariable variable=SigningDescription]Python $($d.PythonVersion)\"\n    displayName: 'Update signing description'\n    condition: and(succeeded(), not(variables['SigningDescription']))\n\n  - ${{ each a in parameters.Artifacts }}:\n    - task: DownloadPipelineArtifact@2\n      displayName: 'Download artifact: ${{ a.artifact }}'\n      inputs:\n        ${{ if eq(parameters.BuildToPackage, 'current') }}:\n          buildType: current\n        ${{ else }}:\n          buildType: specific\n          buildVersionToDownload: specific\n          project: $(resources.pipeline.build_to_package.projectId)\n          pipeline: $(resources.pipeline.build_to_package.pipelineId)\n          runId: $(resources.pipeline.build_to_package.runID)\n        artifact: ${{ a.artifact }}\n        ${{ if a.target }}:\n          targetPath: ${{ a.target }}\n        ${{ else }}:\n          targetPath: $(Pipeline.Workspace)\\${{ a.artifact }}\n\n    # Assuming we'll always run the amd64 build\n    - ${{ if eq(a.artifact, 'bin_amd64') }}:\n      - powershell: >\n          copy\n          \"${env:TARGET_PATH}\\Activate.ps1\"\n          \"$(Build.SourcesDirectory)\\Lib\\venv\\scripts\\common\\Activate.ps1\"\n          -Force\n          -Verbose\n        displayName: 'Copy signed files into sources'\n        env:\n          ${{ if a.target }}:\n            TARGET_PATH: ${{ a.target }}\n          ${{ else }}:\n            TARGET_PATH: $(Pipeline.Workspace)\\${{ a.artifact }}\n\n  - script: |\n      call PCbuild\\find_python.bat \"%PYTHON%\"\n      call Tools\\msi\\get_externals.bat\n      call PCbuild\\find_msbuild.bat\n      echo ##vso[task.setvariable 
variable=PYTHON]%PYTHON%\n      echo ##vso[task.setvariable variable=MSBUILD]%MSBUILD%\n      %PYTHON% -m ensurepip\n    displayName: 'Get external dependencies'\n    env:\n      PYTHON: $(Build.BinariesDirectory)\\win32\\python.exe\n      PYTHONHOME: $(Build.SourcesDirectory)\n\n  - powershell: |\n      (\"/p:Py_OutDir=\" + $env:BUILD_BINARIESDIRECTORY) | Out-File msbuild.rsp -Encoding UTF8\n      \"/p:BuildForRelease=true\" | Out-File msbuild.rsp -Append -Encoding UTF8\n      Write-Host \"##vso[task.setvariable variable=ResponseFile]$(gi msbuild.rsp)\"\n      gc msbuild.rsp\n    displayName: 'Generate response file'\n\n  - ${{ if eq(parameters.DoFreethreaded, 'true') }}:\n    - powershell: |\n        \"/p:IncludeFreethreaded=true\" | Out-File \"$(ResponseFile)\" -Append -Encoding UTF8\n        gc \"$(ResponseFile)\"\n      displayName: 'Add Include_freethreaded to response file'\n\n  - ${{ if parameters.SigningCertificate }}:\n    - template: sign-files.yml\n      parameters:\n        Include: ''\n        ExportCommand: SignCommand\n        SigningCertificate: ${{ parameters.SigningCertificate }}\n        # WiX never moved on from signtool.exe, so we'll use that here\n        InstallLegacyTool: true\n\n    - powershell: |\n        $cmd = $env:SignCommand -replace '\"', '\\\"'\n        \"/p:_SignCommand=\"\"$cmd\"\"\" | Out-File $env:ResponseFile -Append -Encoding UTF8\n      displayName: 'Inject sign command into response file'\n      env:\n        SignCommand: $(SignCommand)\n        ResponseFile: $(ResponseFile)\n\n  - script: |\n      %MSBUILD% Tools\\msi\\launcher\\launcher.wixproj \"@$(ResponseFile)\"\n    displayName: 'Build launcher installer'\n    env:\n      Platform: x86\n      # Only need the variable here for msi.props to detect\n      SigningCertificate: ${{ parameters.SigningCertificate }}\n      ${{ if parameters.SigningCertificate }}:\n        AZURE_TENANT_ID: $(TrustedSigningTenantId)\n        AZURE_CLIENT_ID: $(TrustedSigningClientId)\n        
AZURE_CLIENT_SECRET: $(TrustedSigningClientSecret)\n\n  - ${{ each b in parameters.Bundles }}:\n    - script: |\n        %MSBUILD% Tools\\msi\\bundle\\releaselocal.wixproj /t:Rebuild /p:RebuildAll=true \"@$(ResponseFile)\"\n      displayName: 'Build ${{ b.bundle }} installer'\n      env:\n        Platform: ${{ b.Platform }}\n        PYTHON: ${{ b.PythonForBuild }}\\python.exe\n        PythonForBuild: ${{ b.PythonForBuild }}\\python.exe\n        PYTHONHOME: $(Build.SourcesDirectory)\n        ${{ if b.TclTkArtifact }}:\n          TclTkLibraryDir: $(Pipeline.Workspace)\\${{ b.TclTkArtifact }}\n        # Only need the variable here for msi.props to detect\n        SigningCertificate: ${{ parameters.SigningCertificate }}\n        ${{ if parameters.SigningCertificate }}:\n          AZURE_TENANT_ID: $(TrustedSigningTenantId)\n          AZURE_CLIENT_ID: $(TrustedSigningClientId)\n          AZURE_CLIENT_SECRET: $(TrustedSigningClientSecret)\n\n  - powershell: |\n      del $env:ResponseFile -ErrorAction Continue\n    displayName: 'Remove response file (always runs)'\n    condition: ne(variables['ResponseFile'], '')\n    env:\n      ResponseFile: $(ResponseFile)\n\n  - ${{ each b in parameters.Bundles }}:\n    - task: CopyFiles@2\n      displayName: 'Assemble artifact: msi (${{ b.bundle }})'\n      inputs:\n        sourceFolder: $(Build.BinariesDirectory)\\${{ b.bundle }}\\en-us\n        targetFolder: $(Build.ArtifactStagingDirectory)\\msi\\${{ b.bundle }}\n        contents: |\n          *.msi\n          *.cab\n          *.exe\n\n  - powershell: |\n      git clone $(Build.Repository.Uri) -b $(Build.SourceBranchName) --single-branch --no-checkout \"$(Pipeline.Workspace)\\release-tools\"\n      git -C \"$(Pipeline.Workspace)\\release-tools\" checkout $(Build.SourceVersion)\n    displayName: 'Clone the python/release-tools repository'\n\n  - powershell: >\n      & $(${env:Python}.Trim('\"'))\n      \"$(Pipeline.Workspace)\\release-tools\\sbom.py\"\n      
\"--cpython-source-dir=$(Build.SourcesDirectory)\"\n      $(gci -r \"$(Build.ArtifactStagingDirectory)\\msi\\**\\python-*.exe\")\n    workingDirectory: $(Build.BinariesDirectory)\n    condition: and(succeeded(), not(variables['SkipSBOM']))\n    displayName: 'Create SBOMs for binaries'\n    env:\n      PYTHON: $(Build.BinariesDirectory)\\win32\\python.exe\n      PYTHONHOME: $(Build.SourcesDirectory)\n\n  - task: CopyFiles@2\n    displayName: 'Layout Artifact: sbom'\n    condition: and(succeeded(), not(variables['SkipSBOM']))\n    inputs:\n      sourceFolder: $(Build.ArtifactStagingDirectory)\\msi\n      targetFolder: $(Build.ArtifactStagingDirectory)\\sbom\n      flatten: true\n      flattenFolders: true\n      contents: |\n        **\\*.spdx.json\n\n  - publish: '$(Build.ArtifactStagingDirectory)\\msi'\n    artifact: msi\n    displayName: 'Publish MSI'\n\n  - task: PublishBuildArtifacts@1\n    displayName: 'Publish Artifact: sbom'\n    condition: and(succeeded(), not(variables['SkipSBOM']))\n    inputs:\n      PathtoPublish: '$(Build.ArtifactStagingDirectory)\\sbom'\n      ArtifactName: sbom\n"
  },
  {
    "path": "windows-release/openssl-build.yml",
    "content": "parameters:\n- name: SourceTag\n  displayName: 'OpenSSL Source Tag'\n  type: string\n- name: SigningCertificate\n  displayName: \"Code signing certificate\"\n  type: string\n  default: 'PythonSoftwareFoundation'\n  values:\n  - 'PythonSoftwareFoundation'\n  - 'TestSign'\n  - 'Unsigned'\n- name: SourcesRepo\n  displayName: 'Sources Repository'\n  type: string\n  default: 'https://github.com/python/cpython-source-deps'\n- name: CustomBuildVM\n  displayName: 'Custom build VM'\n  type: boolean\n  default: false\n- name: VCVarsOptions\n  displayName: 'vcvarsall.bat options'\n  type: string\n  default: '(none)'\n- name: IncludeX86\n  displayName: 'Include x86'\n  type: boolean\n  default: true\n- name: IncludeX64\n  displayName: 'Include x64'\n  type: boolean\n  default: true\n- name: IncludeARM64\n  displayName: 'Include ARM64'\n  type: boolean\n  default: true\n\n\nname: ${{ parameters.SourceTag }}_$(Date:yyyyMMdd)$(Rev:.rr)\n\n\nvariables:\n- ${{ if eq(parameters.SigningCertificate, 'PythonSoftwareFoundation') }}:\n  - group: CPythonSign\n- ${{ if eq(parameters.SigningCertificate, 'TestSign') }}:\n  - group: CPythonTestSign\n- name: IntDir\n  value: '$(Build.BinariesDirectory)'\n- name: OutDir\n  value: '$(Build.ArtifactStagingDirectory)'\n- name: SigningDescription\n  value: 'OpenSSL for Python (${{ parameters.SourceTag }})'\n\n\njobs:\n- job: Build_SSL\n  displayName: OpenSSL\n  pool:\n    ${{ if eq(parameters.CustomBuildVM, 'true') }}:\n      name: 'Windows Release'\n    ${{ else }}:\n      vmImage: windows-latest\n\n  strategy:\n    matrix:\n      ${{ if eq(parameters.IncludeX86, 'true') }}:\n        win32:\n          Platform: 'win32'\n          VCPlatform: 'amd64_x86'\n          OpenSSLPlatform: 'VC-WIN32 no-asm'\n      ${{ if eq(parameters.IncludeX64, 'true') }}:\n        amd64:\n          Platform: 'amd64'\n          VCPlatform: 'amd64'\n          OpenSSLPlatform: 'VC-WIN64A-masm'\n      ${{ if eq(parameters.IncludeARM64, 'true') }}:\n        
arm64:\n          Platform: 'arm64'\n          VCPlatform: 'amd64_arm64'\n          OpenSSLPlatform: 'VC-WIN64-ARM'\n\n  workspace:\n    clean: all\n\n  steps:\n    - checkout: none\n\n    - template: ./find-tools.yml\n\n    - powershell: |\n        git clone ${{ parameters.SourcesRepo }} -b ${{ parameters.SourceTag }} --depth 1 .\n      displayName: 'Check out OpenSSL sources'\n\n    - script: |\n        call \"$(vcvarsall)\" $(VCPlatform) %EXTRA_OPTS%\n        perl \"$(Build.SourcesDirectory)\\Configure\" $(OpenSSLPlatform) no-uplink\n        nmake\n      workingDirectory: '$(IntDir)'\n      displayName: 'Build OpenSSL'\n      env:\n        ${{ if ne(parameters.VCVarsOptions, '(none)') }}:\n          EXTRA_OPTS: ${{ parameters.VCVarsOptions }}\n\n    - ${{ if ne(parameters.SigningCertificate, 'Unsigned') }}:\n      - template: sign-files.yml\n        parameters:\n          Include: 'lib*.dll'\n          WorkingDir: $(IntDir)\n          SigningCertificate: ${{ parameters.SigningCertificate }}\n\n    - task: CopyFiles@2\n      displayName: 'Copy built libraries for upload'\n      inputs:\n        SourceFolder: '$(IntDir)'\n        Contents: |\n          lib*.dll\n          lib*.pdb\n          lib*.lib\n          include\\openssl\\*.h\n        TargetFolder: '$(OutDir)'\n\n    - task: CopyFiles@2\n      displayName: 'Copy header files for upload'\n      inputs:\n        SourceFolder: '$(Build.SourcesDirectory)'\n        Contents: |\n          include\\openssl\\*\n        TargetFolder: '$(OutDir)'\n\n    - task: CopyFiles@2\n      displayName: 'Copy applink files for upload'\n      inputs:\n        SourceFolder: '$(Build.SourcesDirectory)\\ms'\n        Contents: applink.c\n        TargetFolder: '$(OutDir)\\include'\n\n    - task: CopyFiles@2\n      displayName: 'Copy LICENSE for upload'\n      inputs:\n        SourceFolder: '$(Build.SourcesDirectory)'\n        Contents: |\n          LICENSE\n          LICENSE.txt\n        TargetFolder: '$(OutDir)'\n\n    - publish: 
'$(OutDir)'\n      artifact: '$(Platform)'\n      displayName: 'Publishing $(Platform)'\n"
  },
  {
    "path": "windows-release/purge.py",
    "content": "# Purges the Fastly cache for Windows download files\n#\n# Usage:\n#   py -3 purge.py 3.5.1rc1\n#\n\n__author__ = \"Steve Dower <steve.dower@python.org>\"\n__version__ = \"1.0.0\"\n\nimport re\nimport sys\nfrom urllib.request import Request, urlopen\n\nVERSION_RE = re.compile(r\"(\\d+\\.\\d+\\.\\d+)([A-Za-z_]+\\d+)?$\")\n\ntry:\n    m = VERSION_RE.match(sys.argv[1])\n    if not m:\n        print(\"Invalid version:\", sys.argv[1])\n        print('Expected something like \"3.5.1rc1\"')\n        sys.exit(1)\nexcept LookupError:\n    print('Missing version argument. Expected something like \"3.5.1rc1\"')\n    sys.exit(1)\n\nURL = f\"https://www.python.org/ftp/python/{m.group(1)}/\"\nREL = m.group(2) or \"\"\n\nFILES = [\n    \"core.msi\",\n    \"core_d.msi\",\n    \"core_pdb.msi\",\n    \"dev.msi\",\n    \"dev_d.msi\",\n    \"doc.msi\",\n    \"exe.msi\",\n    \"exe_d.msi\",\n    \"exe_pdb.msi\",\n    \"freethreaded.msi\",\n    \"freethreaded_d.msi\",\n    \"freethreaded_pdb.msi\",\n    \"launcher.msi\",\n    \"lib.msi\",\n    \"lib_d.msi\",\n    \"lib_pdb.msi\",\n    \"path.msi\",\n    \"pip.msi\",\n    \"tcltk.msi\",\n    \"tcltk_d.msi\",\n    \"tcltk_pdb.msi\",\n    \"test.msi\",\n    \"test_d.msi\",\n    \"test_pdb.msi\",\n    \"tools.msi\",\n    \"ucrt.msi\",\n]\nPATHS = [\n    f\"python-{m.group(0)}.exe\",\n    f\"python-{m.group(0)}-webinstall.exe\",\n    f\"python-{m.group(0)}-amd64.exe\",\n    f\"python-{m.group(0)}-amd64-webinstall.exe\",\n    f\"python-{m.group(0)}-arm64.exe\",\n    f\"python-{m.group(0)}-arm64-webinstall.exe\",\n    f\"python-{m.group(0)}-embed-amd64.zip\",\n    f\"python-{m.group(0)}-embed-win32.zip\",\n    f\"python-{m.group(0)}-embed-arm64.zip\",\n    *[f\"win32{REL}/{f}\" for f in FILES],\n    *[f\"amd64{REL}/{f}\" for f in FILES],\n    *[f\"arm64{REL}/{f}\" for f in FILES],\n]\nPATHS = PATHS + [p + \".asc\" for p in PATHS]\n\nprint(\"Purged:\")\nfor n in PATHS:\n    u = URL + n\n    with urlopen(Request(u, 
method=\"PURGE\", headers={\"Fastly-Soft-Purge\": 1})) as r:\n        r.read()\n    print(\"  \", u)\n"
  },
  {
    "path": "windows-release/sign-files.yml",
    "content": "parameters:\n  Include: '*'\n  Exclude: ''\n  Filter: ''\n  WorkingDir: '$(Build.BinariesDirectory)'\n  ExtractDir: ''\n  SigningCertificate: ''\n  ExportCommand: ''\n  ExportLegacyCommand: ''\n  ContinueOnError: false\n  InstallTool: true\n  InstallLegacyTool: false\n  AzureServiceConnectionName: 'Python Signing'\n\nsteps:\n- ${{ if and(parameters.SigningCertificate, ne(parameters.SigningCertificate, 'Unsigned')) }}:\n  - ${{ if eq(parameters.InstallTool, 'true') }}:\n    - powershell: |\n        # Install sign tool\n        dotnet tool install --global --prerelease sign\n        $signtool = (gcm sign -EA SilentlyContinue).Source\n        if (-not $signtool) {\n          $signtool = (gi \"${env:USERPROFILE}\\.dotnet\\tools\\sign.exe\").FullName\n        }\n        $signargs = 'code artifact-signing -v Information ' + `\n          '-fd sha256 -t http://timestamp.acs.microsoft.com -td sha256 ' + `\n          \"-ase \"\"${env:ASE}\"\" -asa \"\"${env:ASA}\"\" -ascp \"\"${env:ASCP}\"\" \" + `\n          \"-act azure-cli -d \"\"${env:DESCRIPTION}\"\"\"\n\n        Write-Host \"##vso[task.setvariable variable=__TrustedSigningCmd]$signtool\"\n        Write-Host \"##vso[task.setvariable variable=__TrustedSigningArgs]$signargs\"\n        if ($env:EXPORT_COMMAND) {\n          $signcmd = \"\"\"$signtool\"\" $signargs\"\n          Write-Host \"##vso[task.setvariable variable=${env:EXPORT_COMMAND}]$signcmd\"\n        }\n      workingDirectory: $(Build.BinariesDirectory)\n      displayName: 'Install Azure Artifact Signing tools'\n      env:\n        ASE: $(TrustedSigningUri)\n        ASA: $(TrustedSigningAccount)\n        ASCP: $(TrustedSigningCertificateName)\n        DESCRIPTION: $(SigningDescription)\n        EXPORT_COMMAND: ${{ parameters.ExportCommand }}\n\n  - ${{ if eq(parameters.InstallLegacyTool, 'true') }}:\n    - powershell: |\n        git clone https://github.com/python/cpython-bin-deps --revision fb06137dccc43ed5b030cdd9e3560990b37f39da --depth 1 
--progress -v \"signtool\"\n\n        $signtool = gi signtool\\x64\\signtool.exe\n        $dlib = gi signtool\\azure_trusted_signing\\x64\\Azure.CodeSigning.Dlib.dll\n        Write-Host \"##vso[task.setvariable variable=MAKECAT]$(gi signtool\\x64\\makecat.exe)\"\n\n        ConvertTo-Json @{\n            Endpoint=$env:ASE;\n            CodeSigningAccountName=$env:ASA;\n            CertificateProfileName=$env:ASCP;\n            # Only allow Azure CLI credentials\n            ExcludeCredentials=@(\n                \"ManagedIdentityCredential\",\n                \"WorkloadIdentityCredential\",\n                \"SharedTokenCacheCredential\",\n                \"EnvironmentCredential\",\n                \"VisualStudioCredential\",\n                \"VisualStudioCodeCredential\",\n                \"AzurePowerShellCredential\",\n                \"AzureDeveloperCliCredential\",\n                \"InteractiveBrowserCredential\"\n            );\n        } | Out-File signtool\\metadata.json -Encoding ascii\n        Write-Host \"##vso[task.setvariable variable=SIGNTOOL_METADATA]$(gi signtool\\metadata.json)\"\n\n        $signargs = 'sign /v /fd sha256 /tr http://timestamp.acs.microsoft.com /td sha256 ' + `\n                    \"/dlib \"\"$dlib\"\" /dmdf \"\"$(gi signtool\\metadata.json)\"\" \" + `\n                    \"/d \"\"${env:DESCRIPTION}\"\"\"\n        Write-Host \"##vso[task.setvariable variable=__TrustedSigningCmd]$signtool\"\n        Write-Host \"##vso[task.setvariable variable=__TrustedSigningArgs]$signargs\"\n\n        if ($env:EXPORT_COMMAND) {\n          $signcmd = \"\"\"$signtool\"\" $signargs\"\n          Write-Host \"##vso[task.setvariable variable=${env:EXPORT_COMMAND}]$signcmd\"\n        }\n      workingDirectory: $(Pipeline.Workspace)\n      displayName: 'Download signtool binaries'\n      env:\n        ASE: $(TrustedSigningUri)\n        ASA: $(TrustedSigningAccount)\n        ASCP: $(TrustedSigningCertificateName)\n        DESCRIPTION: 
$(SigningDescription)\n        EXPORT_COMMAND: ${{ parameters.ExportLegacyCommand }}\n\n  - ${{ if parameters.AzureServiceConnectionName }}:\n    # We sign in once with the AzureCLI task, as it uses OIDC to obtain a\n    # temporary token. But the task also logs out, and so we save the token and\n    # use it to log in persistently (for the rest of the build).\n    - task: AzureCLI@2\n      displayName: 'Authenticate signing tools (1/2)'\n      inputs:\n        azureSubscription: ${{ parameters.AzureServiceConnectionName }}\n        scriptType: 'ps'\n        scriptLocation: 'inlineScript'\n        inlineScript: |\n          \"##vso[task.setvariable variable=__AZURE_CLIENT_ID;issecret=true]${env:servicePrincipalId}\"\n          \"##vso[task.setvariable variable=__AZURE_ID_TOKEN;issecret=true]${env:idToken}\"\n          \"##vso[task.setvariable variable=__AZURE_TENANT_ID;issecret=true]${env:tenantId}\"\n        addSpnToEnvironment: true\n\n    - powershell: >\n        az login --service-principal\n        -u $(__AZURE_CLIENT_ID)\n        --tenant $(__AZURE_TENANT_ID)\n        --allow-no-subscriptions\n        --federated-token $(__AZURE_ID_TOKEN)\n      displayName: 'Authenticate signing tools (2/2)'\n\n  - ${{ if parameters.Include }}:\n    - powershell: |\n        if ($env:EXCLUDE) {\n          $files = (dir ($env:INCLUDE -split ',').Trim() -Exclude ($env:EXCLUDE -split ',').Trim() -File)\n        } else {\n          $files = (dir ($env:INCLUDE -split ',').Trim() -File)\n        }\n        if ($env:FILTER) {\n          ($env:FILTER -split ';').Trim() -join \"`n\" | Out-File __filelist.txt -Encoding utf8\n        } else {\n          \"*\" | Out-File __filelist.txt -Encoding utf8\n        }\n        foreach ($f in $files) {\n          & $env:TRUSTED_SIGNING_CMD @(-split $env:TRUSTED_SIGNING_ARGS) -fl __filelist.txt $f\n          if (-not $?) 
{ exit $LASTEXITCODE }\n        }\n        del __filelist.txt\n      displayName: 'Sign binaries'\n      ${{ if eq(parameters.ContinueOnError, 'false') }}:\n        retryCountOnTaskFailure: 3\n      ${{ else }}:\n        continueOnError: true\n      workingDirectory: ${{ parameters.WorkingDir }}\n      env:\n        INCLUDE: ${{ parameters.Include }}\n        EXCLUDE: ${{ parameters.Exclude }}\n        TRUSTED_SIGNING_CMD: $(__TrustedSigningCmd)\n        TRUSTED_SIGNING_ARGS: $(__TrustedSigningArgs)\n        ${{ if parameters.Filter }}:\n          FILTER: ${{ parameters.Filter }}\n\n\n  - ${{ if parameters.ExtractDir }}:\n    - powershell: |\n        if ($env:EXCLUDE) {\n          $files = (dir ($env:INCLUDE -split ',').Trim() -Exclude ($env:EXCLUDE -split ',').Trim() -File)\n        } else {\n          $files = (dir ($env:INCLUDE -split ',').Trim() -File)\n        }\n        $c = $files | %{ (Get-AuthenticodeSignature $_).SignerCertificate } | ?{ $_ -ne $null } | select -First 1\n        if (-not $c) {\n          Write-Host \"Failed to find certificate for ${{ parameters.SigningCertificate }}\"\n          exit\n        }\n\n        $d = mkdir $env:EXTRACT_DIR -Force\n        $cf = \"$d\\cert.cer\"\n        [IO.File]::WriteAllBytes($cf, $c.RawData)\n        $csha = (Get-FileHash $cf -Algorithm SHA256).Hash.ToLower()\n\n        $info = @{ Subject=$c.Subject; SHA256=$csha; }\n        $info | ConvertTo-JSON -Compress | Out-File -Encoding utf8 \"$d\\certinfo.json\"\n      displayName: \"Extract certificate info\"\n      workingDirectory: ${{ parameters.WorkingDir }}\n      env:\n        INCLUDE: ${{ parameters.Include }}\n        EXCLUDE: ${{ parameters.Exclude }}\n        EXTRACT_DIR: ${{ parameters.ExtractDir }}\n"
  },
  {
    "path": "windows-release/stage-build.yml",
    "content": "parameters:\n  DoPGO: false\n  # DoPGOARM64 only applies if DoPGO is also true\n  DoPGOARM64: true\n  DoFreethreaded: false\n  ToBeSigned: false\n  ExtraOptions: ''\n  ExtraOptionsFreethreaded: '--disable-gil'\n  TailCallingOption: ''\n  Post315OutputDir: false\n\njobs:\n- job: Build_Docs\n  displayName: Docs build\n  dependsOn: []\n\n  workspace:\n    clean: all\n\n  steps:\n  - template: ./checkout.yml\n\n  - script: Doc\\make.bat html\n    displayName: 'Build HTML docs'\n    env:\n      BUILDDIR: $(Build.BinariesDirectory)\\Doc\n\n  - task: CopyFiles@2\n    displayName: 'Assemble artifact: Doc'\n    inputs:\n      sourceFolder: $(Build.BinariesDirectory)\\Doc\n      targetFolder: $(Build.ArtifactStagingDirectory)\\Doc\n      contents: |\n        html\\**\\*\n\n  - publish: $(Build.ArtifactStagingDirectory)\\Doc\n    artifact: doc\n    displayName: 'Publish artifact: doc'\n\n\n- job: Build_Python\n  displayName: Python build\n  dependsOn: []\n\n  workspace:\n    clean: all\n\n  strategy:\n    matrix:\n      win32:\n        Name: win32\n        Arch: win32\n        ArchDir: win32\n        Platform: x86\n        Configuration: Release\n        _HostPython: .\\python\n        ExtraOptions: ${{ parameters.ExtraOptions }}\n        ${{ if eq(parameters.ToBeSigned, 'true') }}:\n          Artifact: unsigned_win32\n        ${{ else }}:\n          Artifact: bin_win32\n      win32_d:\n        Name: win32_d\n        Arch: win32\n        ArchDir: win32\n        Platform: x86\n        Configuration: Debug\n        _HostPython: .\\python\n        ExtraOptions: ${{ parameters.ExtraOptions }}\n        Artifact: bin_win32_d\n      ${{ if ne(parameters.DoPGO, 'true') }}:\n        amd64:\n          Name: amd64\n          Arch: amd64\n          ArchDir: amd64\n          Platform: x64\n          Configuration: Release\n          _HostPython: .\\python\n          ExtraOptions: ${{ parameters.ExtraOptions }} ${{ parameters.TailCallingOption }}\n          ${{ if 
eq(parameters.ToBeSigned, 'true') }}:\n            Artifact: unsigned_amd64\n          ${{ else }}:\n            Artifact: bin_amd64\n      amd64_d:\n        Name: amd64_d\n        Arch: amd64\n        ArchDir: amd64\n        Platform: x64\n        Configuration: Debug\n        _HostPython: .\\python\n        ExtraOptions: ${{ parameters.ExtraOptions }}\n        Artifact: bin_amd64_d\n      ${{ if or(ne(parameters.DoPGO, 'true'), ne(parameters.DoPGOARM64, 'true')) }}:\n        arm64:\n          Name: arm64\n          Arch: arm64\n          ArchDir: arm64\n          Platform: ARM64\n          Configuration: Release\n          _HostPython: python\n          ExtraOptions: ${{ parameters.ExtraOptions }}\n          ${{ if eq(parameters.ToBeSigned, 'true') }}:\n            Artifact: unsigned_arm64\n          ${{ else }}:\n            Artifact: bin_arm64\n      arm64_d:\n        Name: arm64_d\n        Arch: arm64\n        ArchDir: arm64\n        Platform: ARM64\n        Configuration: Debug\n        _HostPython: python\n        ExtraOptions: ${{ parameters.ExtraOptions }}\n        Artifact: bin_arm64_d\n      ${{ if eq(parameters.DoFreethreaded, 'true') }}:\n        win32_t:\n          Name: win32_t\n          Arch: win32\n          ArchDir: ${{ iif(eq(parameters.Post315OutputDir, 'true'), 'win32t', 'win32') }}\n          Platform: x86\n          Configuration: Release\n          _HostPython: .\\python\n          ExtraOptions: ${{ parameters.ExtraOptionsFreethreaded }}\n          ${{ if eq(parameters.ToBeSigned, 'true') }}:\n            Artifact: unsigned_win32_t\n          ${{ else }}:\n            Artifact: bin_win32_t\n        win32_td:\n          Name: win32_td\n          Arch: win32\n          ArchDir: ${{ iif(eq(parameters.Post315OutputDir, 'true'), 'win32t', 'win32') }}\n          Platform: x86\n          Configuration: Debug\n          _HostPython: .\\python\n          ExtraOptions: ${{ parameters.ExtraOptionsFreethreaded }}\n          Artifact: bin_win32_td\n     
   ${{ if ne(parameters.DoPGO, 'true') }}:\n          amd64_t:\n            Name: amd64_t\n            Arch: amd64\n            ArchDir: ${{ iif(eq(parameters.Post315OutputDir, 'true'), 'amd64t', 'amd64') }}\n            Platform: x64\n            Configuration: Release\n            _HostPython: .\\python\n            ExtraOptions: ${{ parameters.ExtraOptionsFreethreaded }} ${{ parameters.TailCallingOption }}\n            ${{ if eq(parameters.ToBeSigned, 'true') }}:\n              Artifact: unsigned_amd64_t\n            ${{ else }}:\n              Artifact: bin_amd64_t\n        amd64_td:\n          Name: amd64_td\n          Arch: amd64\n          ArchDir: ${{ iif(eq(parameters.Post315OutputDir, 'true'), 'amd64t', 'amd64') }}\n          Platform: x64\n          Configuration: Debug\n          _HostPython: .\\python\n          ExtraOptions: ${{ parameters.ExtraOptionsFreethreaded }}\n          Artifact: bin_amd64_td\n        ${{ if or(ne(parameters.DoPGO, 'true'), ne(parameters.DoPGOARM64, 'true')) }}:\n          arm64_t:\n            Name: arm64_t\n            Arch: arm64\n            ArchDir: ${{ iif(eq(parameters.Post315OutputDir, 'true'), 'arm64t', 'arm64') }}\n            Platform: ARM64\n            Configuration: Release\n            _HostPython: python\n            ExtraOptions: ${{ parameters.ExtraOptionsFreethreaded }}\n            ${{ if eq(parameters.ToBeSigned, 'true') }}:\n              Artifact: unsigned_arm64_t\n            ${{ else }}:\n              Artifact: bin_arm64_t\n        arm64_td:\n          Name: arm64_td\n          Arch: arm64\n          ArchDir: ${{ iif(eq(parameters.Post315OutputDir, 'true'), 'arm64t', 'arm64') }}\n          Platform: ARM64\n          Configuration: Debug\n          _HostPython: python\n          ExtraOptions: ${{ parameters.ExtraOptionsFreethreaded }}\n          Artifact: bin_arm64_td\n\n  steps:\n    - template: ./build-steps.yml\n\n\n- ${{ if eq(parameters.DoPGO, 'true') }}:\n  - job: Build_Python_PGO_Native\n    
displayName: Python PGO build\n    dependsOn: []\n\n    workspace:\n      clean: all\n\n    strategy:\n      matrix:\n        amd64:\n          Name: amd64\n          Arch: amd64\n          ArchDir: amd64\n          Platform: x64\n          _HostPython: .\\python\n          PythonExePattern: python.exe\n          ExtraOptions: ${{ parameters.ExtraOptions }} ${{ parameters.TailCallingOption }}\n          ${{ if eq(parameters.ToBeSigned, 'true') }}:\n            Artifact: unsigned_amd64\n          ${{ else }}:\n            Artifact: bin_amd64\n        ${{ if eq(parameters.DoFreethreaded, 'true') }}:\n          amd64_t:\n            Name: amd64_t\n            Arch: amd64\n            ArchDir: ${{ iif(eq(parameters.Post315OutputDir, 'true'), 'amd64t', 'amd64') }}\n            Platform: x64\n            _HostPython: .\\python\n            PythonExePattern: ${{ iif(eq(parameters.Post315OutputDir, 'true'), 'python.exe', 'python3*t.exe') }}\n            ExtraOptions: ${{ parameters.ExtraOptionsFreethreaded }} ${{ parameters.TailCallingOption }}\n            ${{ if eq(parameters.ToBeSigned, 'true') }}:\n              Artifact: unsigned_amd64_t\n            ${{ else }}:\n              Artifact: bin_amd64_t\n\n    steps:\n      - template: ./build-steps-pgo.yml\n        parameters:\n          PGInstrument: true\n          PGRun: true\n          PGUpdate: true\n\n  - ${{ if eq(parameters.DoPGOARM64, 'true') }}:\n    - job: Build_Python_PGO_1\n      displayName: Python PGO build\n      dependsOn: []\n\n      workspace:\n        clean: all\n\n      variables:\n        Platform: ARM64\n        _HostPython: python\n\n      strategy:\n        matrix:\n          arm64:\n            Name: arm64\n            Arch: arm64\n            ArchDir: arm64\n            PythonExePattern: python.exe\n            ExtraOptions: ${{ parameters.ExtraOptions }}\n            ${{ if eq(parameters.ToBeSigned, 'true') }}:\n              Artifact: unsigned_arm64\n            ${{ else }}:\n              
Artifact: bin_arm64\n          ${{ if eq(parameters.DoFreethreaded, 'true') }}:\n            arm64_t:\n              Name: arm64_t\n              Arch: arm64\n              ArchDir: ${{ iif(eq(parameters.Post315OutputDir, 'true'), 'arm64t', 'arm64') }}\n              PythonExePattern: ${{ iif(eq(parameters.Post315OutputDir, 'true'), 'python.exe', 'python3*t.exe') }}\n              ExtraOptions: ${{ parameters.ExtraOptionsFreethreaded }}\n              ${{ if eq(parameters.ToBeSigned, 'true') }}:\n                Artifact: unsigned_arm64_t\n              ${{ else }}:\n                Artifact: bin_arm64_t\n\n      steps:\n        - template: ./build-steps-pgo.yml\n          parameters:\n            PGInstrument: true\n\n    - job: Build_Python_PGO_2\n      displayName: Collect PGO profile\n      dependsOn: Build_Python_PGO_1\n\n      # Allow up to five hours for PGO run\n      timeoutInMinutes: 300\n\n      pool:\n        name: 'Windows ARM64'\n\n      workspace:\n        clean: all\n\n      variables:\n        Platform: ARM64\n\n      strategy:\n        matrix:\n          arm64:\n            Name: arm64\n            Arch: arm64\n            ArchDir: arm64\n            PythonExePattern: python.exe\n            ${{ if eq(parameters.ToBeSigned, 'true') }}:\n              Artifact: unsigned_arm64\n            ${{ else }}:\n              Artifact: bin_arm64\n          ${{ if eq(parameters.DoFreethreaded, 'true') }}:\n            arm64_t:\n              Name: arm64_t\n              Arch: arm64\n              ArchDir: ${{ iif(eq(parameters.Post315OutputDir, 'true'), 'arm64t', 'arm64') }}\n              PythonExePattern: ${{ iif(eq(parameters.Post315OutputDir, 'true'), 'python.exe', 'python3*t.exe') }}\n              ${{ if eq(parameters.ToBeSigned, 'true') }}:\n                Artifact: unsigned_arm64_t\n              ${{ else }}:\n                Artifact: bin_arm64_t\n\n      steps:\n        - template: ./build-steps-pgo.yml\n          parameters:\n            PGRun: 
true\n\n    - job: Build_Python_PGO_3\n      displayName: Merge PGO profile\n      dependsOn: Build_Python_PGO_2\n\n      workspace:\n        clean: all\n\n      variables:\n        Platform: ARM64\n        _HostPython: python\n\n      strategy:\n        matrix:\n          arm64:\n            Name: arm64\n            Arch: arm64\n            ArchDir: arm64\n            PythonExePattern: python.exe\n            ExtraOptions: ${{ parameters.ExtraOptions }}\n            ${{ if eq(parameters.ToBeSigned, 'true') }}:\n              Artifact: unsigned_arm64\n            ${{ else }}:\n              Artifact: bin_arm64\n          ${{ if eq(parameters.DoFreethreaded, 'true') }}:\n            arm64_t:\n              Name: arm64_t\n              Arch: arm64\n              ArchDir: ${{ iif(eq(parameters.Post315OutputDir, 'true'), 'arm64t', 'arm64') }}\n              PythonExePattern: ${{ iif(eq(parameters.Post315OutputDir, 'true'), 'python.exe', 'python3*t.exe') }}\n              ExtraOptions: ${{ parameters.ExtraOptionsFreethreaded }}\n              ${{ if eq(parameters.ToBeSigned, 'true') }}:\n                Artifact: unsigned_arm64_t\n              ${{ else }}:\n                Artifact: bin_arm64_t\n\n      steps:\n        - template: ./build-steps-pgo.yml\n          parameters:\n            PGUpdate: true\n\n\n- job: TclTk_Lib\n  displayName: Publish Tcl/Tk Library\n  dependsOn: []\n\n  variables:\n    IncludeLLVM: false\n\n  workspace:\n    clean: all\n\n  steps:\n  - template: ./checkout.yml\n\n  - script: PCbuild\\get_externals.bat --no-openssl --no-libffi\n    displayName: 'Get external dependencies'\n\n  - task: MSBuild@1\n    displayName: 'Copy Tcl/Tk lib for publish'\n    inputs:\n      solution: PCbuild\\tcltk.props\n      platform: x86\n      msbuildArguments: /t:CopyTclTkLib /p:OutDir=\"$(Build.ArtifactStagingDirectory)\\tcl_win32\"\n\n  - task: MSBuild@1\n    displayName: 'Copy Tcl/Tk lib for publish'\n    inputs:\n      solution: PCbuild\\tcltk.props\n      
platform: x64\n      msbuildArguments: /t:CopyTclTkLib /p:OutDir=\"$(Build.ArtifactStagingDirectory)\\tcl_amd64\"\n\n  - task: MSBuild@1\n    displayName: 'Copy Tcl/Tk lib for publish'\n    inputs:\n      solution: PCbuild\\tcltk.props\n      platform: ARM64\n      msbuildArguments: /t:CopyTclTkLib /p:OutDir=\"$(Build.ArtifactStagingDirectory)\\tcl_arm64\"\n\n  - publish: '$(Build.ArtifactStagingDirectory)\\tcl_win32'\n    artifact: tcltk_lib_win32\n    displayName: 'Publish artifact: tcltk_lib_win32'\n\n  - publish: '$(Build.ArtifactStagingDirectory)\\tcl_amd64'\n    artifact: tcltk_lib_amd64\n    displayName: 'Publish artifact: tcltk_lib_amd64'\n\n  - publish: '$(Build.ArtifactStagingDirectory)\\tcl_arm64'\n    artifact: tcltk_lib_arm64\n    displayName: 'Publish artifact: tcltk_lib_arm64'\n"
  },
  {
    "path": "windows-release/stage-layout-embed.yml",
    "content": "parameters:\n  BuildToPackage: current\n  SigningCertificate: ''\n\njobs:\n- job: Make_Embed_Layout\n  displayName: Make embeddable layout\n\n  workspace:\n    clean: all\n\n  variables:\n    PYTHONHOME: $(Build.SourcesDirectory)\n\n  strategy:\n    matrix:\n      win32:\n        Name: win32\n        Arch: win32\n      amd64:\n        Name: amd64\n        Arch: amd64\n      arm64:\n        Name: arm64\n        Arch: arm64\n        HostArch: amd64\n\n  steps:\n  - template: ./checkout.yml\n\n  - task: DownloadPipelineArtifact@2\n    displayName: 'Download artifact: bin_$(Name)'\n    inputs:\n      ${{ if eq(parameters.BuildToPackage, 'current') }}:\n        buildType: current\n      ${{ else }}:\n        buildType: specific\n        buildVersionToDownload: specific\n        project: $(resources.pipeline.build_to_package.projectId)\n        pipeline: $(resources.pipeline.build_to_package.pipelineId)\n        runId: $(resources.pipeline.build_to_package.runID)\n      artifact: bin_$(Name)\n      targetPath: $(Pipeline.Workspace)\\bin_$(Name)\n\n  - template: ./layout-command.yml\n    parameters:\n      BuildToPackage: ${{ parameters.BuildToPackage }}\n\n  - powershell: |\n      $d = (.\\PCbuild\\build.bat -V) | %{ if($_ -match '\\s+(\\w+):\\s*(.+)\\s*$') { @{$Matches[1] = $Matches[2];} }};\n      Write-Host \"##vso[task.setvariable variable=VersionText]$($d.PythonVersion)\"\n    displayName: 'Extract version numbers'\n\n  - powershell: >\n      $(LayoutCmd)\n      --copy \"$(Build.ArtifactStagingDirectory)\\layout\"\n      --zip \"$(Build.ArtifactStagingDirectory)\\embed\\python-$(VersionText)-embed-$(Name).zip\"\n      --preset-embed\n    displayName: 'Generate embeddable layout'\n\n  - powershell: |\n      git clone $(Build.Repository.Uri) -b $(Build.SourceBranchName) --single-branch --no-checkout \"$(Pipeline.Workspace)\\release-tools\"\n      git -C \"$(Pipeline.Workspace)\\release-tools\" checkout $(Build.SourceVersion)\n    displayName: 'Clone 
the python/release-tools repository'\n\n  - powershell: >\n      & \"$(Python)\"\n      \"$(Pipeline.Workspace)\\release-tools\\sbom.py\"\n      \"--cpython-source-dir=$(Build.SourcesDirectory)\"\n      \"$(Build.ArtifactStagingDirectory)\\embed\\python-$(VersionText)-embed-$(Name).zip\"\n    workingDirectory: $(Build.BinariesDirectory)\n    condition: and(succeeded(), not(variables['SkipSBOM']))\n    displayName: 'Create SBOMs for binaries'\n\n  - task: CopyFiles@2\n    displayName: 'Layout Artifact: sbom'\n    inputs:\n      sourceFolder: $(Build.ArtifactStagingDirectory)\\embed\n      targetFolder: $(Build.ArtifactStagingDirectory)\\sbom\n      flatten: true\n      contents: |\n        **\\*.spdx.json\n\n  - publish: '$(Build.ArtifactStagingDirectory)\\layout'\n    artifact: layout_embed_$(Name)\n    displayName: 'Publish Artifact: layout_embed_$(Name)'\n\n  - task: PublishBuildArtifacts@1\n    displayName: 'Publish Artifact: embed'\n    inputs:\n      PathtoPublish: $(Build.ArtifactStagingDirectory)\\embed\n      ArtifactName: embed\n\n  - task: PublishBuildArtifacts@1\n    displayName: 'Publish Artifact: sbom'\n    condition: and(succeeded(), not(variables['SkipSBOM']))\n    inputs:\n      PathtoPublish: $(Build.ArtifactStagingDirectory)\\sbom\n      ArtifactName: sbom\n"
  },
  {
    "path": "windows-release/stage-layout-full.yml",
    "content": "parameters:\n  BuildToPackage: current\n  DoFreethreaded: false\n  SigningCertificate: ''\n\njobs:\n- job: Make_Layouts\n  displayName: Make layouts\n\n  workspace:\n    clean: all\n\n  variables:\n    PYTHONHOME: $(Build.SourcesDirectory)\n\n  strategy:\n    matrix:\n      win32:\n        Name: win32\n        DebugName: win32_d\n        Arch: win32\n        TclLibrary: tcltk_lib_win32\n        ExtraOptions: ''\n      amd64:\n        Name: amd64\n        DebugName: amd64_d\n        Arch: amd64\n        TclLibrary: tcltk_lib_amd64\n        ExtraOptions: ''\n      arm64:\n        Name: arm64\n        DebugName: arm64_d\n        Arch: arm64\n        HostArch: amd64\n        TclLibrary: tcltk_lib_arm64\n        ExtraOptions: ''\n      ${{ if eq(parameters.DoFreethreaded, 'true') }}:\n        win32_t:\n          Name: win32_t\n          DebugName: win32_td\n          Arch: win32\n          HostArch: win32\n          TclLibrary: tcltk_lib_win32\n          ExtraOptions: --include-freethreaded\n        amd64_t:\n          Name: amd64_t\n          DebugName: amd64_td\n          Arch: amd64\n          HostArch: amd64\n          TclLibrary: tcltk_lib_amd64\n          ExtraOptions: --include-freethreaded\n        arm64_t:\n          Name: arm64_t\n          DebugName: arm64_td\n          Arch: arm64\n          HostArch: amd64\n          TclLibrary: tcltk_lib_arm64\n          ExtraOptions: --include-freethreaded\n\n  steps:\n  - template: ./checkout.yml\n\n  - task: DownloadPipelineArtifact@2\n    displayName: 'Download artifact: bin_$(Name)'\n    inputs:\n      ${{ if eq(parameters.BuildToPackage, 'current') }}:\n        buildType: current\n      ${{ else }}:\n        buildType: specific\n        buildVersionToDownload: specific\n        project: $(resources.pipeline.build_to_package.projectId)\n        pipeline: $(resources.pipeline.build_to_package.pipelineId)\n        runId: $(resources.pipeline.build_to_package.runID)\n      artifact: bin_$(Name)\n      
targetPath: $(Pipeline.Workspace)\\bin_$(Name)\n\n  - task: DownloadPipelineArtifact@2\n    displayName: 'Download artifact: bin_$(DebugName)'\n    inputs:\n      ${{ if eq(parameters.BuildToPackage, 'current') }}:\n        buildType: current\n      ${{ else }}:\n        buildType: specific\n        buildVersionToDownload: specific\n        project: $(resources.pipeline.build_to_package.projectId)\n        pipeline: $(resources.pipeline.build_to_package.pipelineId)\n        runId: $(resources.pipeline.build_to_package.runID)\n      artifact: bin_$(DebugName)\n      targetPath: $(Pipeline.Workspace)\\bin_$(DebugName)\n\n  - powershell: |\n      $dest = \"$(Pipeline.Workspace)\\bin_$(Name)\"\n      dir \"$(Pipeline.Workspace)\\bin_$(DebugName)\" | `\n      ?{ -not (Test-Path \"$dest\\$($_.Name)\") } | `\n      %{ copy $_.FullName $dest }\n    displayName: 'Copy debug binaries'\n\n  - task: DownloadPipelineArtifact@2\n    displayName: 'Download artifact: doc'\n    inputs:\n      ${{ if eq(parameters.BuildToPackage, 'current') }}:\n        buildType: current\n      ${{ else }}:\n        buildType: specific\n        buildVersionToDownload: specific\n        project: $(resources.pipeline.build_to_package.projectId)\n        pipeline: $(resources.pipeline.build_to_package.pipelineId)\n        runId: $(resources.pipeline.build_to_package.runID)\n      artifact: doc\n      targetPath: $(Pipeline.Workspace)\\doc\n\n  - task: DownloadPipelineArtifact@2\n    displayName: 'Download artifact: $(TclLibrary)'\n    condition: and(succeeded(), variables['TclLibrary'])\n    inputs:\n      ${{ if eq(parameters.BuildToPackage, 'current') }}:\n        buildType: current\n      ${{ else }}:\n        buildType: specific\n        buildVersionToDownload: specific\n        project: $(resources.pipeline.build_to_package.projectId)\n        pipeline: $(resources.pipeline.build_to_package.pipelineId)\n        runId: $(resources.pipeline.build_to_package.runID)\n      artifact: $(TclLibrary)\n   
   targetPath: $(Pipeline.Workspace)\\$(TclLibrary)\n\n  - powershell: |\n      Write-Host \"##vso[task.setvariable variable=TCL_LIBRARY]$(Pipeline.Workspace)\\$(TclLibrary)\\tcl8\"\n    displayName: 'Update TCL_LIBRARY'\n    condition: and(succeeded(), variables['TclLibrary'])\n\n  - powershell: |\n      copy \"$(Pipeline.Workspace)\\bin_$(Name)\\Activate.ps1\" Lib\\venv\\scripts\\common\\Activate.ps1 -Force -Verbose\n    displayName: 'Copy signed files into sources'\n\n  - template: ./layout-command.yml\n    parameters:\n      BuildToPackage: ${{ parameters.BuildToPackage }}\n\n  - powershell: |\n      $(LayoutCmd) --copy \"$(Build.ArtifactStagingDirectory)\\layout\" --preset-default $(ExtraOptions)\n    displayName: 'Generate full layout'\n\n  - publish: '$(Build.ArtifactStagingDirectory)\\layout'\n    artifact: layout_full_$(Name)\n    displayName: 'Publish Artifact: layout_full_$(Name)'\n"
  },
  {
    "path": "windows-release/stage-layout-msix.yml",
    "content": "parameters:\n  BuildToPackage: current\n  SigningCertificate: ''\n\njobs:\n- job: Make_MSIX_Layout\n  displayName: Make MSIX layout\n\n  workspace:\n    clean: all\n\n  variables:\n    PYTHONHOME: $(Build.SourcesDirectory)\n\n  strategy:\n    matrix:\n      #win32:\n      #  Name: win32\n      #  TclLibrary: tcltk_lib_win32\n      #  ExtraOptions: --precompile\n      amd64:\n        Name: amd64\n        Arch: amd64\n        TclLibrary: tcltk_lib_amd64\n        ExtraOptions: --precompile\n      arm64:\n        Name: arm64\n        Arch: arm64\n        HostArch: amd64\n        TclLibrary: tcltk_lib_arm64\n        ExtraOptions: --precompile\n\n  steps:\n  - template: ./checkout.yml\n\n  - task: DownloadPipelineArtifact@2\n    displayName: 'Download artifact: bin_$(Name)'\n    inputs:\n      ${{ if eq(parameters.BuildToPackage, 'current') }}:\n        buildType: current\n      ${{ else }}:\n        buildType: specific\n        buildVersionToDownload: specific\n        project: $(resources.pipeline.build_to_package.projectId)\n        pipeline: $(resources.pipeline.build_to_package.pipelineId)\n        runId: $(resources.pipeline.build_to_package.runID)\n      artifact: bin_$(Name)\n      targetPath: $(Pipeline.Workspace)\\bin_$(Name)\n\n  - task: DownloadPipelineArtifact@2\n    displayName: 'Download artifact: $(TclLibrary)'\n    condition: and(succeeded(), variables['TclLibrary'])\n    inputs:\n      ${{ if eq(parameters.BuildToPackage, 'current') }}:\n        buildType: current\n      ${{ else }}:\n        buildType: specific\n        buildVersionToDownload: specific\n        project: $(resources.pipeline.build_to_package.projectId)\n        pipeline: $(resources.pipeline.build_to_package.pipelineId)\n        runId: $(resources.pipeline.build_to_package.runID)\n      artifact: $(TclLibrary)\n      targetPath: $(Pipeline.Workspace)\\$(TclLibrary)\n\n  - powershell: |\n      Write-Host \"##vso[task.setvariable 
variable=TCL_LIBRARY]$(Pipeline.Workspace)\\$(TclLibrary)\\tcl8\"\n    displayName: 'Update TCL_LIBRARY'\n    condition: and(succeeded(), variables['TclLibrary'])\n\n  - ${{ if parameters.SigningCertificate }}:\n    - task: DownloadPipelineArtifact@2\n      displayName: 'Download artifact: cert'\n      inputs:\n        ${{ if eq(parameters.BuildToPackage, 'current') }}:\n          buildType: current\n        ${{ else }}:\n          buildType: specific\n          buildVersionToDownload: specific\n          project: $(resources.pipeline.build_to_package.projectId)\n          pipeline: $(resources.pipeline.build_to_package.pipelineId)\n          runId: $(resources.pipeline.build_to_package.runID)\n        artifact: cert\n        targetPath: $(Pipeline.Workspace)\\cert\n\n  - powershell: |\n      copy \"$(Pipeline.Workspace)\\bin_$(Name)\\Activate.ps1\" Lib\\venv\\scripts\\common\\Activate.ps1 -Force -Verbose\n    displayName: 'Copy signed files into sources'\n\n  - template: ./layout-command.yml\n    parameters:\n      BuildToPackage: ${{ parameters.BuildToPackage }}\n\n  - powershell: |\n      Remove-Item \"$(Build.ArtifactStagingDirectory)\\appx-store\" -Recurse -Force -EA 0\n      $(LayoutCmd) --copy \"$(Build.ArtifactStagingDirectory)\\appx-store\" --preset-appx $(ExtraOptions)\n    displayName: 'Generate store APPX layout'\n    env:\n      TCL_LIBRARY: $(TclLibrary)\n\n  - ${{ if parameters.SigningCertificate }}:\n    # The dotnet sign tool shouldn't need this, but we do because of the sccd file\n    - powershell: |\n        $info = (gc \"$(Pipeline.Workspace)\\cert\\certinfo.json\" | ConvertFrom-JSON)\n        Write-Host \"Side-loadable APPX must be signed with '$($info.Subject)'\"\n        Write-Host \"##vso[task.setvariable variable=APPX_DATA_PUBLISHER]$($info.Subject)\"\n        Write-Host \"##vso[task.setvariable variable=APPX_DATA_SHA256]$($info.SHA256)\"\n      displayName: 'Override signing parameters'\n\n  - powershell: |\n      Remove-Item 
\"$(Build.ArtifactStagingDirectory)\\appx\" -Recurse -Force -EA 0\n      $(LayoutCmd) --copy \"$(Build.ArtifactStagingDirectory)\\appx\" --preset-appx --include-symbols --include-tests $(ExtraOptions)\n    displayName: 'Generate sideloading APPX layout'\n    env:\n      TCL_LIBRARY: $(TclLibrary)\n\n  - publish: '$(Build.ArtifactStagingDirectory)\\appx-store'\n    artifact: layout_appxstore_$(Name)\n    displayName: 'Publish Artifact: layout_appxstore_$(Name)'\n\n  - publish: '$(Build.ArtifactStagingDirectory)\\appx'\n    artifact: layout_appx_$(Name)\n    displayName: 'Publish Artifact: layout_appx_$(Name)'\n"
  },
  {
    "path": "windows-release/stage-layout-nuget.yml",
    "content": "parameters:\n  BuildToPackage: current\n  DoFreethreaded: false\n  SigningCertificate: ''\n\njobs:\n- job: Make_Nuget_Layout\n  displayName: Make Nuget layout\n\n  workspace:\n    clean: all\n\n  variables:\n    PYTHONHOME: $(Build.SourcesDirectory)\n\n  strategy:\n    matrix:\n      win32:\n        Name: win32\n        Arch: win32\n        ExtraOptions: ''\n      amd64:\n        Name: amd64\n        Arch: amd64\n        ExtraOptions: ''\n      arm64:\n        Name: arm64\n        Arch: arm64\n        HostArch: amd64\n        ExtraOptions: ''\n      ${{ if eq(parameters.DoFreethreaded, 'true') }}:\n        win32_t:\n          Name: win32_t\n          Arch: win32\n          HostArch: win32\n          ExtraOptions: --include-freethreaded\n        amd64_t:\n          Name: amd64_t\n          Arch: amd64\n          HostArch: amd64\n          ExtraOptions: --include-freethreaded\n        arm64_t:\n          Name: arm64_t\n          Arch: arm64\n          HostArch: amd64\n          ExtraOptions: --include-freethreaded\n\n  steps:\n  - template: ./checkout.yml\n\n  - task: DownloadPipelineArtifact@2\n    displayName: 'Download artifact: bin_$(Name)'\n    inputs:\n      ${{ if eq(parameters.BuildToPackage, 'current') }}:\n        buildType: current\n      ${{ else }}:\n        buildType: specific\n        buildVersionToDownload: specific\n        project: $(resources.pipeline.build_to_package.projectId)\n        pipeline: $(resources.pipeline.build_to_package.pipelineId)\n        runId: $(resources.pipeline.build_to_package.runID)\n      artifact: bin_$(Name)\n      targetPath: $(Pipeline.Workspace)\\bin_$(Name)\n\n  - powershell: |\n      copy $(Pipeline.Workspace)\\bin_$(Name)\\Activate.ps1 Lib\\venv\\scripts\\common\\Activate.ps1 -Force -Verbose\n    displayName: 'Copy signed files into sources'\n\n  - template: ./layout-command.yml\n    parameters:\n      BuildToPackage: ${{ parameters.BuildToPackage }}\n\n  - powershell: |\n      $(LayoutCmd) --copy 
\"$(Build.ArtifactStagingDirectory)\\nuget\" --preset-nuget $(ExtraOptions)\n    displayName: 'Generate nuget layout'\n\n  - publish: '$(Build.ArtifactStagingDirectory)\\nuget'\n    artifact: layout_nuget_$(Name)\n    displayName: 'Publish Artifact: layout_nuget_$(Name)'\n"
  },
  {
    "path": "windows-release/stage-layout-pymanager.yml",
    "content": "parameters:\n  BuildToPackage: current\n  DoFreethreaded: false\n  DoEmbed: false\n  LayoutScriptBranch: main\n  SigningCertificate: ''\n\njobs:\n- job: Make_PyManager_Layouts\n  displayName: Make PyManager layouts\n\n  workspace:\n    clean: all\n\n  variables:\n    PYTHONHOME: $(Build.SourcesDirectory)\\cpython\n\n  strategy:\n    matrix:\n      win32:\n        Name: win32\n        BinArtifact: bin_win32\n        Arch: win32\n        TclLibrary: tcltk_lib_win32\n        LayoutOptions: '--preset-pymanager'\n        IncludeDoc: true\n      amd64:\n        Name: amd64\n        BinArtifact: bin_amd64\n        Arch: amd64\n        TclLibrary: tcltk_lib_amd64\n        LayoutOptions: '--preset-pymanager'\n        IncludeDoc: true\n      arm64:\n        Name: arm64\n        BinArtifact: bin_arm64\n        Arch: arm64\n        HostArch: amd64\n        TclLibrary: tcltk_lib_arm64\n        LayoutOptions: '--preset-pymanager'\n        IncludeDoc: true\n      win32_test:\n        Name: win32_test\n        BinArtifact: bin_win32\n        Arch: win32\n        TclLibrary: tcltk_lib_win32\n        LayoutOptions: '--preset-pymanager-test'\n        IncludeDoc: true\n      amd64_test:\n        Name: amd64_test\n        BinArtifact: bin_amd64\n        Arch: amd64\n        TclLibrary: tcltk_lib_amd64\n        LayoutOptions: '--preset-pymanager-test'\n        IncludeDoc: true\n      arm64_test:\n        Name: arm64_test\n        BinArtifact: bin_arm64\n        Arch: arm64\n        HostArch: amd64\n        TclLibrary: tcltk_lib_arm64\n        LayoutOptions: '--preset-pymanager-test'\n        IncludeDoc: true\n      ${{ if eq(parameters.DoFreethreaded, 'true') }}:\n        win32_t:\n          Name: win32_t\n          BinArtifact: bin_win32_t\n          Arch: win32\n          HostArch: win32\n          TclLibrary: tcltk_lib_win32\n          LayoutOptions: '--preset-pymanager --include-freethreaded'\n          IncludeDoc: true\n        amd64_t:\n          Name: amd64_t\n    
      BinArtifact: bin_amd64_t\n          Arch: amd64\n          HostArch: amd64\n          TclLibrary: tcltk_lib_amd64\n          LayoutOptions: '--preset-pymanager --include-freethreaded'\n          IncludeDoc: true\n        arm64_t:\n          Name: arm64_t\n          BinArtifact: bin_arm64_t\n          Arch: arm64\n          HostArch: amd64\n          TclLibrary: tcltk_lib_arm64\n          LayoutOptions: '--preset-pymanager --include-freethreaded'\n          IncludeDoc: true\n      ${{ if eq(parameters.DoEmbed, 'true') }}:\n        win32_embed:\n          Name: win32_embed\n          BinArtifact: bin_win32\n          Arch: win32\n          HostArch: win32\n          LayoutOptions: '--preset-embed --include-install-embed-json'\n        amd64_embed:\n          Name: amd64_embed\n          BinArtifact: bin_amd64\n          Arch: amd64\n          HostArch: amd64\n          LayoutOptions: '--preset-embed --include-install-embed-json'\n        arm64_embed:\n          Name: arm64_embed\n          BinArtifact: bin_arm64\n          Arch: arm64\n          HostArch: amd64\n          LayoutOptions: '--preset-embed --include-install-embed-json'\n\n  steps:\n  - template: ./checkout.yml\n    parameters:\n      IncludeSelf: true\n      Path: $(Build.SourcesDirectory)\\cpython\n\n  - ${{ if ne(parameters.BuildToPackage, 'current') }}:\n    - powershell: >\n        git clone\n        --progress\n        -v\n        --depth 1\n        --branch ${{ parameters.LayoutScriptBranch }}\n        --single-branch\n        https://github.com/$(GitRemote)/cpython.git\n        \"$(Build.SourcesDirectory)\\layout-script\"\n      displayName: 'Clone PC/layout script from main'\n\n  - task: DownloadPipelineArtifact@2\n    displayName: 'Download artifact: $(BinArtifact)'\n    inputs:\n      ${{ if eq(parameters.BuildToPackage, 'current') }}:\n        buildType: current\n      ${{ else }}:\n        buildType: specific\n        buildVersionToDownload: specific\n        project: 
$(resources.pipeline.build_to_package.projectId)\n        pipeline: $(resources.pipeline.build_to_package.pipelineId)\n        runId: $(resources.pipeline.build_to_package.runID)\n      artifact: $(BinArtifact)\n      targetPath: $(Pipeline.Workspace)\\$(BinArtifact)\n\n  - task: DownloadPipelineArtifact@2\n    displayName: 'Download artifact: doc'\n    condition: and(succeeded(), variables['IncludeDoc'])\n    inputs:\n      ${{ if eq(parameters.BuildToPackage, 'current') }}:\n        buildType: current\n      ${{ else }}:\n        buildType: specific\n        buildVersionToDownload: specific\n        project: $(resources.pipeline.build_to_package.projectId)\n        pipeline: $(resources.pipeline.build_to_package.pipelineId)\n        runId: $(resources.pipeline.build_to_package.runID)\n      artifact: doc\n      targetPath: $(Pipeline.Workspace)\\doc\n\n  - task: DownloadPipelineArtifact@2\n    displayName: 'Download artifact: $(TclLibrary)'\n    condition: and(succeeded(), variables['TclLibrary'])\n    inputs:\n      ${{ if eq(parameters.BuildToPackage, 'current') }}:\n        buildType: current\n      ${{ else }}:\n        buildType: specific\n        buildVersionToDownload: specific\n        project: $(resources.pipeline.build_to_package.projectId)\n        pipeline: $(resources.pipeline.build_to_package.pipelineId)\n        runId: $(resources.pipeline.build_to_package.runID)\n      artifact: $(TclLibrary)\n      targetPath: $(Pipeline.Workspace)\\$(TclLibrary)\n\n  - powershell: |\n      Write-Host \"##vso[task.setvariable variable=TCL_LIBRARY]$(Pipeline.Workspace)\\$(TclLibrary)\\tcl8\"\n    displayName: 'Update TCL_LIBRARY'\n    condition: and(succeeded(), variables['TclLibrary'])\n\n  - powershell: |\n      copy \"$(Pipeline.Workspace)\\$(BinArtifact)\\Activate.ps1\" Lib\\venv\\scripts\\common\\Activate.ps1 -Force -Verbose\n    displayName: 'Copy signed files into sources'\n    workingDirectory: $(Build.SourcesDirectory)\\cpython\n\n  - template: 
./layout-command.yml\n    parameters:\n      Binaries: $(Pipeline.Workspace)\\$(BinArtifact)\n      Sources: $(Build.SourcesDirectory)\\cpython\n      Docs: $(Pipeline.Workspace)\\doc\n      BuildToPackage: ${{ parameters.BuildToPackage }}\n      ${{ if ne(parameters.BuildToPackage, 'current') }}:\n        LayoutSources: $(Build.SourcesDirectory)\\layout-script\n\n  - powershell: >\n      $(LayoutCmd)\n      --copy \"$(Build.ArtifactStagingDirectory)\\layout\"\n      --zip \"$(Build.ArtifactStagingDirectory)\\zip\\package.zip\"\n      $(LayoutOptions)\n    displayName: 'Generate PyManager layout'\n\n  - powershell: |\n      # ConvertFrom-Json can't handle empty keys, but we don't need them anyway.\n      # Replace with an underscore so it can load.\n      $install = (gc -raw \"layout\\__install__.json\") -replace '\"\":', '\"_\":' | ConvertFrom-Json\n      # Bring a copy of the install data separate from the ZIP\n      copy \"layout\\__install__.json\" \"zip\\__install__.json\"\n      # Rename the ZIP to match the target filename\n      # (which we didn't know when we named it package.zip)\n      $filename = Split-Path -Leaf $install.url\n      move \"zip\\package.zip\" \"zip\\$filename\"\n    displayName: 'Prepare PyManager distribution files'\n    workingDirectory: $(Build.ArtifactStagingDirectory)\n\n  - powershell: >\n      & $(Python) \"$(Pipeline.Workspace)\\release-tools\\sbom.py\"\n      \"--cpython-source-dir=$(Build.SourcesDirectory)\\cpython\"\n      $(gci \"zip\\*.zip\")\n    workingDirectory: $(Build.ArtifactStagingDirectory)\n    condition: and(succeeded(), not(variables['SkipSBOM']))\n    displayName: 'Create SBOMs for package'\n\n  - publish: '$(Build.ArtifactStagingDirectory)\\layout'\n    artifact: layout_pymanager_$(Name)\n    displayName: 'Publish Artifact: layout_pymanager_$(Name)'\n\n  - publish: '$(Build.ArtifactStagingDirectory)\\zip'\n    artifact: pymanager_$(Name)\n    displayName: 'Publish Artifact: pymanager_$(Name)'\n"
  },
  {
    "path": "windows-release/stage-layout-symbols.yml",
    "content": "parameters:\n  BuildToPackage: current\n  DoFreethreaded: false\n  Packages:\n  - win32\n  - amd64\n  - arm64\n  - win32_d\n  - amd64_d\n  - arm64_d\n  PackagesFreethreaded:\n  - win32_t\n  - amd64_t\n  - arm64_t\n  - win32_td\n  - amd64_td\n  - arm64_td\n  SigningCertificate: ''\n\n\njobs:\n- job: Layout_Symbols\n  displayName: Make symbols layout\n\n  workspace:\n    clean: all\n\n  steps:\n  - checkout: none\n\n  - ${{ each p in parameters.Packages }}:\n    - task: DownloadPipelineArtifact@2\n      displayName: 'Download artifact: bin_${{ p }}'\n      inputs:\n        ${{ if eq(parameters.BuildToPackage, 'current') }}:\n          buildType: current\n        ${{ else }}:\n          buildType: specific\n          buildVersionToDownload: specific\n          project: $(resources.pipeline.build_to_package.projectId)\n          pipeline: $(resources.pipeline.build_to_package.pipelineId)\n          runId: $(resources.pipeline.build_to_package.runID)\n        artifact: bin_${{ p }}\n        itemPattern: '**\\*.pdb'\n        targetPath: $(Build.ArtifactStagingDirectory)\\${{ p }}\n\n  - ${{ if eq(parameters.DoFreethreaded, 'true') }}:\n    - ${{ each p in parameters.PackagesFreethreaded }}:\n      - task: DownloadPipelineArtifact@2\n        displayName: 'Download artifact: bin_${{ p }}'\n        inputs:\n          ${{ if eq(parameters.BuildToPackage, 'current') }}:\n            buildType: current\n          ${{ else }}:\n            buildType: specific\n            buildVersionToDownload: specific\n            project: $(resources.pipeline.build_to_package.projectId)\n            pipeline: $(resources.pipeline.build_to_package.pipelineId)\n            runId: $(resources.pipeline.build_to_package.runID)\n          artifact: bin_${{ p }}\n          itemPattern: '**\\*.pdb'\n          targetPath: $(Build.ArtifactStagingDirectory)\\${{ p }}\n\n  - publish: $(Build.ArtifactStagingDirectory)\n    artifact: symbols\n    displayName: 'Publish Artifact: symbols'\n"
  },
  {
    "path": "windows-release/stage-msi.yml",
    "content": "parameters:\n  BuildToPackage: current\n  DoARM64: true\n  DoFreethreaded: false\n  SigningCertificate: ''\n  Post315OutputDir: false\n\njobs:\n- job: Make_MSI\n  displayName: Make MSI\n\n  variables:\n  - ${{ if eq(parameters.SigningCertificate, 'PythonSoftwareFoundation') }}:\n    - group: CPythonSign\n  - ${{ if eq(parameters.SigningCertificate, 'TestSign') }}:\n    - group: CPythonTestSign\n  - name: ReleaseUri\n    value: http://www.python.org/{arch}\n  - name: DownloadUrl\n    value: https://www.python.org/ftp/python/{version}/{arch}{releasename}/{msi}\n  - name: Py_OutDir\n    value: $(Build.BinariesDirectory)\n\n  workspace:\n    clean: all\n\n  steps:\n  - template: msi-steps.yml\n    parameters:\n      BuildToPackage: ${{ parameters.BuildToPackage }}\n      DoFreethreaded: ${{ parameters.DoFreethreaded }}\n      SigningCertificate: ${{ parameters.SigningCertificate }}\n      Artifacts:\n      - artifact: doc\n        target: $(Build.SourcesDirectory)\\Doc\\build\n      - artifact: bin_win32\n        target: $(Build.BinariesDirectory)\\win32\n      - artifact: bin_win32_d\n        target: $(Build.BinariesDirectory)\\win32\n      - artifact: tcltk_lib_win32\n      - artifact: bin_amd64\n        target: $(Build.BinariesDirectory)\\amd64\n      - artifact: bin_amd64_d\n        target: $(Build.BinariesDirectory)\\amd64\n      - artifact: tcltk_lib_amd64\n      - ${{ if eq(parameters.DoARM64, 'true') }}:\n        - artifact: bin_arm64\n          target: $(Build.BinariesDirectory)\\arm64\n        - artifact: bin_arm64_d\n          target: $(Build.BinariesDirectory)\\arm64\n        - artifact: tcltk_lib_arm64\n      # Freethreaded binaries copy into the same target directory, but files\n      # are not overwritten.\n      - ${{ if eq(parameters.DoFreethreaded, 'true') }}:\n        - ${{ if eq(parameters.Post315OutputDir, 'true') }}:\n          - artifact: bin_win32_t\n            target: $(Build.BinariesDirectory)\\win32t\n          - artifact: 
bin_win32_td\n            target: $(Build.BinariesDirectory)\\win32t\n          - artifact: bin_amd64_t\n            target: $(Build.BinariesDirectory)\\amd64t\n          - artifact: bin_amd64_td\n            target: $(Build.BinariesDirectory)\\amd64t\n          - ${{ if eq(parameters.DoARM64, 'true') }}:\n            - artifact: bin_arm64_t\n              target: $(Build.BinariesDirectory)\\arm64t\n            - artifact: bin_arm64_td\n              target: $(Build.BinariesDirectory)\\arm64t\n        - ${{ else }}:\n          - artifact: bin_win32_t\n            target: $(Build.BinariesDirectory)\\win32\n          - artifact: bin_win32_td\n            target: $(Build.BinariesDirectory)\\win32\n          - artifact: bin_amd64_t\n            target: $(Build.BinariesDirectory)\\amd64\n          - artifact: bin_amd64_td\n            target: $(Build.BinariesDirectory)\\amd64\n          - ${{ if eq(parameters.DoARM64, 'true') }}:\n            - artifact: bin_arm64_t\n              target: $(Build.BinariesDirectory)\\arm64\n            - artifact: bin_arm64_td\n              target: $(Build.BinariesDirectory)\\arm64\n      Bundles:\n      - bundle: win32\n        Platform: x86\n        PythonForBuild: $(Build.BinariesDirectory)\\win32\n        TclTkArtifact: tcltk_lib_win32\n      - bundle: amd64\n        Platform: x64\n        PythonForBuild: $(Build.BinariesDirectory)\\amd64\n        TclTkArtifact: tcltk_lib_amd64\n      - ${{ if eq(parameters.DoARM64, 'true') }}:\n        - bundle: arm64\n          Platform: ARM64\n          PythonForBuild: $(Build.BinariesDirectory)\\win32\n          TclTkArtifact: tcltk_lib_arm64\n"
  },
  {
    "path": "windows-release/stage-pack-msix.yml",
    "content": "parameters:\n  SigningCertificate: ''\n\njobs:\n- job: Pack_MSIX\n  displayName: Pack MSIX bundles\n\n  workspace:\n    clean: all\n\n  variables:\n    SigningCertificate: ${{ parameters.SigningCertificate }}\n\n  strategy:\n    matrix:\n      amd64:\n        Name: amd64\n        Artifact: appx\n        Suffix:\n        ShouldSign: true\n      amd64_store:\n        Name: amd64\n        Artifact: appxstore\n        Suffix: -store\n        CreateMsixUpload: true\n      arm64:\n        Name: arm64\n        Artifact: appx\n        Suffix:\n        ShouldSign: true\n      arm64_store:\n        Name: arm64\n        Artifact: appxstore\n        Suffix: -store\n        CreateMsixUpload: true\n\n  steps:\n  - template: ./checkout.yml\n\n  - download: current\n    artifact: layout_$(Artifact)_$(Name)\n    displayName: 'Download artifact: layout_$(Artifact)_$(Name)'\n\n  - download: current\n    artifact: symbols\n    patterns: $(Name)\\*\n    displayName: 'Download artifact: symbols'\n\n  - powershell: |\n      $d = (.\\PCbuild\\build.bat -V) | %{ if($_ -match '\\s+(\\w+):\\s*(.+)\\s*$') { @{$Matches[1] = $Matches[2];} }};\n      Write-Host \"##vso[task.setvariable variable=VersionText]$($d.PythonVersion)\"\n      Write-Host \"##vso[task.setvariable variable=VersionNumber]$($d.PythonVersionNumber)\"\n      Write-Host \"##vso[task.setvariable variable=VersionHex]$($d.PythonVersionHex)\"\n      Write-Host \"##vso[task.setvariable variable=VersionUnique]$($d.PythonVersionUnique)\"\n      Write-Host \"##vso[task.setvariable variable=Filename]python-$($d.PythonVersion)-$(Name)$(Suffix)\"\n    displayName: 'Extract version numbers'\n\n  - powershell: |\n      ./Tools/msi/make_appx.ps1 -layout \"$(Pipeline.Workspace)\\layout_$(Artifact)_$(Name)\" -msix \"$(Build.ArtifactStagingDirectory)\\msix\\$(Filename).msix\"\n    displayName: 'Build msix'\n\n  - powershell: |\n      7z a -tzip \"$(Build.ArtifactStagingDirectory)\\msix\\$(Filename).appxsym\" *.pdb\n    
displayName: 'Build appxsym'\n    workingDirectory: $(Pipeline.Workspace)\\symbols\\$(Name)\n\n  - powershell: |\n      7z a -tzip \"$(Build.ArtifactStagingDirectory)\\msixupload\\$(Filename).msixupload\" *\n    displayName: 'Build msixupload'\n    condition: and(succeeded(), eq(variables['CreateMsixUpload'], 'true'))\n    workingDirectory: $(Build.ArtifactStagingDirectory)\\msix\n\n  - task: PublishBuildArtifacts@1\n    displayName: 'Publish Artifact: MSIX'\n    inputs:\n      PathtoPublish: '$(Build.ArtifactStagingDirectory)\\msix'\n      ${{ if parameters.SigningCertificate }}:\n        ArtifactName: unsigned_msix\n      ${{ else }}:\n        ArtifactName: msix\n\n  - task: PublishBuildArtifacts@1\n    displayName: 'Publish Artifact: MSIXUpload'\n    condition: and(succeeded(), eq(variables['CreateMsixUpload'], 'true'))\n    inputs:\n      PathtoPublish: '$(Build.ArtifactStagingDirectory)\\msixupload'\n      ArtifactName: msixupload\n\n- ${{ if parameters.SigningCertificate }}:\n  - job: Sign_MSIX\n    displayName: Sign side-loadable MSIX bundles\n    dependsOn:\n    - Pack_MSIX\n\n    workspace:\n      clean: all\n\n    variables:\n    - ${{ if eq(parameters.SigningCertificate, 'PythonSoftwareFoundation') }}:\n      - group: CPythonSign\n    - ${{ if eq(parameters.SigningCertificate, 'TestSign') }}:\n      - group: CPythonTestSign\n\n    steps:\n    - template: ./checkout.yml\n\n    - powershell: |\n        $d = (.\\PCbuild\\build.bat -V) | %{ if($_ -match '\\s+(\\w+):\\s*(.+)\\s*$') { @{$Matches[1] = $Matches[2];} }};\n        Write-Host \"##vso[task.setvariable variable=SigningDescription]Python $($d.PythonVersion)\"\n      displayName: 'Update signing description'\n      condition: and(succeeded(), not(variables['SigningDescription']))\n\n    - task: DownloadBuildArtifacts@0\n      displayName: 'Download Artifact: unsigned_msix'\n      inputs:\n        artifactName: unsigned_msix\n        downloadPath: $(Build.BinariesDirectory)\n\n    # Getting \"Error: 
SignerSign() failed.\" (-2147024885/0x8007000b)\"?\n    # It may be that the certificate info collected in stage-sign.yml is wrong. Check that\n    # you do not have multiple matches for the certificate name you have specified.\n    - template: sign-files.yml\n      parameters:\n        Include: '*.msix'\n        # Additional filter to avoid recursively signing package contents\n        Filter: '*.msix'\n        WorkingDir: $(Build.BinariesDirectory)\\unsigned_msix\n        SigningCertificate: ${{ parameters.SigningCertificate }}\n\n    - task: PublishBuildArtifacts@1\n      displayName: 'Publish Artifact: MSIX'\n      inputs:\n        PathtoPublish: '$(Build.BinariesDirectory)\\unsigned_msix'\n        ArtifactName: msix\n"
  },
  {
    "path": "windows-release/stage-pack-nuget.yml",
    "content": "parameters:\n  DoFreethreaded: false\n  SigningCertificate: ''\n\njobs:\n- job: Pack_Nuget\n  displayName: Pack Nuget bundles\n\n  workspace:\n    clean: all\n\n  strategy:\n    matrix:\n      amd64:\n        Name: amd64\n      win32:\n        Name: win32\n      arm64:\n        Name: arm64\n      ${{ if eq(parameters.DoFreethreaded, 'true') }}:\n        amd64_t:\n          Name: amd64_t\n        win32_t:\n          Name: win32_t\n        arm64_t:\n          Name: arm64_t\n\n  variables:\n  - ${{ if eq(parameters.SigningCertificate, 'PythonSoftwareFoundation') }}:\n    - group: CPythonSign\n  - ${{ if eq(parameters.SigningCertificate, 'TestSign') }}:\n    - group: CPythonTestSign\n\n  steps:\n  - checkout: none\n\n  - download: current\n    artifact: layout_nuget_$(Name)\n    displayName: 'Download artifact: layout_nuget_$(Name)'\n\n  - task: NugetToolInstaller@0\n    displayName: 'Install Nuget'\n    inputs:\n      versionSpec: '>=5.0'\n\n  - powershell: >\n      nuget pack\n      \"$(Pipeline.Workspace)\\layout_nuget_$(Name)\\python.nuspec\"\n      -OutputDirectory $(Build.ArtifactStagingDirectory)\n      -NoPackageAnalysis\n      -NonInteractive\n    condition: and(succeeded(), not(variables['OverrideNugetVersion']))\n    displayName: 'Create nuget package'\n\n  - powershell: >\n      nuget pack\n      \"$(Pipeline.Workspace)\\layout_nuget_$(Name)\\python.nuspec\"\n      -OutputDirectory $(Build.ArtifactStagingDirectory)\n      -NoPackageAnalysis\n      -NonInteractive\n      -Version \"$(OverrideNugetVersion)\"\n    condition: and(succeeded(), variables['OverrideNugetVersion'])\n    displayName: 'Create nuget package'\n\n  - template: sign-files.yml\n    parameters:\n      Include: '*.nupkg'\n      # Additional filter to avoid recursively signing package contents\n      Filter: '*.nupkg'\n      WorkingDir: $(Build.ArtifactStagingDirectory)\n      SigningCertificate: ${{ parameters.SigningCertificate }}\n      # Nuget signing is not supported by 
our test certificate, so ignore errors\n      ${{ if eq(parameters.SigningCertificate, 'TestSign') }}:\n        ContinueOnError: true\n\n  - task: PublishBuildArtifacts@1\n    displayName: 'Publish Artifact: nuget'\n    inputs:\n      PathtoPublish: '$(Build.ArtifactStagingDirectory)'\n      ArtifactName: nuget\n"
  },
  {
    "path": "windows-release/stage-pack-pymanager.yml",
    "content": "parameters:\n  DoFreethreaded: false\n  DoEmbed: false\n  SigningCertificate: ''\n\n  Artifacts:\n  - name: win32\n  - name: amd64\n  - name: arm64\n  - name: win32_test\n  - name: amd64_test\n  - name: arm64_test\n  - name: win32_t\n    freethreaded: true\n  - name: amd64_t\n    freethreaded: true\n  - name: arm64_t\n    freethreaded: true\n  - name: win32_embed\n    embed: true\n  - name: amd64_embed\n    embed: true\n  - name: arm64_embed\n    embed: true\n\njobs:\n- job: Pack_PyManager\n  displayName: Pack PyManager bundle\n\n  workspace:\n    clean: all\n\n  steps:\n  - task: UsePythonVersion@0\n    displayName: 'Use Python 3.10 or later'\n    inputs:\n      versionSpec: '>=3.10'\n\n  - ${{ each a in parameters.artifacts }}:\n    - ${{ if and(or(not(a.freethreaded), eq(parameters.DoFreethreaded, 'true')), or(not(a.embed), eq(parameters.DoEmbed, 'true'))) }}:\n      - task: DownloadPipelineArtifact@2\n        displayName: 'Download artifact: pymanager_${{ a.name }}'\n        inputs:\n          buildType: current\n          artifact: pymanager_${{ a.name }}\n          targetPath: $(Build.BinariesDirectory)\\${{ a.name }}\n\n      - powershell: |\n          cp *.zip (mkdir $env:TARGET -Force)\n          cp __install__.json \"${env:TARGET}\\__install__.${{ a.name }}.json\"\n        displayName: 'Relocate ${{ a.name }}'\n        workingDirectory: $(Build.BinariesDirectory)\\${{ a.name }}\n        env:\n          TARGET: $(Build.ArtifactStagingDirectory)\n\n  - powershell: |\n      \"Bundling the following packages:\"\n      (dir __install__.*.json).FullName\n      python \"$(Build.SourcesDirectory)\\windows-release\\merge-and-upload.py\"\n      del __install__.*.json\n    workingDirectory: $(Build.ArtifactStagingDirectory)\n    displayName: 'Generate local index'\n    env:\n      LOCAL_INDEX: 1\n      NO_UPLOAD: 1\n      INDEX_FILE: 'index.json'\n\n  - publish: '$(Build.ArtifactStagingDirectory)'\n    artifact: pymanager_bundle\n    displayName: 
'Publish Artifact: pymanager_bundle'\n"
  },
  {
    "path": "windows-release/stage-publish-nugetorg.yml",
    "content": "parameters:\n  BuildToPublish: current\n\njobs:\n- job: Publish_Nuget\n  displayName: Publish Nuget packages\n  condition: and(succeeded(), ne(variables['SkipNugetPublish'], 'true'))\n\n  workspace:\n    clean: all\n\n  steps:\n  - checkout: none\n\n  - task: DownloadBuildArtifacts@1\n    displayName: 'Download artifact: nuget'\n    inputs:\n      ${{ if eq(parameters.BuildToPublish, 'current') }}:\n        buildType: current\n      ${{ else }}:\n        buildType: specific\n        buildVersionToDownload: specific\n        project: $(resources.pipeline.build_to_publish.projectId)\n        pipeline: $(resources.pipeline.build_to_publish.pipelineId)\n        runId: $(resources.pipeline.build_to_publish.runID)\n      artifactName: nuget\n      downloadPath: $(Build.BinariesDirectory)\n\n  - powershell: 'gci pythonarm*.nupkg | %{ Write-Host \"Not publishing: $($_.Name)\"; gi $_ } | del'\n    displayName: 'Prevent publishing ARM64 packages'\n    workingDirectory: '$(Build.BinariesDirectory)\\nuget'\n    condition: and(succeeded(), ne(variables['PublishARM64'], 'true'))\n\n  - task: NuGetCommand@2\n    displayName: Push packages\n    condition: and(succeeded(), eq(variables['IsRealSigned'], 'true'))\n    inputs:\n      command: push\n      packagesToPush: '$(Build.BinariesDirectory)\\nuget\\*.nupkg'\n      nuGetFeedType: external\n      publishFeedCredentials: 'Python on Nuget'\n"
  },
  {
    "path": "windows-release/stage-publish-pymanager.yml",
    "content": "parameters:\n  BuildToPublish: current\n  DoFreethreaded: false\n  DoEmbed: false\n  HashAlgorithms: ['SHA256']\n  SigningCertificate: ''\n\n  Artifacts:\n  - name: win32\n  - name: amd64\n  - name: arm64\n  - name: win32_test\n  - name: amd64_test\n  - name: arm64_test\n  - name: win32_t\n    freethreaded: true\n  - name: amd64_t\n    freethreaded: true\n  - name: arm64_t\n    freethreaded: true\n  - name: win32_embed\n    embed: true\n  - name: amd64_embed\n    embed: true\n  - name: arm64_embed\n    embed: true\n\njobs:\n- job: Publish_PyManager\n  displayName: Publish PyManager packages to python.org\n  condition: and(succeeded(), ne(variables['SkipPythonOrgPublish'], 'true'))\n\n  variables:\n  - group: PythonOrgPublish\n  - ${{ if eq(parameters.SigningCertificate, 'PythonSoftwareFoundation') }}:\n    - group: CPythonSign\n  - ${{ if eq(parameters.SigningCertificate, 'TestSign') }}:\n    - group: CPythonTestSign\n  # Override the SigningDescription here, since we're only signing the feed\n  # and not the actual binaries.\n  - name: SigningDescription\n    value: \"Python $(Build.BuildNumber)\"\n\n  workspace:\n    clean: all\n\n  steps:\n  - task: UsePythonVersion@0\n    displayName: 'Use Python 3.10 or later'\n    inputs:\n      versionSpec: '>=3.10'\n\n  - ${{ each a in parameters.artifacts }}:\n    - ${{ if and(or(not(a.freethreaded), eq(parameters.DoFreethreaded, 'true')), or(not(a.embed), eq(parameters.DoEmbed, 'true'))) }}:\n      - task: DownloadPipelineArtifact@2\n        displayName: 'Download artifact: pymanager_${{ a.name }}'\n        inputs:\n          ${{ if eq(parameters.BuildToPublish, 'current') }}:\n            buildType: current\n          ${{ else }}:\n            buildType: specific\n            buildVersionToDownload: specific\n            project: $(resources.pipeline.build_to_publish.projectId)\n            pipeline: $(resources.pipeline.build_to_publish.pipelineId)\n            runId: 
$(resources.pipeline.build_to_publish.runID)\n          artifact: pymanager_${{ a.name }}\n          targetPath: $(Build.BinariesDirectory)\\${{ a.name }}\n\n  - ${{ if eq(parameters.SigningCertificate, 'PythonSoftwareFoundation') }}:\n    - task: DownloadSecureFile@1\n      name: sshkey\n      inputs:\n        secureFile: pydotorg-ssh.ppk\n      displayName: 'Download PuTTY key'\n\n  - powershell: |\n      git clone https://github.com/python/cpython-bin-deps --revision 9f9e6fc31a55406ee5ff0198ea47bbb445eeb942 --depth 1 --progress -v \"putty\"\n      \"##vso[task.prependpath]$(gi putty)\"\n    workingDirectory: $(Pipeline.Workspace)\n    displayName: 'Download PuTTY binaries'\n\n  # Use the template just to configure the signing tool.\n  # This will set MAKECAT and SIGN_COMMAND to be injected into later build steps\n  - template: sign-files.yml\n    parameters:\n      Include: \"\"\n      InstallTool: false\n      InstallLegacyTool: true\n      ExportLegacyCommand: SIGN_COMMAND\n      SigningCertificate: ${{ parameters.SigningCertificate }}\n\n  - powershell: |\n      if ($env:FILENAME) {\n        \"##vso[task.setvariable variable=_PyManagerIndexFilename]${env:FILENAME}\"\n        \"Updating index named '${env:FILENAME}'\"\n      } else {\n        \"##vso[task.setvariable variable=_PyManagerIndexFilename]index-windows.json\"\n        \"Updating index named 'index-windows.json'\"\n      }\n    env:\n      FILENAME: $(PyManagerIndexFilename)\n    displayName: 'Infer index filename'\n\n  - ${{ if ne(parameters.SigningCertificate, 'PythonSoftwareFoundation') }}:\n    - powershell: |\n        \"Preparing following packages:\"\n        (dir \"__install__.*.json\").FullName\n        (dir \"*\\__install__.json\").FullName\n        python \"$(Build.SourcesDirectory)\\windows-release\\merge-and-upload.py\"\n      workingDirectory: $(Build.BinariesDirectory)\n      displayName: 'Produce uploadable ZIPs (no upload)'\n      env:\n        NO_UPLOAD: 1\n        INDEX_URL: 
'$(PyDotOrgUrlPrefix)python/$(_PyManagerIndexFilename)'\n        INDEX_FILE: '$(Build.ArtifactStagingDirectory)\\index\\$(_PyManagerIndexFilename)'\n        MANIFEST_FILE: '$(Build.ArtifactStagingDirectory)\\index\\windows.json'\n\n  - ${{ else }}:\n    - powershell: |\n        \"Uploading following packages:\"\n        (dir \"__install__.*.json\").FullName\n        (dir \"*\\__install__.json\").FullName\n        python \"$(Build.SourcesDirectory)\\windows-release\\merge-and-upload.py\"\n      workingDirectory: $(Build.BinariesDirectory)\n      displayName: 'Upload ZIPs'\n      env:\n        INDEX_URL: '$(PyDotOrgUrlPrefix)python/$(_PyManagerIndexFilename)'\n        INDEX_FILE: '$(Build.ArtifactStagingDirectory)\\index\\$(_PyManagerIndexFilename)'\n        MANIFEST_FILE: '$(Build.ArtifactStagingDirectory)\\index\\windows.json'\n        UPLOAD_URL_PREFIX: $(PyDotOrgUrlPrefix)\n        UPLOAD_PATH_PREFIX: $(PyDotOrgUploadPathPrefix)\n        UPLOAD_HOST: $(PyDotOrgServer)\n        UPLOAD_HOST_KEY: $(PyDotOrgHostKey)\n        UPLOAD_USER: $(PyDotOrgUsername)\n        UPLOAD_KEYFILE: $(sshkey.secureFilePath)\n\n  - ${{ each alg in parameters.HashAlgorithms }}:\n    - powershell: |\n        $files = (dir \"*\\__install__.json\").Directory | %{ dir -File \"$_\\*.zip\" }\n        $files = $files, (dir -File \"${env:INDEX_DIR}\\*.json\")\n        $hashes = $files  | `\n            Sort-Object Name | `\n            Format-Table Name, @{\n              Label=\"${{ alg }}\";\n              Expression={(Get-FileHash $_ -Algorithm ${{ alg }}).Hash}\n            }, Length -AutoSize | `\n            Out-String -Width 4096\n        $d = mkdir \"$(Build.ArtifactStagingDirectory)\\hashes\" -Force\n        $hashes | Out-File \"$d\\hashes.txt\" -Encoding ascii -Append\n        $hashes\n      workingDirectory: $(Build.BinariesDirectory)\n      displayName: 'Generate hashes (${{ alg }})'\n      env:\n        INDEX_DIR: '$(Build.ArtifactStagingDirectory)\\index'\n\n  - publish: 
'$(Build.ArtifactStagingDirectory)\\index'\n    artifact: pymanager_index\n\n  - publish: '$(Build.ArtifactStagingDirectory)\\hashes'\n    artifact: pymanager_hashes\n    displayName: 'Publish Artifact: hashes'\n"
  },
  {
    "path": "windows-release/stage-publish-pythonorg.yml",
    "content": "parameters:\n  BuildToPublish: current\n  DoEmbed: true\n  IncludeGPG: false\n  HashAlgorithms: ['SHA256', 'MD5']\n\njobs:\n- job: Publish_Python\n  displayName: Publish python.org packages\n  condition: and(succeeded(), ne(variables['SkipPythonOrgPublish'], 'true'))\n\n  variables:\n  - group: PythonOrgPublish\n\n  workspace:\n    clean: all\n\n  steps:\n  - task: UsePythonVersion@0\n    displayName: 'Use Python 3.10 or later'\n    inputs:\n      versionSpec: '>=3.10'\n\n  - task: DownloadPipelineArtifact@2\n    displayName: 'Download artifact: msi'\n    inputs:\n      ${{ if eq(parameters.BuildToPublish, 'current') }}:\n        buildType: current\n      ${{ else }}:\n        buildType: specific\n        buildVersionToDownload: specific\n        project: $(resources.pipeline.build_to_publish.projectId)\n        pipeline: $(resources.pipeline.build_to_publish.pipelineId)\n        runId: $(resources.pipeline.build_to_publish.runID)\n      artifact: msi\n      targetPath: $(Pipeline.Workspace)\\msi\n\n  - ${{ if eq(parameters.DoEmbed, 'true') }}:\n    - task: DownloadBuildArtifacts@1\n      displayName: 'Download artifact: embed'\n      inputs:\n        ${{ if eq(parameters.BuildToPublish, 'current') }}:\n          buildType: current\n        ${{ else }}:\n          buildType: specific\n          buildVersionToDownload: specific\n          project: $(resources.pipeline.build_to_publish.projectId)\n          pipeline: $(resources.pipeline.build_to_publish.pipelineId)\n          buildId: $(resources.pipeline.build_to_publish.runID)\n        artifactName: embed\n        # Artifact name is added to path for DownloadBuildArtifacts\n        downloadPath: $(Pipeline.Workspace)\n\n  - task: DownloadBuildArtifacts@1\n    displayName: 'Download artifact: sbom'\n    inputs:\n      ${{ if eq(parameters.BuildToPublish, 'current') }}:\n        buildType: current\n      ${{ else }}:\n        buildType: specific\n        buildVersionToDownload: specific\n        
project: $(resources.pipeline.build_to_publish.projectId)\n        pipeline: $(resources.pipeline.build_to_publish.pipelineId)\n        buildId: $(resources.pipeline.build_to_publish.runID)\n      artifactName: sbom\n      # Artifact name is added to path for DownloadBuildArtifacts\n      downloadPath: $(Pipeline.Workspace)\n\n\n  - ${{ if eq(parameters.DoEmbed, 'true') }}:\n    # Note that ARM64 MSIs are skipped at build when this option is specified\n    - powershell: 'gci *embed-arm*.zip | %{ Write-Host \"Not publishing: $($_.Name)\"; gi $_ } | del'\n      displayName: 'Prevent publishing ARM64 packages'\n      workingDirectory: '$(Pipeline.Workspace)\\embed'\n      condition: and(succeeded(), ne(variables['PublishARM64'], 'true'))\n\n\n  - ${{ if eq(parameters.IncludeGPG, 'true') }}:\n    - task: DownloadSecureFile@1\n      name: gpgkey\n      inputs:\n        secureFile: 'python-signing.key'\n      displayName: 'Download GPG key'\n\n    - powershell: |\n        git clone https://github.com/python/cpython-bin-deps --branch gpg --single-branch --depth 1 --progress -v \"gpg\"\n        gpg/gpg2.exe --import \"$(gpgkey.secureFilePath)\"\n        $files = gci -File \"msi\\*\\*\", \"embed\\*.zip\" -EA SilentlyContinue\n        $files.FullName | %{\n            gpg/gpg2.exe -ba --batch --passphrase $(GPGPassphrase) $_\n            \"Made signature for $_\"\n        }\n      displayName: 'Generate GPG signatures'\n      workingDirectory: $(Pipeline.Workspace)\n\n    - powershell: |\n        $p = gps \"gpg-agent\" -EA 0\n        if ($p) { $p.Kill() }\n      displayName: 'Kill GPG agent'\n      condition: true\n\n\n  - task: DownloadSecureFile@1\n    name: sshkey\n    inputs:\n      secureFile: pydotorg-ssh.ppk\n    displayName: 'Download PuTTY key'\n\n  - powershell: |\n      git clone https://github.com/python/cpython-bin-deps --branch putty --single-branch --depth 1 --progress -v \"putty\"\n      \"##vso[task.prependpath]$(gi putty)\"\n    workingDirectory: 
$(Pipeline.Workspace)\n    displayName: 'Download PuTTY binaries'\n\n  - powershell: >\n      $(Build.SourcesDirectory)\\windows-release\\uploadrelease.ps1\n      -build msi\n      -user $(PyDotOrgUsername)\n      -server $(PyDotOrgServer)\n      -hostkey $(PyDotOrgHostKey)\n      -keyfile \"$(sshkey.secureFilePath)\"\n      -embed embed\n      -sbom sbom\n    workingDirectory: $(Pipeline.Workspace)\n    condition: and(succeeded(), eq(variables['IsRealSigned'], 'true'))\n    displayName: 'Upload files to python.org'\n\n  - powershell: >\n      python\n      \"$(Build.SourcesDirectory)\\windows-release\\purge.py\"\n      (gci msi\\*\\python-*.exe | %{ $_.Name -replace 'python-(.+?)(-|\\.exe).+', '$1' } | select -First 1)\n    workingDirectory: $(Pipeline.Workspace)\n    condition: and(succeeded(), eq(variables['IsRealSigned'], 'true'))\n    displayName: 'Purge CDN'\n\n  - powershell: |\n      $failures = 0\n      gci \"msi\\*\\*.exe\" -File | %{\n          $d = mkdir \"tests\\$($_.BaseName)\" -Force\n          gci $d -r -File | del\n          $ic = copy $_ $d -PassThru\n          \"Checking layout for $($ic.Name)\"\n          Start-Process -wait $ic \"/passive\", \"/layout\", \"$d\\layout\", \"/log\", \"$d\\log\\install.log\"\n          if (-not $?) 
{\n              Write-Error \"Failed to validate layout of $($ic.Name)\"\n              $failures += 1\n          }\n      }\n      if ($failures) {\n        Write-Error \"Failed to validate $failures installers\"\n        exit 1\n      }\n    workingDirectory: $(Pipeline.Workspace)\n    condition: and(succeeded(), eq(variables['IsRealSigned'], 'true'))\n    displayName: 'Test layouts'\n\n  - ${{ each alg in parameters.HashAlgorithms }}:\n    - powershell: |\n        $files = gci -File \"msi\\*\\*.exe\", \"embed\\*.zip\" -EA SilentlyContinue\n        $hashes = $files  | `\n            Sort-Object Name | `\n            Format-Table Name, @{\n              Label=\"${{ alg }}\";\n              Expression={(Get-FileHash $_ -Algorithm ${{ alg }}).Hash}\n            }, Length -AutoSize | `\n            Out-String -Width 4096\n        $d = mkdir \"$(Build.ArtifactStagingDirectory)\\hashes\" -Force\n        $hashes | Out-File \"$d\\hashes.txt\" -Encoding ascii -Append\n        $hashes\n      workingDirectory: $(Pipeline.Workspace)\n      displayName: 'Generate hashes (${{ alg }})'\n\n  - ${{ if eq(parameters.IncludeGPG, 'true') }}:\n    - powershell: |\n        \"Copying:\"\n        $files = gci -File \"msi\\*\\python*.asc\", \"embed\\*.asc\" -EA SilentlyContinue\n        $files.FullName\n        $d = mkdir \"$(Build.ArtifactStagingDirectory)\\hashes\" -Force\n        move $files $d -Force\n        gci msi -Directory | %{ move \"msi\\$_\\*.asc\" (mkdir \"$d\\$_\" -Force) }\n      workingDirectory: $(Pipeline.Workspace)\n      displayName: 'Copy GPG signatures for build'\n\n  - publish: '$(Build.ArtifactStagingDirectory)\\hashes'\n    artifact: hashes\n    displayName: 'Publish Artifact: hashes'\n"
  },
  {
    "path": "windows-release/stage-sign.yml",
    "content": "parameters:\n  Include: '*.exe, *.dll, *.pyd, *.cat, *.ps1'\n  Exclude: 'vcruntime*, libffi*, libcrypto*, libssl*'\n  SigningCertificate: ''\n  DoFreethreaded: 'false'\n\njobs:\n- ${{ if and(parameters.SigningCertificate, ne(parameters.SigningCertificate, 'Unsigned')) }}:\n  - job: Sign_Files\n    displayName: Sign Python binaries\n\n    workspace:\n      clean: all\n\n    strategy:\n      matrix:\n        win32:\n          Name: win32\n        amd64:\n          Name: amd64\n        arm64:\n          Name: arm64\n        ${{ if eq(parameters.DoFreethreaded, 'true') }}:\n          win32_t:\n            Name: win32_t\n          amd64_t:\n            Name: amd64_t\n          arm64_t:\n            Name: arm64_t\n\n    variables:\n    - ${{ if eq(parameters.SigningCertificate, 'PythonSoftwareFoundation') }}:\n      - group: CPythonSign\n    - ${{ if eq(parameters.SigningCertificate, 'TestSign') }}:\n      - group: CPythonTestSign\n\n    steps:\n    - template: ./checkout.yml\n\n    - powershell: |\n        $d = (.\\PCbuild\\build.bat -V) | %{ if($_ -match '\\s+(\\w+):\\s*(.+)\\s*$') { @{$Matches[1] = $Matches[2];} }};\n        $tag = git rev-parse --short HEAD\n        $desc = \"Python $($d.PythonVersion) ($tag)\"\n        Write-Host \"##vso[task.setvariable variable=SigningDescription]$desc\"\n        Write-Host \"Updated signing description to: $desc\"\n      displayName: 'Update signing description'\n      condition: and(succeeded(), not(variables['SigningDescription']))\n\n    - powershell: |\n        Write-Host \"##vso[build.addbuildtag]signed\"\n      displayName: 'Add build tags'\n\n    - task: DownloadPipelineArtifact@2\n      displayName: 'Download artifact: unsigned_$(Name)'\n      inputs:\n        artifactName: unsigned_$(Name)\n        targetPath: $(Build.BinariesDirectory)\\bin\n\n    - template: sign-files.yml\n      parameters:\n        Include: ${{ parameters.Include }}\n        Exclude: ${{ parameters.Exclude }}\n        WorkingDir: 
$(Build.BinariesDirectory)\\bin\n        ExtractDir: $(Build.BinariesDirectory)\\cert\n        SigningCertificate: ${{ parameters.SigningCertificate }}\n\n    - publish: '$(Build.BinariesDirectory)\\bin'\n      artifact: bin_$(Name)\n      displayName: 'Publish artifact: bin_$(Name)'\n\n    - publish: '$(Build.BinariesDirectory)\\cert'\n      artifact: cert\n      displayName: 'Publish artifact: cert'\n\n\n- ${{ else }}:\n  - job: Mark_Unsigned\n    displayName: Tag unsigned build\n\n    steps:\n    - checkout: none\n\n    - powershell: |\n        Write-Host \"##vso[build.addbuildtag]unsigned\"\n      displayName: 'Add build tag'\n"
  },
  {
    "path": "windows-release/stage-test-embed.yml",
    "content": "jobs:\n- job: Test_Embed\n  displayName: Test Embed\n\n  workspace:\n    clean: all\n\n  strategy:\n    matrix:\n      win32:\n        Name: win32\n      amd64:\n        Name: amd64\n\n  steps:\n  - checkout: none\n\n  - task: DownloadBuildArtifacts@0\n    displayName: 'Download artifact: embed'\n    inputs:\n      artifactName: embed\n      downloadPath: $(Build.BinariesDirectory)\n\n  - powershell: |\n      $p = gi \"$(Build.BinariesDirectory)\\embed\\python*embed-$(Name).zip\"\n      Expand-Archive -Path $p -DestinationPath \"$(Build.BinariesDirectory)\\Python\"\n      $p = gi \"$(Build.BinariesDirectory)\\Python\\python.exe\"\n      Write-Host \"##vso[task.prependpath]$(Split-Path -Parent $p)\"\n    displayName: 'Install Python and add to PATH'\n\n  - script: |\n      python -c \"import sys; print(sys.version)\"\n    displayName: 'Collect version number'\n    condition: and(succeeded(), not(variables['SkipTests']))\n\n  - script: |\n      python -m site\n    displayName: 'Collect site'\n    condition: and(succeeded(), not(variables['SkipTests']))\n"
  },
  {
    "path": "windows-release/stage-test-msi.yml",
    "content": "parameters:\n  DoFreethreaded: false\n\njobs:\n- job: Test_MSI\n  displayName: Test MSI\n\n  workspace:\n    clean: all\n\n  variables:\n    ${{ if eq(parameters.DoFreethreaded, 'true') }}:\n      IncludeFreethreadedOpt: Include_freethreaded=1\n    ${{ else }}:\n      IncludeFreethreadedOpt: ''\n\n  strategy:\n    matrix:\n      win32_User:\n        ExeMatch: 'python-[\\dabrc.]+\\.exe'\n        Logs: $(Build.ArtifactStagingDirectory)\\logs\\win32_User\n        InstallAllUsers: 0\n      win32_Machine:\n        ExeMatch: 'python-[\\dabrc.]+\\.exe'\n        Logs: $(Build.ArtifactStagingDirectory)\\logs\\win32_Machine\n        InstallAllUsers: 1\n      amd64_User:\n        ExeMatch: 'python-[\\dabrc.]+-amd64\\.exe'\n        Logs: $(Build.ArtifactStagingDirectory)\\logs\\amd64_User\n        InstallAllUsers: 0\n      amd64_Machine:\n        ExeMatch: 'python-[\\dabrc.]+-amd64\\.exe'\n        Logs: $(Build.ArtifactStagingDirectory)\\logs\\amd64_Machine\n        InstallAllUsers: 1\n\n  steps:\n  - checkout: none\n\n  - task: DownloadPipelineArtifact@2\n    displayName: 'Download artifact: msi'\n    inputs:\n      artifactName: msi\n      targetPath: $(Build.BinariesDirectory)\\msi\n\n  - powershell: |\n      $p = (gci -r *.exe | ?{ $_.Name -match '$(ExeMatch)' } | select -First 1)\n      Write-Host \"##vso[task.setvariable variable=SetupExe]$($p.FullName)\"\n      Write-Host \"##vso[task.setvariable variable=SetupExeName]$($p.Name)\"\n    displayName: 'Find installer executable'\n    workingDirectory: $(Build.BinariesDirectory)\\msi\n\n  - script: >\n      \"$(SetupExe)\"\n      /passive\n      /log \"$(Logs)\\install\\log.txt\"\n      TargetDir=\"$(Build.BinariesDirectory)\\Python\"\n      Include_debug=1\n      Include_symbols=1\n      InstallAllUsers=$(InstallAllUsers)\n      $(IncludeFreethreadedOpt)\n    displayName: 'Install Python'\n\n  - powershell: |\n      gci \"$(Build.BinariesDirectory)\\python\"\n    displayName: 'List installed files'\n\n  - 
powershell: |\n      $p = gi \"$(Build.BinariesDirectory)\\Python\\python.exe\"\n      Write-Host \"##vso[task.prependpath]$(Split-Path -Parent $p)\"\n    displayName: 'Add test Python to PATH'\n\n  - script: |\n      python -c \"import sys; print(sys.version)\"\n    displayName: 'Collect version number'\n    condition: and(succeeded(), not(variables['SkipTests']))\n\n  - script: |\n      python -m site\n    displayName: 'Collect site'\n    condition: and(succeeded(), not(variables['SkipTests']))\n\n  - ${{ if eq(parameters.DoFreethreaded, 'true') }}:\n    - powershell: |\n        $p = (gci \"$(Build.BinariesDirectory)\\Python\\python3*t.exe\" | select -First 1)\n        Write-Host \"Found $p\"\n        if (-not $p) {\n            Write-Host \"Did not find python3*t.exe in:\"\n            dir \"$(Build.BinariesDirectory)\\Python\"\n            throw \"Free-threaded binaries were not installed\"\n        } else {\n            & $p -c \"import sys; print(sys.version)\"\n        }\n      displayName: 'Collect free-threaded version number'\n      condition: and(succeeded(), not(variables['SkipTests']))\n\n  - powershell: |\n      gci -r \"${env:PROGRAMDATA}\\Microsoft\\Windows\\Start Menu\\Programs\\Python*\"\n    displayName: 'Capture per-machine Start Menu items'\n  - powershell: |\n      gci -r \"${env:APPDATA}\\Microsoft\\Windows\\Start Menu\\Programs\\Python*\"\n    displayName: 'Capture per-user Start Menu items'\n\n  - powershell: |\n      gci -r \"HKLM:\\Software\\WOW6432Node\\Python\"\n    displayName: 'Capture per-machine 32-bit registry'\n  - powershell: |\n      gci -r \"HKLM:\\Software\\Python\"\n    displayName: 'Capture per-machine native registry'\n  - powershell: |\n      gci -r \"HKCU:\\Software\\Python\"\n    displayName: 'Capture current-user registry'\n\n  - script: |\n      python -m pip install \"azure<0.10\"\n      python -m pip uninstall -y azure python-dateutil six\n    displayName: 'Test (un)install package'\n    condition: and(succeeded(), 
not(variables['SkipTests']))\n\n  - powershell: |\n      if (Test-Path -Type Container \"$(Build.BinariesDirectory)\\Python\\Lib\\test\\test_ttk\") {\n        # New set of tests (3.12 and later)\n        python -m test -uall -v test_ttk test_tkinter test_idle\n      } else {\n        # Old set of tests\n        python -m test -uall -v test_ttk_guionly test_tk test_idle\n      }\n    displayName: 'Test Tkinter and Idle'\n    condition: and(succeeded(), not(variables['SkipTests']), not(variables['SkipTkTests']))\n\n  - script: >\n      \"$(SetupExe)\"\n      /passive\n      /uninstall\n      /log \"$(Logs)\\uninstall\\log.txt\"\n    displayName: 'Uninstall Python'\n\n  - task: PublishBuildArtifacts@1\n    displayName: 'Publish Artifact: logs'\n    condition: true\n    continueOnError: true\n    inputs:\n      PathtoPublish: '$(Build.ArtifactStagingDirectory)\\logs'\n      ArtifactName: msi_testlogs\n"
  },
  {
    "path": "windows-release/stage-test-nuget.yml",
    "content": "jobs:\n- job: Test_Nuget\n  displayName: Test Nuget\n\n  workspace:\n    clean: all\n\n  strategy:\n    matrix:\n      win32:\n        Package: pythonx86\n      amd64:\n        Package: python\n\n  steps:\n  - checkout: none\n\n  - task: DownloadBuildArtifacts@0\n    displayName: 'Download artifact: nuget'\n    inputs:\n      artifactName: nuget\n      downloadPath: $(Build.BinariesDirectory)\n\n  - task: NugetToolInstaller@0\n    inputs:\n      versionSpec: '>= 5'\n\n  - powershell: >\n      nuget install\n      $(Package)\n      -Source \"$(Build.BinariesDirectory)\\nuget\"\n      -OutputDirectory \"$(Build.BinariesDirectory)\\install\"\n      -Prerelease\n      -ExcludeVersion\n      -NonInteractive\n    displayName: 'Install Python'\n\n  - powershell: |\n      $p = gi \"$(Build.BinariesDirectory)\\install\\$(Package)\\tools\\python.exe\"\n      Write-Host \"##vso[task.prependpath]$(Split-Path -Parent $p)\"\n    displayName: 'Add test Python to PATH'\n\n  - script: |\n      python -c \"import sys; print(sys.version)\"\n    displayName: 'Collect version number'\n    condition: and(succeeded(), not(variables['SkipTests']))\n\n  - script: |\n      python -m site\n    displayName: 'Collect site'\n    condition: and(succeeded(), not(variables['SkipTests']))\n\n  - script: |\n      python -m pip install \"azure<0.10\"\n      python -m pip uninstall -y azure python-dateutil six\n    displayName: 'Test (un)install package'\n    condition: and(succeeded(), not(variables['SkipTests']))\n"
  },
  {
    "path": "windows-release/stage-test-pymanager.yml",
    "content": "parameters:\n  DoEmbed: false\n  DoFreethreaded: false\n\njobs:\n- job: Test_PyManager\n  displayName: Test PyManager\n\n  workspace:\n    clean: all\n\n  strategy:\n    matrix:\n      win32:\n        Name: win32\n      amd64:\n        Name: amd64\n      win32_test:\n        Name: win32_test\n      amd64_test:\n        Name: amd64_test\n      ${{ if eq(parameters.DoEmbed, 'true') }}:\n        win32_embed:\n          Name: win32_embed\n          SkipPipTest: 1\n        amd64_embed:\n          Name: amd64_embed\n          SkipPipTest: 1\n      ${{ if eq(parameters.DoFreethreaded, 'true') }}:\n        win32_t:\n          Name: win32_t\n        amd64_t:\n          Name: amd64_t\n\n  steps:\n  - checkout: none\n\n  # TODO: Install PyManager and use that to install the package\n  - download: current\n    artifact: layout_pymanager_$(Name)\n    displayName: 'Download artifact: layout_pymanager_$(Name)'\n\n  - powershell: |\n      $p = gi \"$(Pipeline.Workspace)\\layout_pymanager_$(Name)\\python*.exe\" | select -First 1\n      Write-Host \"##vso[task.setvariable variable=PYTHON]$p\"\n    displayName: 'Add test Python to PATH'\n\n  - powershell: |\n      & $env:PYTHON -c \"import sys; print(sys.version)\"\n    displayName: 'Collect version number'\n    condition: and(succeeded(), not(variables['SkipTests']))\n\n  - powershell: |\n      & $env:PYTHON -m site\n    displayName: 'Collect site'\n    condition: and(succeeded(), not(variables['SkipTests']))\n\n  - powershell: |\n      & $env:PYTHON -m pip install \"azure<0.10\"\n      & $env:PYTHON -m pip uninstall -y azure python-dateutil six\n    displayName: 'Test (un)install package'\n    condition: and(succeeded(), not(variables['SkipTests']), not(variables['SkipPipTest']))\n"
  },
  {
    "path": "windows-release/start-arm64vm.yml",
    "content": "parameters:\n  DoARM64: false\n  DoPGOARM64: false\n\njobs:\n# Only include the job if we need the VM, which means ARM64 PGO.\n- ${{ if eq(parameters.DoPGOARM64, 'true') }}:\n  - job: Start_ARM64VM\n    displayName: 'Ensure ARM64 VM is running'\n    dependsOn: []\n\n    steps:\n    - checkout: none\n\n    - task: AzureCLI@2\n      displayName: 'Start pythonarm64 and set auto-shutdown to (UTC now - 12h)'\n      inputs:\n        azureSubscription: \"Steve's VM\"   # WIF service connection name\n        scriptType: pscore\n        scriptLocation: inlineScript\n        inlineScript: |\n          $ErrorActionPreference = 'Stop'\n\n          $rg = 'cpythonbuild'\n          $vm = 'pythonarm64'\n\n          # Compute UTC time minus 12 hours, format HHmm (e.g. 1830)\n          $shutdownTime = (Get-Date).ToUniversalTime().AddHours(-12).ToString('HHmm')\n          Write-Host \"Setting auto-shutdown time to: $shutdownTime UTC\"\n\n          # Configure daily auto-shutdown in 12 hours\n          az vm auto-shutdown -g $rg -n $vm --time $shutdownTime | Out-Null\n          if ($?) {\n            Write-Host \"Successfully configured auto-shutdown for ARM64 VM in 12 hours.\"\n          } else {\n            Write-Host \"##[warning]Failed to configure ARM64 VM auto-shutdown.\"\n          }\n\n          # Start VM, but don't fail if it's already running\n          az vm start -g $rg -n $vm | Out-Null\n          $u = \"https://dev.azure.com/Python/cpython/_settings/agentqueues?queueId=24&view=agents\"\n          if ($?) {\n            Write-Host \"Successfully started ARM64 VM. Check $u for running status.\"\n          } else {\n            Write-Host \"##[warning]Failed to start ARM64 VM. Check $u in case it is already active, or ping Steve.\"\n          }\n"
  },
  {
    "path": "windows-release/tcltk-build.yml",
    "content": "parameters:\n- name: TclSourceTag\n  displayName: 'Tcl Source Tag'\n  type: string\n- name: TkSourceTag\n  displayName: 'Tk Source Tag'\n  type: string\n- name: IncludeTix\n  displayName: 'Include Tix (pre-3.13)'\n  type: boolean\n  default: false\n- name: TixSourceTag\n  displayName: 'Tix Source Tag'\n  type: string\n  default: tix-8.4.3.6\n- name: SigningCertificate\n  displayName: \"Code signing certificate\"\n  type: string\n  default: 'PythonSoftwareFoundation'\n  values:\n  - 'PythonSoftwareFoundation'\n  - 'TestSign'\n  - 'Unsigned'\n- name: SourcesRepo\n  displayName: 'Sources Repository'\n  type: string\n  default: 'https://github.com/python/cpython-source-deps'\n\n\nname: tcltk$(TkSourceTag)_$(Date:yyyyMMdd)$(Rev:.rr)\n\n\nresources:\n  repositories:\n  - repository: cpython\n    type: github\n    name: Python/cpython\n    endpoint: \"Steve's github repos\"\n\n\nvariables:\n- ${{ if eq(parameters.SigningCertificate, 'PythonSoftwareFoundation') }}:\n  - group: CPythonSign\n- ${{ if eq(parameters.SigningCertificate, 'TestSign') }}:\n  - group: CPythonTestSign\n- name: IntDir\n  value: '$(Build.BinariesDirectory)\\obj'\n- name: ExternalsDir\n  value: '$(Build.BinariesDirectory)\\externals'\n- name: OutDir\n  value: '$(Build.ArtifactStagingDirectory)'\n- name: Configuration\n  value: 'Release'\n- name: SigningDescription\n  value: 'Tcl/Tk for Python (${{ parameters.TclSourceTag }})'\n- name: SourcesRepo\n  value: ${{ parameters.SourcesRepo }}\n- name: TclSourceTag\n  value: ${{ parameters.TclSourceTag }}\n- name: TkSourceTag\n  value: ${{ parameters.TkSourceTag }}\n- name: TixSourceTag\n  value: ${{ parameters.TixSourceTag }}\n\n\njobs:\n- job: Build_TclTk\n  displayName: 'Tcl/Tk'\n  pool:\n    vmImage: windows-latest\n\n  workspace:\n    clean: all\n\n  steps:\n    - checkout: self\n    - checkout: cpython\n    - template: ./find-tools.yml\n\n    - powershell: |\n        git clone $(SourcesRepo) -b $(TclSourceTag) --depth 1 
\"$(ExternalsDir)\\$(TclSourceTag)\"\n      displayName: 'Check out Tcl sources'\n\n    - powershell: |\n        git clone $(SourcesRepo) -b $(TkSourceTag) --depth 1 \"$(ExternalsDir)\\$(TkSourceTag)\"\n      displayName: 'Check out Tk sources'\n\n    - ${{ if eq(parameters.IncludeTix, 'true') }}:\n      - powershell: |\n          git clone $(SourcesRepo) -b $(TixSourceTag) --depth 1 \"$(ExternalsDir)\\$(TixSourceTag)\"\n        displayName: 'Check out Tix sources'\n\n    # This msbuild.rsp file will be used by the build to forcibly override these variables\n    - powershell: |\n        del -Force -EA 0 msbuild.rsp\n        \"/p:IntDir=$(IntDir)\\\" >> msbuild.rsp\n        \"/p:ExternalsDir=$(ExternalsDir)\\\" >> msbuild.rsp\n        \"/p:tclDir=$(ExternalsDir)\\$(TclSourceTag)\\\" >> msbuild.rsp\n        \"/p:tkDir=$(ExternalsDir)\\$(TkSourceTag)\\\" >> msbuild.rsp\n        \"/p:tixDir=$(ExternalsDir)\\$(TixSourceTag)\\\" >> msbuild.rsp\n      displayName: 'Generate msbuild.rsp'\n\n    - powershell: |\n        & \"$(msbuild)\" cpython\\PCbuild\\tcl.vcxproj \"@msbuild.rsp\" /p:Platform=Win32 /p:tcltkDir=\"$(OutDir)\\win32\"\n        & \"$(msbuild)\" cpython\\PCbuild\\tk.vcxproj  \"@msbuild.rsp\" /p:Platform=Win32 /p:tcltkDir=\"$(OutDir)\\win32\"\n      displayName: 'Build for win32'\n\n    - powershell: |\n        & \"$(msbuild)\" cpython\\PCbuild\\tcl.vcxproj \"@msbuild.rsp\" /p:Platform=x64 /p:tcltkDir=\"$(OutDir)\\amd64\"\n        & \"$(msbuild)\" cpython\\PCbuild\\tk.vcxproj  \"@msbuild.rsp\" /p:Platform=x64 /p:tcltkDir=\"$(OutDir)\\amd64\"\n      displayName: 'Build for amd64'\n\n    - powershell: |\n        & \"$(msbuild)\" cpython\\PCbuild\\tcl.vcxproj \"@msbuild.rsp\" /p:Platform=ARM64 /p:tcltkDir=\"$(OutDir)\\arm64\"\n        & \"$(msbuild)\" cpython\\PCbuild\\tk.vcxproj  \"@msbuild.rsp\" /p:Platform=ARM64 /p:tcltkDir=\"$(OutDir)\\arm64\"\n      displayName: 'Build for arm64'\n\n    - ${{ if eq(parameters.IncludeTix, 'true') }}:\n      - powershell: |\n    
      & \"$(msbuild)\" cpython\\PCbuild\\tix.vcxproj \"@msbuild.rsp\" /p:Platform=Win32 /p:tcltkDir=\"$(OutDir)\\win32\"\n          & \"$(msbuild)\" cpython\\PCbuild\\tix.vcxproj \"@msbuild.rsp\" /p:Platform=x64 /p:tcltkDir=\"$(OutDir)\\amd64\"\n          & \"$(msbuild)\" cpython\\PCbuild\\tix.vcxproj \"@msbuild.rsp\" /p:Platform=ARM64 /p:tcltkDir=\"$(OutDir)\\arm64\"\n        displayName: 'Build Tix'\n\n    - ${{ if ne(parameters.SigningCertificate, 'Unsigned') }}:\n      - template: sign-files.yml\n        parameters:\n          Include: '-r *.dll'\n          WorkingDir: '$(OutDir)'\n          SigningCertificate: ${{ parameters.SigningCertificate }}\n\n    - publish: '$(OutDir)'\n      artifact: 'tcltk'\n      displayName: 'Publishing tcltk'\n"
  },
  {
    "path": "windows-release/uploadrelease.ps1",
    "content": "<#\n.Synopsis\n    Uploads from a VSTS release build layout to python.org\n.Description\n    Given the downloaded/extracted build artifact from a release\n    build run on python.visualstudio.com, this script uploads\n    the files to the correct locations.\n.Parameter build\n    The location on disk of the extracted build artifact.\n.Parameter user\n    The username to use when logging into the host.\n.Parameter server\n    The host or PuTTY session name.\n.Parameter hostkey\n    The expected SSH host key fingerprint (passed to plink/pscp -hostkey).\n.Parameter keyfile\n    The private key file used for authentication (passed to plink/pscp -i).\n.Parameter target\n    The subdirectory on the host to copy files to.\n.Parameter tests\n    The path to run download tests in.\n.Parameter embed\n    Optional path besides -build to locate ZIP files.\n.Parameter sbom\n    Optional path to locate SBOM (*.spdx.json) files.\n#>\nparam(\n    [Parameter(Mandatory=$true)][string]$build,\n    [Parameter(Mandatory=$true)][string]$user,\n    [Parameter(Mandatory=$true)][string]$server,\n    [Parameter(Mandatory=$true)][string]$hostkey,\n    [Parameter(Mandatory=$true)][string]$keyfile,\n    [string]$target=\"/srv/www.python.org/ftp/python\",\n    [string]$tests=${env:TEMP},\n    [string]$embed=$null,\n    [string]$sbom=$null\n)\n\nif (-not $build) { throw \"-build option is required\" }\nif (-not $user) { throw \"-user option is required\" }\n\n$tools = $script:MyInvocation.MyCommand.Path | Split-Path -parent;\n\nif (-not ((Test-Path \"$build\\win32\\python-*.exe\") -or (Test-Path \"$build\\amd64\\python-*.exe\"))) {\n    throw \"-build argument does not look like a 'build' directory\"\n}\n\nfunction find-putty-tool {\n    param ([string]$n)\n    $t = gcm $n -EA 0\n    if (-not $t) { $t = gcm \".\\$n\" -EA 0 }\n    if (-not $t) { $t = gcm \"${env:ProgramFiles}\\PuTTY\\$n\" -EA 0 }\n    if (-not $t) { $t = gcm \"${env:ProgramFiles(x86)}\\PuTTY\\$n\" -EA 0 }\n    if (-not $t) { throw \"Unable to locate $n.exe. 
Please put it on $PATH\" }\n    return gi $t.Path\n}\n\n$p = gci -r \"$build\\python-*.exe\" | `\n    ?{ $_.Name -match '^python-(\\d+\\.\\d+\\.\\d+)((a|b|rc)\\d+)?-.+' } | `\n    select -first 1 | `\n    %{ $Matches[1], $Matches[2] }\n\n\"Uploading version $($p[0]) $($p[1])\"\n\"  from: $build\"\n\"    to: $($server):$target/$($p[0])\"\n\"\"\n\n# Upload files to the server\n$pscp = find-putty-tool \"pscp\"\n$plink = find-putty-tool \"plink\"\n\n\"Upload using $pscp and $plink\"\n\"\"\n\n$d = \"$target/$($p[0])/\"\n& $plink -batch -hostkey $hostkey -noagent -i $keyfile \"$user@$server\" mkdir $d\n& $plink -batch -hostkey $hostkey -noagent -i $keyfile \"$user@$server\" chgrp downloads $d\n& $plink -batch -hostkey $hostkey -noagent -i $keyfile \"$user@$server\" chmod \"a+rx\" $d\n\n$dirs = gci \"$build\" -Directory\nif ($embed -and (Test-Path $embed)) {\n    $dirs = ($dirs, (gi $embed)) | %{ $_ }\n}\nif ($sbom -and (Test-Path $sbom)) {\n    $dirs = ($dirs, $sbom) | %{ $_ }\n}\n\nforeach ($a in $dirs) {\n    \"Uploading files from $($a.FullName)\"\n    pushd \"$($a.FullName)\"\n    $exe = gci *.exe, *.exe.asc, *.zip, *.zip.asc\n    $msi = gci *.msi, *.msi.asc, *.msu, *.msu.asc\n    $spdx_json = gci *.spdx.json\n    popd\n\n    if ($exe) {\n        & $pscp -batch -hostkey $hostkey -noagent -i $keyfile $exe.FullName \"$user@${server}:$d\"\n        if (-not $?) { throw \"Failed to upload $exe\" }\n    }\n\n    if ($spdx_json) {\n        & $pscp -batch -hostkey $hostkey -noagent -i $keyfile $spdx_json.FullName \"$user@${server}:$d\"\n        if (-not $?) 
{ Write-Host \"##[warning]Failed to upload $spdx_json\" }\n    }\n\n    if ($msi) {\n        $sd = \"$d$($a.Name)$($p[1])/\"\n        & $plink -batch -hostkey $hostkey -noagent -i $keyfile \"$user@$server\" mkdir $sd\n        & $plink -batch -hostkey $hostkey -noagent -i $keyfile \"$user@$server\" chgrp downloads $sd\n        & $plink -batch -hostkey $hostkey -noagent -i $keyfile \"$user@$server\" chmod \"a+rx\" $sd\n        & $pscp -batch -hostkey $hostkey -noagent -i $keyfile $msi.FullName \"$user@${server}:$sd\"\n        if (-not $?) { throw \"Failed to upload $msi\" }\n        & $plink -batch -hostkey $hostkey -noagent -i $keyfile \"$user@$server\" chgrp downloads $sd*\n        & $plink -batch -hostkey $hostkey -noagent -i $keyfile \"$user@$server\" chmod \"g-x,o+r\" $sd*\n    }\n}\n\n& $plink -batch -hostkey $hostkey -noagent -i $keyfile \"$user@$server\" chgrp downloads $d*\n& $plink -batch -hostkey $hostkey -noagent -i $keyfile \"$user@$server\" chmod \"g-x,o+r\" $d*\n& $pscp -batch -hostkey $hostkey -noagent -i $keyfile -ls \"$user@${server}:$d\"\n"
  }
]