[
  {
    "path": ".eslintrc.json",
    "content": "// ~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\".\n{\n  \"env\": {\n    \"jest\": true,\n    \"node\": true\n  },\n  \"root\": true,\n  \"plugins\": [\n    \"@typescript-eslint\",\n    \"import\",\n    \"@stylistic\"\n  ],\n  \"parser\": \"@typescript-eslint/parser\",\n  \"parserOptions\": {\n    \"ecmaVersion\": 2018,\n    \"sourceType\": \"module\",\n    \"project\": \"./tsconfig.dev.json\"\n  },\n  \"extends\": [\n    \"plugin:import/typescript\"\n  ],\n  \"settings\": {\n    \"import/parsers\": {\n      \"@typescript-eslint/parser\": [\n        \".ts\",\n        \".tsx\"\n      ]\n    },\n    \"import/resolver\": {\n      \"node\": {},\n      \"typescript\": {\n        \"project\": \"./tsconfig.dev.json\",\n        \"alwaysTryTypes\": true\n      }\n    }\n  },\n  \"ignorePatterns\": [\n    \"*.js\",\n    \"*.d.ts\",\n    \"node_modules/\",\n    \"*.generated.ts\",\n    \"coverage\",\n    \"!.projenrc.ts\",\n    \"!projenrc/**/*.ts\"\n  ],\n  \"rules\": {\n    \"@stylistic/indent\": [\n      \"error\",\n      2\n    ],\n    \"@stylistic/quotes\": [\n      \"error\",\n      \"single\",\n      {\n        \"avoidEscape\": true\n      }\n    ],\n    \"@stylistic/comma-dangle\": [\n      \"error\",\n      \"always-multiline\"\n    ],\n    \"@stylistic/comma-spacing\": [\n      \"error\",\n      {\n        \"before\": false,\n        \"after\": true\n      }\n    ],\n    \"@stylistic/no-multi-spaces\": [\n      \"error\",\n      {\n        \"ignoreEOLComments\": false\n      }\n    ],\n    \"@stylistic/array-bracket-spacing\": [\n      \"error\",\n      \"never\"\n    ],\n    \"@stylistic/array-bracket-newline\": [\n      \"error\",\n      \"consistent\"\n    ],\n    \"@stylistic/object-curly-spacing\": [\n      \"error\",\n      \"always\"\n    ],\n    \"@stylistic/object-curly-newline\": [\n      \"error\",\n      {\n        \"multiline\": true,\n        \"consistent\": true\n      }\n    ],\n    
\"@stylistic/object-property-newline\": [\n      \"error\",\n      {\n        \"allowAllPropertiesOnSameLine\": true\n      }\n    ],\n    \"@stylistic/keyword-spacing\": [\n      \"error\"\n    ],\n    \"@stylistic/brace-style\": [\n      \"error\",\n      \"1tbs\",\n      {\n        \"allowSingleLine\": true\n      }\n    ],\n    \"@stylistic/space-before-blocks\": [\n      \"error\"\n    ],\n    \"@stylistic/member-delimiter-style\": [\n      \"error\"\n    ],\n    \"@stylistic/semi\": [\n      \"error\",\n      \"always\"\n    ],\n    \"@stylistic/max-len\": [\n      \"error\",\n      {\n        \"code\": 150,\n        \"ignoreUrls\": true,\n        \"ignoreStrings\": true,\n        \"ignoreTemplateLiterals\": true,\n        \"ignoreComments\": true,\n        \"ignoreRegExpLiterals\": true\n      }\n    ],\n    \"@stylistic/quote-props\": [\n      \"error\",\n      \"consistent-as-needed\"\n    ],\n    \"@stylistic/key-spacing\": [\n      \"error\"\n    ],\n    \"@stylistic/no-multiple-empty-lines\": [\n      \"error\"\n    ],\n    \"@stylistic/no-trailing-spaces\": [\n      \"error\"\n    ],\n    \"curly\": [\n      \"error\",\n      \"multi-line\",\n      \"consistent\"\n    ],\n    \"@typescript-eslint/no-require-imports\": \"error\",\n    \"import/no-extraneous-dependencies\": [\n      \"error\",\n      {\n        \"devDependencies\": [\n          \"**/lib/__tests__/**\",\n          \"**/build-tools/**\",\n          \".projenrc.ts\",\n          \"projenrc/**/*.ts\"\n        ],\n        \"optionalDependencies\": false,\n        \"peerDependencies\": true\n      }\n    ],\n    \"import/no-unresolved\": [\n      \"error\"\n    ],\n    \"import/order\": [\n      \"warn\",\n      {\n        \"groups\": [\n          \"builtin\",\n          \"external\"\n        ],\n        \"alphabetize\": {\n          \"order\": \"asc\",\n          \"caseInsensitive\": true\n        }\n      }\n    ],\n    \"import/no-duplicates\": [\n      \"error\"\n    ],\n    \"no-shadow\": 
[\n      \"off\"\n    ],\n    \"@typescript-eslint/no-shadow\": \"error\",\n    \"@typescript-eslint/no-floating-promises\": \"error\",\n    \"no-return-await\": [\n      \"off\"\n    ],\n    \"@typescript-eslint/return-await\": \"error\",\n    \"dot-notation\": [\n      \"error\"\n    ],\n    \"no-bitwise\": [\n      \"error\"\n    ],\n    \"@typescript-eslint/member-ordering\": [\n      \"error\",\n      {\n        \"default\": [\n          \"public-static-field\",\n          \"public-static-method\",\n          \"protected-static-field\",\n          \"protected-static-method\",\n          \"private-static-field\",\n          \"private-static-method\",\n          \"field\",\n          \"constructor\",\n          \"method\"\n        ]\n      }\n    ]\n  },\n  \"overrides\": [\n    {\n      \"files\": [\n        \".projenrc.ts\"\n      ],\n      \"rules\": {\n        \"@typescript-eslint/no-require-imports\": \"off\",\n        \"import/no-extraneous-dependencies\": \"off\"\n      }\n    }\n  ]\n}\n"
  },
  {
    "path": ".gitattributes",
    "content": "# ~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\".\n\n* text=auto eol=lf\n*.snap linguist-generated\n/.eslintrc.json linguist-generated\n/.gitattributes linguist-generated\n/.github/workflows/auto-approve.yml linguist-generated\n/.github/workflows/auto-queue.yml linguist-generated\n/.github/workflows/build.yml linguist-generated\n/.github/workflows/pull-request-lint.yml linguist-generated\n/.github/workflows/release.yml linguist-generated\n/.github/workflows/retry-automerge.yml linguist-generated\n/.github/workflows/upgrade-cdklabs-projen-project-types-main.yml linguist-generated\n/.github/workflows/upgrade-dev-deps-main.yml linguist-generated\n/.github/workflows/upgrade-main.yml linguist-generated\n/.gitignore linguist-generated\n/.npmignore linguist-generated\n/.projen/** linguist-generated\n/.projen/deps.json linguist-generated\n/.projen/files.json linguist-generated\n/.projen/jest-snapshot-resolver.js linguist-generated\n/.projen/tasks.json linguist-generated\n/LICENSE linguist-generated\n/package.json linguist-generated\n/tsconfig.dev.json linguist-generated\n/tsconfig.json linguist-generated\n/yarn.lock linguist-generated"
  },
  {
    "path": ".github/PULL_REQUEST_TEMPLATE.md",
    "content": "\n-----\n\nBy submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license."
  },
  {
    "path": ".github/workflows/auto-approve.yml",
    "content": "# ~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\".\n\nname: auto-approve\non:\n  pull_request_target:\n    types:\n      - labeled\n      - opened\n      - synchronize\n      - reopened\n      - ready_for_review\njobs:\n  approve:\n    runs-on: ubuntu-latest\n    permissions:\n      pull-requests: write\n    if: contains(github.event.pull_request.labels.*.name, 'auto-approve') && (github.event.pull_request.user.login == 'cdklabs-automation' || github.event.pull_request.user.login == 'dependabot[bot]')\n    steps:\n      - uses: hmarr/auto-approve-action@f0939ea97e9205ef24d872e76833fa908a770363\n        with:\n          github-token: ${{ secrets.GITHUB_TOKEN }}\n"
  },
  {
    "path": ".github/workflows/auto-queue.yml",
    "content": "# ~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\".\n\nname: auto-queue\non:\n  pull_request_target:\n    types:\n      - opened\n      - reopened\n      - ready_for_review\njobs:\n  enableAutoQueue:\n    name: \"Set AutoQueue on PR #${{ github.event.number }}\"\n    runs-on: ubuntu-latest\n    permissions:\n      pull-requests: write\n      contents: write\n    steps:\n      - uses: peter-evans/enable-pull-request-automerge@v3\n        with:\n          token: ${{ secrets.PROJEN_GITHUB_TOKEN }}\n          pull-request-number: ${{ github.event.number }}\n          merge-method: squash\n"
  },
  {
    "path": ".github/workflows/build.yml",
    "content": "# ~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\".\n\nname: build\non:\n  pull_request: {}\n  workflow_dispatch: {}\n  merge_group: {}\njobs:\n  build:\n    runs-on: ubuntu-latest\n    permissions:\n      contents: write\n    outputs:\n      self_mutation_happened: ${{ steps.self_mutation.outputs.self_mutation_happened }}\n    env:\n      CI: \"true\"\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v5\n        with:\n          ref: ${{ github.event.pull_request.head.ref }}\n          repository: ${{ github.event.pull_request.head.repo.full_name }}\n      - name: Setup Node.js\n        uses: actions/setup-node@v5\n        with:\n          node-version: lts/*\n      - name: Install dependencies\n        run: yarn install --check-files\n      - name: build\n        run: npx projen build\n      - name: Find mutations\n        id: self_mutation\n        run: |-\n          git add .\n          git diff --staged --patch --exit-code > repo.patch || echo \"self_mutation_happened=true\" >> $GITHUB_OUTPUT\n        shell: bash\n        working-directory: ./\n      - name: Upload patch\n        if: steps.self_mutation.outputs.self_mutation_happened\n        uses: actions/upload-artifact@v4.6.2\n        with:\n          name: repo.patch\n          path: repo.patch\n          overwrite: true\n      - name: Fail build on mutation\n        if: steps.self_mutation.outputs.self_mutation_happened\n        run: |-\n          echo \"::error::Files were changed during build (see build log). 
If this was triggered from a fork, you will need to update your branch.\"\n          cat repo.patch\n          exit 1\n  self-mutation:\n    needs: build\n    runs-on: ubuntu-latest\n    permissions:\n      contents: write\n    if: always() && needs.build.outputs.self_mutation_happened && !(github.event.pull_request.head.repo.full_name != github.repository)\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v5\n        with:\n          token: ${{ secrets.PROJEN_GITHUB_TOKEN }}\n          ref: ${{ github.event.pull_request.head.ref }}\n          repository: ${{ github.event.pull_request.head.repo.full_name }}\n      - name: Download patch\n        uses: actions/download-artifact@v5\n        with:\n          name: repo.patch\n          path: ${{ runner.temp }}\n      - name: Apply patch\n        run: '[ -s ${{ runner.temp }}/repo.patch ] && git apply ${{ runner.temp }}/repo.patch || echo \"Empty patch. Skipping.\"'\n      - name: Set git identity\n        run: |-\n          git config user.name \"github-actions[bot]\"\n          git config user.email \"41898282+github-actions[bot]@users.noreply.github.com\"\n      - name: Push changes\n        env:\n          PULL_REQUEST_REF: ${{ github.event.pull_request.head.ref }}\n        run: |-\n          git add .\n          git commit -s -m \"chore: self mutation\"\n          git push origin \"HEAD:$PULL_REQUEST_REF\"\n"
  },
  {
    "path": ".github/workflows/pull-request-lint.yml",
    "content": "# ~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\".\n\nname: pull-request-lint\non:\n  pull_request_target:\n    types:\n      - labeled\n      - opened\n      - synchronize\n      - reopened\n      - ready_for_review\n      - edited\n  merge_group: {}\njobs:\n  validate:\n    name: Validate PR title\n    runs-on: ubuntu-latest\n    permissions:\n      pull-requests: write\n    if: (github.event_name == 'pull_request' || github.event_name == 'pull_request_target')\n    steps:\n      - uses: amannn/action-semantic-pull-request@v6\n        env:\n          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n        with:\n          types: |-\n            feat\n            fix\n            chore\n          requireScope: false\n"
  },
  {
    "path": ".github/workflows/release.yml",
    "content": "# ~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\".\n\nname: release\non:\n  push:\n    branches:\n      - main\n  workflow_dispatch: {}\nconcurrency:\n  group: ${{ github.workflow }}\n  cancel-in-progress: false\njobs:\n  release:\n    runs-on: ubuntu-latest\n    permissions:\n      contents: write\n    outputs:\n      latest_commit: ${{ steps.git_remote.outputs.latest_commit }}\n      tag_exists: ${{ steps.check_tag_exists.outputs.exists }}\n    env:\n      CI: \"true\"\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v5\n        with:\n          fetch-depth: 0\n      - name: Set git identity\n        run: |-\n          git config user.name \"github-actions[bot]\"\n          git config user.email \"41898282+github-actions[bot]@users.noreply.github.com\"\n      - name: Setup Node.js\n        uses: actions/setup-node@v5\n        with:\n          node-version: lts/*\n      - name: Install dependencies\n        run: yarn install --check-files --frozen-lockfile\n      - name: release\n        run: npx projen release\n      - name: Check if version has already been tagged\n        id: check_tag_exists\n        run: |-\n          TAG=$(cat dist/releasetag.txt)\n          ([ ! -z \"$TAG\" ] && git ls-remote -q --exit-code --tags origin $TAG && (echo \"exists=true\" >> $GITHUB_OUTPUT)) || (echo \"exists=false\" >> $GITHUB_OUTPUT)\n          cat $GITHUB_OUTPUT\n      - name: Check for new commits\n        id: git_remote\n        run: |-\n          echo \"latest_commit=$(git ls-remote origin -h ${{ github.ref }} | cut -f1)\" >> $GITHUB_OUTPUT\n          cat $GITHUB_OUTPUT\n        shell: bash\n      - name: Backup artifact permissions\n        if: ${{ steps.git_remote.outputs.latest_commit == github.sha }}\n        run: cd dist && getfacl -R . 
> permissions-backup.acl\n        continue-on-error: true\n      - name: Upload artifact\n        if: ${{ steps.git_remote.outputs.latest_commit == github.sha }}\n        uses: actions/upload-artifact@v4.6.2\n        with:\n          name: build-artifact\n          path: dist\n          overwrite: true\n  release_github:\n    name: Publish to GitHub Releases\n    needs:\n      - release\n      - release_npm\n    runs-on: ubuntu-latest\n    permissions:\n      contents: write\n    environment: release\n    if: needs.release.outputs.tag_exists != 'true' && needs.release.outputs.latest_commit == github.sha\n    steps:\n      - uses: actions/setup-node@v5\n        with:\n          node-version: lts/*\n      - name: Download build artifacts\n        uses: actions/download-artifact@v5\n        with:\n          name: build-artifact\n          path: dist\n      - name: Restore build artifact permissions\n        run: cd dist && setfacl --restore=permissions-backup.acl\n        continue-on-error: true\n      - name: Release\n        env:\n          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n        run: errout=$(mktemp); gh release create $(cat dist/releasetag.txt) -R $GITHUB_REPOSITORY -F dist/changelog.md -t $(cat dist/releasetag.txt) --target $GITHUB_SHA 2> $errout && true; exitcode=$?; if [ $exitcode -ne 0 ] && ! 
grep -q \"Release.tag_name already exists\" $errout; then cat $errout; exit $exitcode; fi\n  release_npm:\n    name: Publish to npm\n    needs: release\n    runs-on: ubuntu-latest\n    permissions:\n      id-token: write\n      contents: read\n    environment: release\n    if: needs.release.outputs.tag_exists != 'true' && needs.release.outputs.latest_commit == github.sha\n    steps:\n      - uses: actions/setup-node@v5\n        with:\n          node-version: lts/*\n      - name: Download build artifacts\n        uses: actions/download-artifact@v5\n        with:\n          name: build-artifact\n          path: dist\n      - name: Restore build artifact permissions\n        run: cd dist && setfacl --restore=permissions-backup.acl\n        continue-on-error: true\n      - name: Release\n        env:\n          NPM_DIST_TAG: latest\n          NPM_REGISTRY: registry.npmjs.org\n          NPM_CONFIG_PROVENANCE: \"true\"\n          NPM_TRUSTED_PUBLISHER: \"true\"\n        run: npx -p publib@latest publib-npm\n"
  },
  {
    "path": ".github/workflows/retry-automerge.yml",
    "content": "# ~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\".\n\nname: retry-automerge\non:\n  pull_request:\n    types:\n      - auto_merge_disabled\njobs:\n  retry-automerge:\n    runs-on: ubuntu-latest\n    permissions: {}\n    steps:\n      - name: Print github context\n        env:\n          GITHUB_CONTEXT: ${{ toJson(github) }}\n        run: echo \"$GITHUB_CONTEXT\"\n      - name: Print github event file\n        run: jq . \"$GITHUB_EVENT_PATH\"\n"
  },
  {
    "path": ".github/workflows/stale.yml",
    "content": "# ~~ Generated by projen. To modify, edit .projenrc.js and run \"npx projen\".\n\nname: stale\non:\n  schedule:\n    - cron: 0 1 * * *\n  workflow_dispatch: {}\njobs:\n  stale:\n    runs-on: ubuntu-latest\n    permissions:\n      issues: write\n      pull-requests: write\n    steps:\n      - uses: actions/stale@v4\n        with:\n          days-before-stale: -1\n          days-before-close: -1\n          days-before-pr-stale: 14\n          days-before-pr-close: 2\n          stale-pr-message: This pull request is now marked as stale because it hasn't seen activity for a while. Add a comment or it will be closed soon. If you wish to exclude this issue from being marked as stale, add the \"backlog\" label.\n          close-pr-message: Closing this pull request as it hasn't seen activity for a while. Please add a comment @mentioning a maintainer to reopen. If you wish to exclude this issue from being marked as stale, add the \"backlog\" label.\n          stale-pr-label: stale\n          exempt-pr-labels: backlog\n          days-before-issue-stale: 60\n          days-before-issue-close: 7\n          stale-issue-message: This issue is now marked as stale because it hasn't seen activity for a while. Add a comment or it will be closed soon. If you wish to exclude this issue from being marked as stale, add the \"backlog\" label.\n          close-issue-message: Closing this issue as it hasn't seen activity for a while. Please add a comment @mentioning a maintainer to reopen. If you wish to exclude this issue from being marked as stale, add the \"backlog\" label.\n          stale-issue-label: stale\n          exempt-issue-labels: backlog\n"
  },
  {
    "path": ".github/workflows/upgrade-cdklabs-projen-project-types-main.yml",
    "content": "# ~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\".\n\nname: upgrade-cdklabs-projen-project-types-main\non:\n  workflow_dispatch: {}\njobs:\n  upgrade:\n    name: Upgrade\n    runs-on: ubuntu-latest\n    permissions:\n      contents: read\n    outputs:\n      patch_created: ${{ steps.create_patch.outputs.patch_created }}\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v5\n        with:\n          ref: main\n      - name: Setup Node.js\n        uses: actions/setup-node@v5\n      - name: Install dependencies\n        run: yarn install --check-files --frozen-lockfile\n      - name: Upgrade dependencies\n        run: npx projen upgrade-cdklabs-projen-project-types\n      - name: Find mutations\n        id: create_patch\n        run: |-\n          git add .\n          git diff --staged --patch --exit-code > repo.patch || echo \"patch_created=true\" >> $GITHUB_OUTPUT\n        shell: bash\n        working-directory: ./\n      - name: Upload patch\n        if: steps.create_patch.outputs.patch_created\n        uses: actions/upload-artifact@v4.6.2\n        with:\n          name: repo.patch\n          path: repo.patch\n          overwrite: true\n  pr:\n    name: Create Pull Request\n    needs: upgrade\n    runs-on: ubuntu-latest\n    permissions:\n      contents: read\n    if: ${{ needs.upgrade.outputs.patch_created }}\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v5\n        with:\n          ref: main\n      - name: Download patch\n        uses: actions/download-artifact@v5\n        with:\n          name: repo.patch\n          path: ${{ runner.temp }}\n      - name: Apply patch\n        run: '[ -s ${{ runner.temp }}/repo.patch ] && git apply ${{ runner.temp }}/repo.patch || echo \"Empty patch. 
Skipping.\"'\n      - name: Set git identity\n        run: |-\n          git config user.name \"github-actions[bot]\"\n          git config user.email \"41898282+github-actions[bot]@users.noreply.github.com\"\n      - name: Create Pull Request\n        id: create-pr\n        uses: peter-evans/create-pull-request@v7\n        with:\n          token: ${{ secrets.PROJEN_GITHUB_TOKEN }}\n          commit-message: |-\n            chore(deps): upgrade cdklabs-projen-project-types\n\n            Upgrades project dependencies. See details in [workflow run].\n\n            [Workflow Run]: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}\n\n            ------\n\n            *Automatically created by projen via the \"upgrade-cdklabs-projen-project-types-main\" workflow*\n          branch: github-actions/upgrade-cdklabs-projen-project-types-main\n          title: \"chore(deps): upgrade cdklabs-projen-project-types\"\n          labels: auto-approve\n          body: |-\n            Upgrades project dependencies. See details in [workflow run].\n\n            [Workflow Run]: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}\n\n            ------\n\n            *Automatically created by projen via the \"upgrade-cdklabs-projen-project-types-main\" workflow*\n          author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>\n          committer: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>\n          signoff: true\n"
  },
  {
    "path": ".github/workflows/upgrade-dev-deps-main.yml",
    "content": "# ~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\".\n\nname: upgrade-dev-deps-main\non:\n  workflow_dispatch: {}\n  schedule:\n    - cron: 0 22 * * 1\njobs:\n  upgrade:\n    name: Upgrade\n    runs-on: ubuntu-latest\n    permissions:\n      contents: read\n    outputs:\n      patch_created: ${{ steps.create_patch.outputs.patch_created }}\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v5\n        with:\n          ref: main\n      - name: Setup Node.js\n        uses: actions/setup-node@v5\n        with:\n          node-version: lts/*\n      - name: Install dependencies\n        run: yarn install --check-files --frozen-lockfile\n      - name: Upgrade dependencies\n        run: npx projen upgrade-dev-deps\n      - name: Find mutations\n        id: create_patch\n        run: |-\n          git add .\n          git diff --staged --patch --exit-code > repo.patch || echo \"patch_created=true\" >> $GITHUB_OUTPUT\n        shell: bash\n        working-directory: ./\n      - name: Upload patch\n        if: steps.create_patch.outputs.patch_created\n        uses: actions/upload-artifact@v4.6.2\n        with:\n          name: repo.patch\n          path: repo.patch\n          overwrite: true\n  pr:\n    name: Create Pull Request\n    needs: upgrade\n    runs-on: ubuntu-latest\n    permissions:\n      contents: read\n    if: ${{ needs.upgrade.outputs.patch_created }}\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v5\n        with:\n          ref: main\n      - name: Download patch\n        uses: actions/download-artifact@v5\n        with:\n          name: repo.patch\n          path: ${{ runner.temp }}\n      - name: Apply patch\n        run: '[ -s ${{ runner.temp }}/repo.patch ] && git apply ${{ runner.temp }}/repo.patch || echo \"Empty patch. 
Skipping.\"'\n      - name: Set git identity\n        run: |-\n          git config user.name \"github-actions[bot]\"\n          git config user.email \"41898282+github-actions[bot]@users.noreply.github.com\"\n      - name: Create Pull Request\n        id: create-pr\n        uses: peter-evans/create-pull-request@v7\n        with:\n          token: ${{ secrets.PROJEN_GITHUB_TOKEN }}\n          commit-message: |-\n            chore(deps): upgrade dev dependencies\n\n            Upgrades project dependencies. See details in [workflow run].\n\n            [Workflow Run]: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}\n\n            ------\n\n            *Automatically created by projen via the \"upgrade-dev-deps-main\" workflow*\n          branch: github-actions/upgrade-dev-deps-main\n          title: \"chore(deps): upgrade dev dependencies\"\n          labels: auto-approve\n          body: |-\n            Upgrades project dependencies. See details in [workflow run].\n\n            [Workflow Run]: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}\n\n            ------\n\n            *Automatically created by projen via the \"upgrade-dev-deps-main\" workflow*\n          author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>\n          committer: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>\n          signoff: true\n"
  },
  {
    "path": ".github/workflows/upgrade-main.yml",
    "content": "# ~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\".\n\nname: upgrade-main\non:\n  workflow_dispatch: {}\n  schedule:\n    - cron: 0 18 * * 1\njobs:\n  upgrade:\n    name: Upgrade\n    runs-on: ubuntu-latest\n    permissions:\n      contents: read\n    outputs:\n      patch_created: ${{ steps.create_patch.outputs.patch_created }}\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v5\n        with:\n          ref: main\n      - name: Setup Node.js\n        uses: actions/setup-node@v5\n        with:\n          node-version: lts/*\n      - name: Install dependencies\n        run: yarn install --check-files --frozen-lockfile\n      - name: Upgrade dependencies\n        run: npx projen upgrade\n      - name: Find mutations\n        id: create_patch\n        run: |-\n          git add .\n          git diff --staged --patch --exit-code > repo.patch || echo \"patch_created=true\" >> $GITHUB_OUTPUT\n        shell: bash\n        working-directory: ./\n      - name: Upload patch\n        if: steps.create_patch.outputs.patch_created\n        uses: actions/upload-artifact@v4.6.2\n        with:\n          name: repo.patch\n          path: repo.patch\n          overwrite: true\n  pr:\n    name: Create Pull Request\n    needs: upgrade\n    runs-on: ubuntu-latest\n    permissions:\n      contents: read\n    if: ${{ needs.upgrade.outputs.patch_created }}\n    steps:\n      - name: Checkout\n        uses: actions/checkout@v5\n        with:\n          ref: main\n      - name: Download patch\n        uses: actions/download-artifact@v5\n        with:\n          name: repo.patch\n          path: ${{ runner.temp }}\n      - name: Apply patch\n        run: '[ -s ${{ runner.temp }}/repo.patch ] && git apply ${{ runner.temp }}/repo.patch || echo \"Empty patch. 
Skipping.\"'\n      - name: Set git identity\n        run: |-\n          git config user.name \"github-actions[bot]\"\n          git config user.email \"41898282+github-actions[bot]@users.noreply.github.com\"\n      - name: Create Pull Request\n        id: create-pr\n        uses: peter-evans/create-pull-request@v7\n        with:\n          token: ${{ secrets.PROJEN_GITHUB_TOKEN }}\n          commit-message: |-\n            fix(deps): upgrade dependencies\n\n            Upgrades project dependencies. See details in [workflow run].\n\n            [Workflow Run]: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}\n\n            ------\n\n            *Automatically created by projen via the \"upgrade-main\" workflow*\n          branch: github-actions/upgrade-main\n          title: \"fix(deps): upgrade dependencies\"\n          labels: auto-approve\n          body: |-\n            Upgrades project dependencies. See details in [workflow run].\n\n            [Workflow Run]: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}\n\n            ------\n\n            *Automatically created by projen via the \"upgrade-main\" workflow*\n          author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>\n          committer: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>\n          signoff: true\n"
  },
  {
    "path": ".gitignore",
    "content": "# ~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\".\n!/.gitattributes\n!/.projen/tasks.json\n!/.projen/deps.json\n!/.projen/files.json\n!/.github/workflows/pull-request-lint.yml\n!/.github/workflows/auto-approve.yml\n!/package.json\n!/LICENSE\n!/.npmignore\nlogs\n*.log\nnpm-debug.log*\nyarn-debug.log*\nyarn-error.log*\nlerna-debug.log*\nreport.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json\npids\n*.pid\n*.seed\n*.pid.lock\nlib-cov\ncoverage\n*.lcov\n.nyc_output\nbuild/Release\nnode_modules/\njspm_packages/\n*.tsbuildinfo\n.eslintcache\n*.tgz\n.yarn-integrity\n.cache\n/test-reports/\njunit.xml\n/coverage/\n!/.github/workflows/build.yml\n/dist/changelog.md\n/dist/version.txt\n!/.github/workflows/release.yml\n!/lib/__tests__/\n!/tsconfig.json\n!/tsconfig.dev.json\n!/lib/\n/lib/**/*.js\n/lib/**/*.d.ts\n/lib/**/*.d.ts.map\n/dist/\n!/.projen/jest-snapshot-resolver.js\n!/.eslintrc.json\n!/.github/workflows/retry-automerge.yml\n!/.github/workflows/auto-queue.yml\n!/.github/workflows/upgrade-cdklabs-projen-project-types-main.yml\n!/.github/workflows/upgrade-main.yml\n!/.github/workflows/upgrade-dev-deps-main.yml\ncdk.out\npipeline/*.js\npipeline/*.d.ts\n!lib/package-integrity/handler/JSONStream.d.ts\n!/.projenrc.ts\n"
  },
  {
    "path": ".npmignore",
    "content": "# ~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\".\n/.projen/\n/test-reports/\njunit.xml\n/coverage/\npermissions-backup.acl\n/dist/changelog.md\n/dist/version.txt\n/tsconfig.dev.json\n!/lib/\n!/lib/**/*.js\ndist\n/tsconfig.json\n/.github/\n/.vscode/\n/.idea/\n/.projenrc.js\n/.eslintrc.json\n/lib/**/*.ts\n!/lib/**/*.d.ts\n!/lib/**/node_modules/**\n/lib/__tests__/\ntsconfig.json\ntsconfig.dev.json\ntsconfig.tsbuildinfo\n/build-*.sh\ncdk.out/\ncdk.json\n/.gitattributes\n/.projenrc.ts\n/projenrc\n"
  },
  {
    "path": ".projen/deps.json",
    "content": "{\n  \"dependencies\": [\n    {\n      \"name\": \"@aws-sdk/client-cloudwatch\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"@aws-sdk/client-codepipeline\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"@aws-sdk/client-s3\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"@aws-sdk/client-secrets-manager\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"@aws-sdk/client-ssm\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"@babel/plugin-transform-modules-commonjs\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"@stylistic/eslint-plugin\",\n      \"version\": \"^2\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"@types/adm-zip\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"@types/aws-lambda\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"@types/follow-redirects\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"@types/fs-extra\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"@types/jest\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"@types/node\",\n      \"version\": \"^18\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"@types/tar\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"@typescript-eslint/eslint-plugin\",\n      \"version\": \"^8\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"@typescript-eslint/parser\",\n      \"version\": \"^8\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"adm-zip\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"aws-cdk\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"aws-cdk-lib\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"cdklabs-projen-project-types\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"commit-and-tag-version\",\n      \"version\": \"^12\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": 
\"constructs\",\n      \"version\": \"^10.0.0\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"esbuild\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"eslint-import-resolver-typescript\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"eslint-plugin-import\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"eslint\",\n      \"version\": \"^9\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"follow-redirects\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"fs-extra\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"jest\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"jest-junit\",\n      \"version\": \"^16\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"JSONStream\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"minipass\",\n      \"version\": \"3.2.1\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"node-ical\",\n      \"version\": \"0.15.1\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"projen\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"rrule\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"standard-version\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"tar\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"ts-jest\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"ts-node\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"typescript\",\n      \"version\": \"~5.0.0\",\n      \"type\": \"build\"\n    },\n    {\n      \"name\": \"aws-cdk-lib\",\n      \"version\": \"^2.187.0\",\n      \"type\": \"peer\"\n    },\n    {\n      \"name\": \"constructs\",\n      \"type\": \"peer\"\n    },\n    {\n      \"name\": \"changelog-parser\",\n      \"type\": \"runtime\"\n    }\n  ],\n  \"//\": \"~~ Generated by projen. To modify, edit .projenrc.ts and run \\\"npx projen\\\".\"\n}\n"
  },
  {
    "path": ".projen/files.json",
    "content": "{\n  \"files\": [\n    \".eslintrc.json\",\n    \".gitattributes\",\n    \".github/workflows/auto-approve.yml\",\n    \".github/workflows/auto-queue.yml\",\n    \".github/workflows/build.yml\",\n    \".github/workflows/pull-request-lint.yml\",\n    \".github/workflows/release.yml\",\n    \".github/workflows/retry-automerge.yml\",\n    \".github/workflows/upgrade-cdklabs-projen-project-types-main.yml\",\n    \".github/workflows/upgrade-dev-deps-main.yml\",\n    \".github/workflows/upgrade-main.yml\",\n    \".gitignore\",\n    \".npmignore\",\n    \".projen/deps.json\",\n    \".projen/files.json\",\n    \".projen/jest-snapshot-resolver.js\",\n    \".projen/tasks.json\",\n    \"LICENSE\",\n    \"tsconfig.dev.json\",\n    \"tsconfig.json\"\n  ],\n  \"//\": \"~~ Generated by projen. To modify, edit .projenrc.ts and run \\\"npx projen\\\".\"\n}\n"
  },
  {
    "path": ".projen/jest-snapshot-resolver.js",
    "content": "const path = require(\"path\");\nconst libtest = \"lib/__tests__\";\nconst srctest= \"lib/__tests__\";\nmodule.exports = {\n  resolveSnapshotPath: (test, ext) => {\n            const fullpath = test.replace(libtest, srctest);\n            return path.join(path.dirname(fullpath), \"__snapshots__\", path.basename(fullpath, \".js\") + \".ts\" + ext);\n        },\n  resolveTestPath: (snap, ext) => {\n            const filename = path.basename(snap, \".ts\" + ext) + \".js\";\n            const dir = path.dirname(path.dirname(snap)).replace(srctest, libtest);\n            return path.join(dir, filename);\n        },\n  testPathForConsistencyCheck: path.join('some', '__tests__', 'example.test.js')\n};"
  },
  {
    "path": ".projen/tasks.json",
    "content": "{\n  \"tasks\": {\n    \"build\": {\n      \"name\": \"build\",\n      \"description\": \"Full release build\",\n      \"steps\": [\n        {\n          \"spawn\": \"default\"\n        },\n        {\n          \"spawn\": \"pre-compile\"\n        },\n        {\n          \"spawn\": \"compile\"\n        },\n        {\n          \"spawn\": \"post-compile\"\n        },\n        {\n          \"spawn\": \"test\"\n        },\n        {\n          \"spawn\": \"package\"\n        }\n      ]\n    },\n    \"build:publishing/github\": {\n      \"name\": \"build:publishing/github\",\n      \"steps\": [\n        {\n          \"exec\": \"yarn install --frozen-lockfile\",\n          \"cwd\": \"lib/publishing/github\"\n        },\n        {\n          \"exec\": \"yarn tsc --build\",\n          \"cwd\": \"lib/publishing/github\"\n        }\n      ]\n    },\n    \"bump\": {\n      \"name\": \"bump\",\n      \"description\": \"Bumps version based on latest git tag and generates a changelog entry\",\n      \"env\": {\n        \"OUTFILE\": \"package.json\",\n        \"CHANGELOG\": \"dist/changelog.md\",\n        \"BUMPFILE\": \"dist/version.txt\",\n        \"RELEASETAG\": \"dist/releasetag.txt\",\n        \"RELEASE_TAG_PREFIX\": \"\",\n        \"BUMP_PACKAGE\": \"commit-and-tag-version@^12\",\n        \"RELEASABLE_COMMITS\": \"git log --no-merges --oneline $LATEST_TAG..HEAD -E --grep \\\"^(feat|fix){1}(\\\\([^()[:space:]]+\\\\))?(!)?:[[:blank:]]+.+\\\"\"\n      },\n      \"steps\": [\n        {\n          \"builtin\": \"release/bump-version\"\n        }\n      ],\n      \"condition\": \"git log --oneline -1 | grep -qv \\\"chore(release):\\\"\"\n    },\n    \"bundle:package-integrity\": {\n      \"name\": \"bundle:package-integrity\",\n      \"description\": \"Bundle the package integrity script\",\n      \"steps\": [\n        {\n          \"exec\": \"esbuild --bundle lib/package-integrity/handler/validate.js --target=\\\"node14\\\" --platform=\\\"node\\\" 
--outfile=\\\"lib/package-integrity/handler/validate.bundle.js\\\" --sourcemap=inline\"\n        }\n      ]\n    },\n    \"clobber\": {\n      \"name\": \"clobber\",\n      \"description\": \"hard resets to HEAD of origin and cleans the local repo\",\n      \"env\": {\n        \"BRANCH\": \"$(git branch --show-current)\"\n      },\n      \"steps\": [\n        {\n          \"exec\": \"git checkout -b scratch\",\n          \"name\": \"save current HEAD in \\\"scratch\\\" branch\"\n        },\n        {\n          \"exec\": \"git checkout $BRANCH\"\n        },\n        {\n          \"exec\": \"git fetch origin\",\n          \"name\": \"fetch latest changes from origin\"\n        },\n        {\n          \"exec\": \"git reset --hard origin/$BRANCH\",\n          \"name\": \"hard reset to origin commit\"\n        },\n        {\n          \"exec\": \"git clean -fdx\",\n          \"name\": \"clean all untracked files\"\n        },\n        {\n          \"say\": \"ready to rock! (unpushed commits are under the \\\"scratch\\\" branch)\"\n        }\n      ],\n      \"condition\": \"git diff --exit-code > /dev/null\"\n    },\n    \"compile\": {\n      \"name\": \"compile\",\n      \"description\": \"Only compile\",\n      \"steps\": [\n        {\n          \"spawn\": \"compile:custom-resource-handlers\"\n        },\n        {\n          \"spawn\": \"build:publishing/github\"\n        },\n        {\n          \"exec\": \"tsc --build\"\n        },\n        {\n          \"spawn\": \"bundle:package-integrity\"\n        }\n      ]\n    },\n    \"compile:custom-resource-handlers\": {\n      \"name\": \"compile:custom-resource-handlers\",\n      \"steps\": [\n        {\n          \"exec\": \"/bin/bash ./build-custom-resource-handlers.sh\"\n        }\n      ]\n    },\n    \"default\": {\n      \"name\": \"default\",\n      \"description\": \"Synthesize project files\",\n      \"steps\": [\n        {\n          \"exec\": \"ts-node --project tsconfig.dev.json .projenrc.ts\"\n        }\n 
     ]\n    },\n    \"eject\": {\n      \"name\": \"eject\",\n      \"description\": \"Remove projen from the project\",\n      \"env\": {\n        \"PROJEN_EJECTING\": \"true\"\n      },\n      \"steps\": [\n        {\n          \"spawn\": \"default\"\n        }\n      ]\n    },\n    \"eslint\": {\n      \"name\": \"eslint\",\n      \"description\": \"Runs eslint against the codebase\",\n      \"env\": {\n        \"ESLINT_USE_FLAT_CONFIG\": \"false\",\n        \"NODE_NO_WARNINGS\": \"1\"\n      },\n      \"steps\": [\n        {\n          \"exec\": \"eslint --ext .ts,.tsx --fix --no-error-on-unmatched-pattern $@ lib lib/__tests__ build-tools projenrc .projenrc.ts\",\n          \"receiveArgs\": true\n        }\n      ]\n    },\n    \"install\": {\n      \"name\": \"install\",\n      \"description\": \"Install project dependencies and update lockfile (non-frozen)\",\n      \"steps\": [\n        {\n          \"exec\": \"yarn install --check-files\"\n        }\n      ]\n    },\n    \"install:ci\": {\n      \"name\": \"install:ci\",\n      \"description\": \"Install project dependencies using frozen lockfile\",\n      \"steps\": [\n        {\n          \"exec\": \"yarn install --check-files --frozen-lockfile\"\n        }\n      ]\n    },\n    \"integ:diff\": {\n      \"name\": \"integ:diff\",\n      \"steps\": [\n        {\n          \"exec\": \"/bin/bash ./lib/__tests__/run-test.sh\"\n        }\n      ]\n    },\n    \"integ:update\": {\n      \"name\": \"integ:update\",\n      \"steps\": [\n        {\n          \"exec\": \"/bin/bash ./lib/__tests__/run-test.sh update\"\n        }\n      ]\n    },\n    \"package\": {\n      \"name\": \"package\",\n      \"description\": \"Creates the distribution package\",\n      \"steps\": [\n        {\n          \"exec\": \"mkdir -p dist/js\"\n        },\n        {\n          \"exec\": \"npm pack --pack-destination dist/js\"\n        }\n      ]\n    },\n    \"post-compile\": {\n      \"name\": \"post-compile\",\n      
\"description\": \"Runs after successful compilation\"\n    },\n    \"post-upgrade\": {\n      \"name\": \"post-upgrade\",\n      \"description\": \"Runs after upgrading dependencies\"\n    },\n    \"pre-compile\": {\n      \"name\": \"pre-compile\",\n      \"description\": \"Prepare the project for compilation\",\n      \"steps\": [\n        {\n          \"exec\": \"for a in lib/publishing/*/update-ssm.sh; do\\n  for b in lib/publishing/*/update-ssm.sh; do\\n    if ! diff $a $b; then\\n      echo \\\"Files should be the same but are not:\\n- $a\\n- $b\\\"\\n      exit 1\\n    fi\\n  done\\ndone\"\n        }\n      ]\n    },\n    \"release\": {\n      \"name\": \"release\",\n      \"description\": \"Prepare a release from \\\"main\\\" branch\",\n      \"env\": {\n        \"RELEASE\": \"true\"\n      },\n      \"steps\": [\n        {\n          \"exec\": \"rm -fr dist\"\n        },\n        {\n          \"spawn\": \"bump\"\n        },\n        {\n          \"spawn\": \"build\"\n        },\n        {\n          \"spawn\": \"unbump\"\n        },\n        {\n          \"exec\": \"git diff --ignore-space-at-eol --exit-code\"\n        }\n      ]\n    },\n    \"test\": {\n      \"name\": \"test\",\n      \"description\": \"Run tests\",\n      \"env\": {\n        \"TZ\": \"UTC\"\n      },\n      \"steps\": [\n        {\n          \"exec\": \"jest --passWithNoTests --updateSnapshot\",\n          \"receiveArgs\": true\n        },\n        {\n          \"spawn\": \"eslint\"\n        },\n        {\n          \"spawn\": \"integ:diff\"\n        }\n      ]\n    },\n    \"test:watch\": {\n      \"name\": \"test:watch\",\n      \"description\": \"Run jest in watch mode\",\n      \"steps\": [\n        {\n          \"exec\": \"jest --watch\"\n        }\n      ]\n    },\n    \"unbump\": {\n      \"name\": \"unbump\",\n      \"description\": \"Restores version to 0.0.0\",\n      \"env\": {\n        \"OUTFILE\": \"package.json\",\n        \"CHANGELOG\": \"dist/changelog.md\",\n        
\"BUMPFILE\": \"dist/version.txt\",\n        \"RELEASETAG\": \"dist/releasetag.txt\",\n        \"RELEASE_TAG_PREFIX\": \"\",\n        \"BUMP_PACKAGE\": \"commit-and-tag-version@^12\",\n        \"RELEASABLE_COMMITS\": \"git log --no-merges --oneline $LATEST_TAG..HEAD -E --grep \\\"^(feat|fix){1}(\\\\([^()[:space:]]+\\\\))?(!)?:[[:blank:]]+.+\\\"\"\n      },\n      \"steps\": [\n        {\n          \"builtin\": \"release/reset-version\"\n        }\n      ]\n    },\n    \"upgrade\": {\n      \"name\": \"upgrade\",\n      \"description\": \"upgrade dependencies\",\n      \"env\": {\n        \"CI\": \"0\"\n      },\n      \"steps\": [\n        {\n          \"exec\": \"npx npm-check-updates@18 --upgrade --target=minor --peer --no-deprecated --dep=prod --filter=changelog-parser\"\n        },\n        {\n          \"exec\": \"yarn install --check-files\"\n        },\n        {\n          \"exec\": \"yarn upgrade changelog-parser\"\n        },\n        {\n          \"exec\": \"npx projen\"\n        },\n        {\n          \"spawn\": \"post-upgrade\"\n        }\n      ]\n    },\n    \"upgrade-cdklabs-projen-project-types\": {\n      \"name\": \"upgrade-cdklabs-projen-project-types\",\n      \"description\": \"upgrade cdklabs-projen-project-types\",\n      \"env\": {\n        \"CI\": \"0\"\n      },\n      \"steps\": [\n        {\n          \"exec\": \"npx npm-check-updates@18 --upgrade --target=latest --peer --no-deprecated --dep=dev,peer,prod,optional --filter=cdklabs-projen-project-types,projen\"\n        },\n        {\n          \"exec\": \"yarn install --check-files\"\n        },\n        {\n          \"exec\": \"yarn upgrade cdklabs-projen-project-types projen\"\n        },\n        {\n          \"exec\": \"npx projen\"\n        },\n        {\n          \"spawn\": \"post-upgrade\"\n        }\n      ]\n    },\n    \"upgrade-dev-deps\": {\n      \"name\": \"upgrade-dev-deps\",\n      \"description\": \"upgrade dev dependencies\",\n      \"env\": {\n        \"CI\": 
\"0\"\n      },\n      \"steps\": [\n        {\n          \"exec\": \"npx npm-check-updates@18 --upgrade --target=minor --peer --no-deprecated --dep=dev --filter=@aws-sdk/client-cloudwatch,@aws-sdk/client-codepipeline,@aws-sdk/client-s3,@aws-sdk/client-secrets-manager,@aws-sdk/client-ssm,@babel/plugin-transform-modules-commonjs,@types/adm-zip,@types/aws-lambda,@types/follow-redirects,@types/fs-extra,@types/jest,@types/tar,adm-zip,aws-cdk,aws-cdk-lib,esbuild,eslint-import-resolver-typescript,eslint-plugin-import,follow-redirects,fs-extra,jest,JSONStream,rrule,standard-version,tar,ts-jest,ts-node\"\n        },\n        {\n          \"exec\": \"yarn install --check-files\"\n        },\n        {\n          \"exec\": \"yarn upgrade @aws-sdk/client-cloudwatch @aws-sdk/client-codepipeline @aws-sdk/client-s3 @aws-sdk/client-secrets-manager @aws-sdk/client-ssm @babel/plugin-transform-modules-commonjs @stylistic/eslint-plugin @types/adm-zip @types/aws-lambda @types/follow-redirects @types/fs-extra @types/jest @types/node @types/tar @typescript-eslint/eslint-plugin @typescript-eslint/parser adm-zip aws-cdk aws-cdk-lib commit-and-tag-version constructs esbuild eslint-import-resolver-typescript eslint-plugin-import eslint follow-redirects fs-extra jest jest-junit JSONStream minipass node-ical rrule standard-version tar ts-jest ts-node typescript\"\n        },\n        {\n          \"exec\": \"npx projen\"\n        },\n        {\n          \"spawn\": \"post-upgrade\"\n        }\n      ]\n    },\n    \"watch\": {\n      \"name\": \"watch\",\n      \"description\": \"Watch & compile in the background\",\n      \"steps\": [\n        {\n          \"exec\": \"tsc --build -w\"\n        }\n      ]\n    }\n  },\n  \"env\": {\n    \"PATH\": \"$(npx -c \\\"node --print process.env.PATH\\\")\",\n    \"RELEASE\": \"1\"\n  },\n  \"//\": \"~~ Generated by projen. To modify, edit .projenrc.ts and run \\\"npx projen\\\".\"\n}\n"
  },
  {
    "path": ".projenrc.ts",
    "content": "import { CdklabsTypeScriptProject } from 'cdklabs-projen-project-types';\n\nconst project = new CdklabsTypeScriptProject({\n  name: 'aws-delivlib',\n  private: false,\n  projenrcTs: true,\n  description: 'A fabulous library for defining continuous pipelines for building, testing and releasing code libraries.',\n  repository: 'https://github.com/cdklabs/aws-delivlib.git',\n  defaultReleaseBranch: 'main',\n  authorName: 'Amazon Web Services',\n  authorUrl: 'https://aws.amazon.com',\n  minNodeVersion: '18.12.0',\n  typescriptVersion: '~5.0.0',\n  keywords: [\n    'aws-cdk',\n    'continuous-delivery',\n    'continuous-integration',\n    'ci-cd',\n  ],\n  deps: ['changelog-parser'],\n  depsUpgradeOptions: {\n    exclude: ['aws-cdk-lib', 'constructs'],\n  },\n  devDeps: [\n    '@babel/plugin-transform-modules-commonjs',\n    '@types/aws-lambda',\n    '@types/fs-extra',\n    '@types/tar',\n    '@types/adm-zip',\n    '@types/follow-redirects',\n    'aws-cdk',\n    'constructs',\n    'aws-cdk-lib',\n    'standard-version',\n    'ts-jest',\n    'typescript@~5.0.0',\n    '@aws-sdk/client-s3',\n    '@aws-sdk/client-ssm',\n    '@aws-sdk/client-secrets-manager',\n    '@aws-sdk/client-codepipeline',\n    '@aws-sdk/client-cloudwatch',\n    'node-ical@0.15.1', // need to pin due to https://github.com/axios/axios/issues/5101\n    'rrule',\n    'esbuild',\n    'fs-extra',\n    'tar',\n    'adm-zip',\n    'JSONStream',\n    'follow-redirects',\n    'minipass@3.2.1', // temporary (hopefully) workaround for https://github.com/DefinitelyTyped/DefinitelyTyped/discussions/60901s\n  ],\n  peerDeps: [\n    'constructs',\n    'aws-cdk-lib@^2.187.0',\n  ],\n  srcdir: 'lib',\n  testdir: 'lib/__tests__',\n\n  pullRequestTemplate: false,\n  autoApproveOptions: {\n    allowedUsernames: ['cdklabs-automation'],\n    secret: 'GITHUB_TOKEN',\n  },\n  autoApproveUpgrades: true,\n  releaseToNpm: true,\n  enablePRAutoMerge: true,\n});\n\n// trick projen so that it doesn't override the 
version in package.json\nproject.tasks.addEnvironment('RELEASE', '1');\n\nproject.gitignore.exclude('cdk.out');\nproject.gitignore.exclude('pipeline/*.js');\nproject.gitignore.exclude('pipeline/*.d.ts');\nproject.setScript('cdk', 'npx cdk');\n\nconst integDiff = project.addTask('integ:diff');\nintegDiff.exec('/bin/bash ./lib/__tests__/run-test.sh');\n\nconst integUpdate = project.addTask('integ:update');\nintegUpdate.exec('/bin/bash ./lib/__tests__/run-test.sh update');\n\n// Need to run with UTC TZ, or else node-ical does very wrong things with timestamps and fails tests...\nproject.testTask.env('TZ', 'UTC');\nproject.testTask.spawn(integDiff);\n\n// Run yarn install in the github publisher directory\nconst buildGithubPublisher = project.addTask('build:publishing/github');\nbuildGithubPublisher.exec('yarn install --frozen-lockfile', { cwd: 'lib/publishing/github' });\nbuildGithubPublisher.exec('yarn tsc --build', { cwd: 'lib/publishing/github' });\nproject.compileTask.prependSpawn(buildGithubPublisher);\n// Exclude the publisher from the root tsconfig, but add a reference to it\nproject.tsconfig?.addExclude('lib/publishing/github');\nproject.tsconfig?.file.addOverride('references', [{ path: 'lib/publishing/github' }]);\n\nconst compileCustomResourceHandlers = project.addTask('compile:custom-resource-handlers');\ncompileCustomResourceHandlers.exec('/bin/bash ./build-custom-resource-handlers.sh');\n\nproject.compileTask.prependSpawn(compileCustomResourceHandlers);\n\nproject.gitignore.include('lib/package-integrity/handler/JSONStream.d.ts');\nconst bundlePackageIntegrity = project.addTask('bundle:package-integrity', {\n  description: 'Bundle the package integrity script',\n  exec: [\n    'esbuild',\n    '--bundle',\n    'lib/package-integrity/handler/validate.js',\n    '--target=\"node14\"',\n    '--platform=\"node\"',\n    '--outfile=\"lib/package-integrity/handler/validate.bundle.js\"',\n    '--sourcemap=inline',\n  ].join(' 
'),\n});\n\nproject.compileTask.spawn(bundlePackageIntegrity);\n\n// Make sure that all \"update-ssm\" scripts are the same, so that they don't drift.\nproject.preCompileTask.exec(`for a in lib/publishing/*/update-ssm.sh; do\n  for b in lib/publishing/*/update-ssm.sh; do\n    if ! diff $a $b; then\n      echo \"Files should be the same but are not:\\n- $a\\n- $b\"\n      exit 1\n    fi\n  done\ndone`);\n\n// The npmignore file includes original source files, which is undesirable.\nproject.npmignore?.exclude(\n  '/lib/**/*.ts',\n);\nproject.npmignore?.include(\n  '/lib/**/*.d.ts',\n  '/lib/**/node_modules/**',\n);\n// Also includes other undesirable assets.\nproject.npmignore?.exclude(\n  '/lib/__tests__/',\n  'tsconfig.json',\n  'tsconfig.dev.json',\n  'tsconfig.tsbuildinfo',\n  '/build-*.sh',\n  'cdk.out/',\n  'cdk.json',\n);\n\n// Allow Jest to transform ESM-only dependencies\nproject.jest!.config.transformIgnorePatterns = [\n  'node_modules/(?!(@nodable/entities)/)',\n];\nproject.jest!.config.transform = {\n  'node_modules/@nodable/entities/.+\\\\.js$': ['babel-jest', { plugins: ['@babel/plugin-transform-modules-commonjs'] }],\n};\n\nproject.synth();\n"
  },
  {
    "path": "CHANGELOG.md",
    "content": "# Changelog\n\nAll notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.\n\n### [13.0.34](https://github.com/cdklabs/aws-delivlib/compare/v13.0.33...v13.0.34) (2021-09-03)\n\n### [13.0.33](https://github.com/cdklabs/aws-delivlib/compare/v13.0.32...v13.0.33) (2021-09-02)\n\n### [13.0.32](https://github.com/cdklabs/aws-delivlib/compare/v13.0.31...v13.0.32) (2021-09-01)\n\n### [13.0.31](https://github.com/cdklabs/aws-delivlib/compare/v13.0.30...v13.0.31) (2021-08-26)\n\n### [13.0.30](https://github.com/cdklabs/aws-delivlib/compare/v13.0.29...v13.0.30) (2021-08-25)\n\n### [13.0.29](https://github.com/cdklabs/aws-delivlib/compare/v13.0.28...v13.0.29) (2021-08-24)\n\n### [13.0.28](https://github.com/cdklabs/aws-delivlib/compare/v13.0.27...v13.0.28) (2021-08-23)\n\n### [13.0.27](https://github.com/cdklabs/aws-delivlib/compare/v13.0.26...v13.0.27) (2021-08-22)\n\n### [13.0.26](https://github.com/cdklabs/aws-delivlib/compare/v13.0.25...v13.0.26) (2021-08-21)\n\n### [13.0.25](https://github.com/cdklabs/aws-delivlib/compare/v13.0.24...v13.0.25) (2021-08-20)\n\n### [13.0.24](https://github.com/cdklabs/aws-delivlib/compare/v13.0.23...v13.0.24) (2021-08-19)\n\n### [13.0.23](https://github.com/cdklabs/aws-delivlib/compare/v13.0.22...v13.0.23) (2021-08-18)\n\n### [13.0.22](https://github.com/cdklabs/aws-delivlib/compare/v13.0.21...v13.0.22) (2021-08-17)\n\n### [13.0.21](https://github.com/cdklabs/aws-delivlib/compare/v13.0.20...v13.0.21) (2021-08-16)\n\n### [13.0.20](https://github.com/cdklabs/aws-delivlib/compare/v13.0.19...v13.0.20) (2021-08-15)\n\n### [13.0.19](https://github.com/cdklabs/aws-delivlib/compare/v13.0.18...v13.0.19) (2021-08-14)\n\n### [13.0.18](https://github.com/cdklabs/aws-delivlib/compare/v13.0.17...v13.0.18) (2021-08-13)\n\n### [13.0.17](https://github.com/cdklabs/aws-delivlib/compare/v13.0.16...v13.0.17) (2021-08-12)\n\n### 
[13.0.16](https://github.com/cdklabs/aws-delivlib/compare/v13.0.15...v13.0.16) (2021-08-11)\n\n### [13.0.15](https://github.com/cdklabs/aws-delivlib/compare/v13.0.14...v13.0.15) (2021-08-10)\n\n### [13.0.14](https://github.com/cdklabs/aws-delivlib/compare/v13.0.13...v13.0.14) (2021-07-28)\n\n### [13.0.13](https://github.com/cdklabs/aws-delivlib/compare/v13.0.12...v13.0.13) (2021-07-27)\n\n### [13.0.12](https://github.com/cdklabs/aws-delivlib/compare/v13.0.11...v13.0.12) (2021-07-26)\n\n### [13.0.11](https://github.com/cdklabs/aws-delivlib/compare/v13.0.10...v13.0.11) (2021-07-25)\n\n### [13.0.10](https://github.com/cdklabs/aws-delivlib/compare/v13.0.9...v13.0.10) (2021-07-24)\n\n### [13.0.9](https://github.com/cdklabs/aws-delivlib/compare/v13.0.8...v13.0.9) (2021-07-23)\n\n### [13.0.8](https://github.com/cdklabs/aws-delivlib/compare/v13.0.7...v13.0.8) (2021-07-22)\n\n### [13.0.7](https://github.com/cdklabs/aws-delivlib/compare/v13.0.6...v13.0.7) (2021-07-21)\n\n### [13.0.6](https://github.com/cdklabs/aws-delivlib/compare/v13.0.5...v13.0.6) (2021-07-20)\n\n### [13.0.5](https://github.com/cdklabs/aws-delivlib/compare/v13.0.4...v13.0.5) (2021-07-06)\n\n### [13.0.4](https://github.com/cdklabs/aws-delivlib/compare/v13.0.3...v13.0.4) (2021-07-05)\n\n### [13.0.3](https://github.com/cdklabs/aws-delivlib/compare/v13.0.2...v13.0.3) (2021-06-17)\n\n### [13.0.2](https://github.com/cdklabs/aws-delivlib/compare/v13.0.1...v13.0.2) (2021-06-16)\n\n\n### Bug Fixes\n\n* Custom Resources use end-of-life Lambda Node 10 runtime ([#834](https://github.com/cdklabs/aws-delivlib/issues/834)) ([6590e7d](https://github.com/cdklabs/aws-delivlib/commit/6590e7dd7d19c1f32b4516a45d34420cd4fe30b7))\n\n### [13.0.1](https://github.com/cdklabs/aws-delivlib/compare/v13.0.0...v13.0.1) (2021-06-15)\n\n## [13.0.0](https://github.com/cdklabs/aws-delivlib/compare/v12.7.2...v13.0.0) (2021-06-14)\n\n\n### ⚠ BREAKING CHANGES\n\n* `esbuild` or `docker` is required in order to bundle the 
change-control-lambda\n\n* change-control-lambda migrated to `NodeJsFunction` ([#844](https://github.com/cdklabs/aws-delivlib/issues/844)) ([715446d](https://github.com/cdklabs/aws-delivlib/commit/715446d7296f3f6e2fa6172f0e73bcf012193467))\n\n### [12.7.2](https://github.com/cdklabs/aws-delivlib/compare/v12.7.1...v12.7.2) (2021-06-07)\n\n### [12.7.1](https://github.com/cdklabs/aws-delivlib/compare/v12.7.0...v12.7.1) (2021-05-29)\n\n## [12.7.0](https://github.com/cdklabs/aws-delivlib/compare/v12.6.0...v12.7.0) (2021-05-12)\n\n\n### Features\n\n* **pipeline:** improved pipeline action failure metrics ([8f3783b](https://github.com/cdklabs/aws-delivlib/commit/8f3783b8a0e71542485ef91bcc281daf3c9ecb7e)), closes [#696](https://github.com/cdklabs/aws-delivlib/issues/696)\n\n## [12.6.0](https://github.com/cdklabs/aws-delivlib/compare/v12.5.0...v12.6.0) (2021-05-08)\n\n\n### Features\n\n* **registry-sync:** docker build time variables ([a04cb1e](https://github.com/cdklabs/aws-delivlib/commit/a04cb1e8cc16a5c6176c92c555e116652bcc7efe))\n\n## [12.5.0](https://github.com/cdklabs/aws-delivlib/compare/v12.4.3...v12.5.0) (2021-05-03)\n\n\n### Features\n\n* **auto-pr:** skip creating PR if open PR still exists ([#797](https://github.com/cdklabs/aws-delivlib/issues/797)) ([c58ded9](https://github.com/cdklabs/aws-delivlib/commit/c58ded9f3c9e7f585f53953dfaf8829f6eb38093))\n\n\n### Bug Fixes\n\n* reuse same repository for multiple tags ([194ec3a](https://github.com/cdklabs/aws-delivlib/commit/194ec3a1870d0e954616f3aa22065ddd4622682d))\n\n### [12.4.3](https://github.com/awslabs/aws-delivlib/compare/v12.4.2...v12.4.3) (2021-04-19)\n\n\n### Bug Fixes\n\n* **ecr-mirror:** only represent non-`latest` tags in construct IDs ([6282a0c](https://github.com/awslabs/aws-delivlib/commit/6282a0ccc8f02f1c6b7a54bc0840b4d26061f27e))\n\n### [12.4.2](https://github.com/awslabs/aws-delivlib/compare/v12.4.1...v12.4.2) (2021-04-14)\n\n\n### Bug Fixes\n\n* **ecr-mirror:** unable to mirror multiple tags from 
same repository ([89f3b1a](https://github.com/awslabs/aws-delivlib/commit/89f3b1a8b16843987464ac4a85f23021602dd7f7))\n\n### [12.4.1](https://github.com/awslabs/aws-delivlib/compare/v12.4.0...v12.4.1) (2021-04-12)\n\n\n### Bug Fixes\n\n* use an atomic push when pushing bumps ([90de8cd](https://github.com/awslabs/aws-delivlib/commit/90de8cd1e12923f5cfcb877dc17ea1c0c2b30596))\n\n## [12.4.0](https://github.com/awslabs/aws-delivlib/compare/v12.3.2...v12.4.0) (2021-04-12)\n\n\n### Features\n\n* **autobuild:** autobuild multiple branches ([125d792](https://github.com/awslabs/aws-delivlib/commit/125d7928ed70856b54d1522772300efc164b35c2))\n\n### [12.3.2](https://github.com/awslabs/aws-delivlib/compare/v12.3.0...v12.3.2) (2021-03-25)\n\n\n### Bug Fixes\n\n* idempotencyToken is expected to be a string ([c70c50a](https://github.com/awslabs/aws-delivlib/commit/c70c50a6c68d86a8a2e87599fa65cd46e0193f7d))\n\n### [12.3.1](https://github.com/awslabs/aws-delivlib/compare/v12.3.0...v12.3.1) (2021-03-25)\n\n\n### Bug Fixes\n\n* idempotencyToken is expected to be a string ([c70c50a](https://github.com/awslabs/aws-delivlib/commit/c70c50a6c68d86a8a2e87599fa65cd46e0193f7d))\n\n## [12.3.0](https://github.com/awslabs/aws-delivlib/compare/v12.2.5...v12.3.0) (2021-03-08)\n\n\n### Features\n\n* support NPM publishing order ([d106fbd](https://github.com/awslabs/aws-delivlib/commit/d106fbd6b10e572aeb2e873035da5ced21bffa14))\n\n### [12.2.5](https://github.com/awslabs/aws-delivlib/compare/v12.2.4...v12.2.5) (2021-03-08)\n\n### [12.2.4](https://github.com/awslabs/aws-delivlib/compare/v12.2.3...v12.2.4) (2021-03-05)\n\n### [12.2.3](https://github.com/awslabs/aws-delivlib/compare/v12.2.2...v12.2.3) (2021-03-02)\n\n### [12.2.2](https://github.com/awslabs/aws-delivlib/compare/v12.2.1...v12.2.2) (2021-03-01)\n\n### [12.2.1](https://github.com/awslabs/aws-delivlib/compare/v12.2.0...v12.2.1) (2021-02-24)\n\n## [12.2.0](https://github.com/awslabs/aws-delivlib/compare/v12.1.13...v12.2.0) 
(2021-02-23)\n\n\n### Features\n\n* golang publishing ([96150d8](https://github.com/awslabs/aws-delivlib/commit/96150d80f5f6ff50a9ebd235d4a0127595aa6929)), closes [aws/jsii#2562](https://github.com/aws/jsii/issues/2562)\n\n### [12.1.13](https://github.com/awslabs/aws-delivlib/compare/v12.1.12...v12.1.13) (2021-02-23)\n\n### [12.1.12](https://github.com/awslabs/aws-delivlib/compare/v12.1.11...v12.1.12) (2021-02-22)\n\n### [12.1.11](https://github.com/awslabs/aws-delivlib/compare/v12.1.10...v12.1.11) (2021-02-19)\n\n### [12.1.10](https://github.com/awslabs/aws-delivlib/compare/v12.1.9...v12.1.10) (2021-02-18)\n\n### [12.1.9](https://github.com/awslabs/aws-delivlib/compare/v12.1.8...v12.1.9) (2021-02-16)\n\n### [12.1.8](https://github.com/awslabs/aws-delivlib/compare/v12.1.7...v12.1.8) (2021-02-15)\n\n### [12.1.7](https://github.com/awslabs/aws-delivlib/compare/v12.1.6...v12.1.7) (2021-02-12)\n\n### [12.1.6](https://github.com/awslabs/aws-delivlib/compare/v12.1.5...v12.1.6) (2021-02-11)\n\n### [12.1.5](https://github.com/awslabs/aws-delivlib/compare/v12.1.4...v12.1.5) (2021-02-10)\n\n### [12.1.4](https://github.com/awslabs/aws-delivlib/compare/v12.1.3...v12.1.4) (2021-02-09)\n\n\n### Bug Fixes\n\n* **pypi:** cryptography >= 3.4 is not supported by older pip ([4ff8cfb](https://github.com/awslabs/aws-delivlib/commit/4ff8cfbd6d9e337e39d2906b959bb064549cb887)), closes [/cryptography.io/en/3.4/changelog.html#v3-4](https://github.com/awslabs//cryptography.io/en/3.4/changelog.html/issues/v3-4)\n\n### [12.1.3](https://github.com/awslabs/aws-delivlib/compare/v12.1.2...v12.1.3) (2021-02-09)\n\n### [12.1.2](https://github.com/awslabs/aws-delivlib/compare/v12.1.1...v12.1.2) (2021-02-05)\n\n### [12.1.1](https://github.com/awslabs/aws-delivlib/compare/v12.1.0...v12.1.1) (2021-02-04)\n\n## [12.1.0](https://github.com/awslabs/aws-delivlib/compare/v12.0.0...v12.1.0) (2021-02-02)\n\n\n### Features\n\n* **autobuild:** Option to disable webhooks 
([969759d](https://github.com/awslabs/aws-delivlib/commit/969759d406cbea50921d3f8c1336ed5321ce78a2))\n\n## [12.0.0](https://github.com/awslabs/aws-delivlib/compare/v11.0.8...v12.0.0) (2021-02-02)\n\n\n### ⚠ BREAKING CHANGES\n\n* **pipeline:** delivlib `Pipeline` construct no longer produces\nthe `<pipelineName>_FailedStages` metric. It instead produces the\nmetric `PipelineActionFailures` with the pipeline name as a dimension.\n\n### Features\n\n* **pipeline:** improved pipeline failure metrics ([5c30ada](https://github.com/awslabs/aws-delivlib/commit/5c30adafd594ae139f9cccbeb4d0fd8c417a9c6f))\n\n### [11.0.8](https://github.com/awslabs/aws-delivlib/compare/v11.0.7...v11.0.8) (2021-02-01)\n\n### [11.0.7](https://github.com/awslabs/aws-delivlib/compare/v11.0.6...v11.0.7) (2021-01-29)\n\n### [11.0.6](https://github.com/awslabs/aws-delivlib/compare/v11.0.5...v11.0.6) (2021-01-26)\n\n### [11.0.5](https://github.com/awslabs/aws-delivlib/compare/v11.0.4...v11.0.5) (2021-01-25)\n\n### [11.0.4](https://github.com/awslabs/aws-delivlib/compare/v11.0.3...v11.0.4) (2021-01-22)\n\n### [11.0.3](https://github.com/awslabs/aws-delivlib/compare/v11.0.2...v11.0.3) (2021-01-21)\n\n### [11.0.2](https://github.com/awslabs/aws-delivlib/compare/v11.0.1...v11.0.2) (2021-01-20)\n\n### [11.0.1](https://github.com/awslabs/aws-delivlib/compare/v11.0.0...v11.0.1) (2021-01-19)\n\n## [11.0.0](https://github.com/awslabs/aws-delivlib/compare/v10.3.12...v11.0.0) (2021-01-19)\n\n\n### ⚠ BREAKING CHANGES\n\n* **bump:** the 'stage' prop of AutoMergeBackOptions has been moved to a\nnew AutoMergeBackPipelineOptions interface.\n\n### Bug Fixes\n\n* remove repo as required property of AutoMergeBackOptions ([7eb7991](https://github.com/awslabs/aws-delivlib/commit/7eb7991171ccd64d49e3e7123cc68c3dca3d104a)), closes [#671](https://github.com/awslabs/aws-delivlib/issues/671)\n\n\n* **bump:** refactor AutoMergeBackOptions 
([0700bcc](https://github.com/awslabs/aws-delivlib/commit/0700bccc5cd469688ace57856086c02a6f605a94)), closes [#684](https://github.com/awslabs/aws-delivlib/issues/684)\n\n### [10.3.12](https://github.com/awslabs/aws-delivlib/compare/v10.3.11...v10.3.12) (2021-01-18)\n\n### [10.3.11](https://github.com/awslabs/aws-delivlib/compare/v10.3.10...v10.3.11) (2021-01-15)\n\n### [10.3.10](https://github.com/awslabs/aws-delivlib/compare/v10.3.9...v10.3.10) (2021-01-13)\n\n### [10.3.9](https://github.com/awslabs/aws-delivlib/compare/v10.3.8...v10.3.9) (2021-01-12)\n\n### [10.3.8](https://github.com/awslabs/aws-delivlib/compare/v10.3.7...v10.3.8) (2021-01-11)\n\n### [10.3.7](https://github.com/awslabs/aws-delivlib/compare/v10.3.6...v10.3.7) (2021-01-08)\n\n### [10.3.6](https://github.com/awslabs/aws-delivlib/compare/v10.3.5...v10.3.6) (2021-01-07)\n\n### [10.3.5](https://github.com/awslabs/aws-delivlib/compare/v10.3.4...v10.3.5) (2021-01-06)\n\n### [10.3.4](https://github.com/awslabs/aws-delivlib/compare/v10.3.3...v10.3.4) (2021-01-05)\n\n### [10.3.3](https://github.com/awslabs/aws-delivlib/compare/v10.3.2...v10.3.3) (2021-01-04)\n\n### [10.3.2](https://github.com/awslabs/aws-delivlib/compare/v10.3.1...v10.3.2) (2021-01-01)\n\n### [10.3.1](https://github.com/awslabs/aws-delivlib/compare/v10.3.0...v10.3.1) (2020-12-31)\n\n## [10.3.0](https://github.com/awslabs/aws-delivlib/compare/v10.2.16...v10.3.0) (2020-12-30)\n\n\n### Features\n\n* **auto-build:** option to not publish build logs when build is successful ([#633](https://github.com/awslabs/aws-delivlib/issues/633)) ([0026760](https://github.com/awslabs/aws-delivlib/commit/00267602e1c14013f653b1dfb486d2a2a355027c)), closes [jlhood/github-codebuild-logs#29](https://github.com/jlhood/github-codebuild-logs/issues/29)\n\n### [10.2.16](https://github.com/awslabs/aws-delivlib/compare/v10.2.15...v10.2.16) (2020-12-30)\n\n### [10.2.15](https://github.com/awslabs/aws-delivlib/compare/v10.2.14...v10.2.15) (2020-12-29)\n\n### 
[10.2.14](https://github.com/awslabs/aws-delivlib/compare/v10.2.13...v10.2.14) (2020-12-28)\n\n### [10.2.13](https://github.com/awslabs/aws-delivlib/compare/v10.2.12...v10.2.13) (2020-12-25)\n\n### [10.2.12](https://github.com/awslabs/aws-delivlib/compare/v10.2.11...v10.2.12) (2020-12-24)\n\n### [10.2.11](https://github.com/awslabs/aws-delivlib/compare/v10.2.10...v10.2.11) (2020-12-23)\n\n### [10.2.10](https://github.com/awslabs/aws-delivlib/compare/v10.2.9...v10.2.10) (2020-12-22)\n\n### [10.2.9](https://github.com/awslabs/aws-delivlib/compare/v10.2.8...v10.2.9) (2020-12-21)\n\n### [10.2.8](https://github.com/awslabs/aws-delivlib/compare/v10.2.7...v10.2.8) (2020-12-18)\n\n### [10.2.7](https://github.com/awslabs/aws-delivlib/compare/v10.2.6...v10.2.7) (2020-12-17)\n\n\n### Bug Fixes\n\n* **ecr-mirror:** sync job fails when using MirrorSource.fromDirectory() ([3c96b03](https://github.com/awslabs/aws-delivlib/commit/3c96b03012bc5521c656b3f07eb12fd0abc11ed1))\n\n### [10.2.6](https://github.com/awslabs/aws-delivlib/compare/v10.2.5...v10.2.6) (2020-12-17)\n\n### [10.2.5](https://github.com/awslabs/aws-delivlib/compare/v10.2.4...v10.2.5) (2020-12-15)\n\n### [10.2.4](https://github.com/awslabs/aws-delivlib/compare/v10.2.3...v10.2.4) (2020-12-14)\n\n### [10.2.3](https://github.com/awslabs/aws-delivlib/compare/v10.2.2...v10.2.3) (2020-12-11)\n\n\n### Bug Fixes\n\n* **ecr-mirror:** duplicate repository names get overwritten. 
([1e15730](https://github.com/awslabs/aws-delivlib/commit/1e15730800883457142ba6f58bdcc497e6f1fd85))\n\n### [10.2.2](https://github.com/awslabs/aws-delivlib/compare/v10.2.1...v10.2.2) (2020-12-10)\n\n### [10.2.1](https://github.com/awslabs/aws-delivlib/compare/v10.2.0...v10.2.1) (2020-12-09)\n\n## [10.2.0](https://github.com/awslabs/aws-delivlib/compare/v10.1.6...v10.2.0) (2020-12-08)\n\n\n### Features\n\n* synchronize docker images to a local ECR repository ([e2fec0a](https://github.com/awslabs/aws-delivlib/commit/e2fec0a5a60211723b4b9317ecaef8ef00926003))\n\n### [10.1.6](https://github.com/awslabs/aws-delivlib/compare/v10.1.5...v10.1.6) (2020-12-04)\n\n\n### Bug Fixes\n\n* **chime-notifier:** synthesis fails when webhook url is a token ([3e6e185](https://github.com/awslabs/aws-delivlib/commit/3e6e1853a9f99e427d2a471c0d0c2da8110eee3d))\n\n### [10.1.5](https://github.com/awslabs/aws-delivlib/compare/v10.1.4...v10.1.5) (2020-12-02)\n\n### [10.1.4](https://github.com/awslabs/aws-delivlib/compare/v10.1.3...v10.1.4) (2020-12-01)\n\n### [10.1.3](https://github.com/awslabs/aws-delivlib/compare/v10.1.2...v10.1.3) (2020-11-28)\n\n### [10.1.2](https://github.com/awslabs/aws-delivlib/compare/v10.1.1...v10.1.2) (2020-11-25)\n\n### [10.1.1](https://github.com/awslabs/aws-delivlib/compare/v10.1.0...v10.1.1) (2020-11-23)\n\n\n### Bug Fixes\n\n* **pipeline-notifications:** deployment fails to replace codestar notifications resource ([13a0e2c](https://github.com/awslabs/aws-delivlib/commit/13a0e2c1081fdc74e218aa485dea5648417a2f79))\n\n## [10.1.0](https://github.com/awslabs/aws-delivlib/compare/v10.0.7...v10.1.0) (2020-11-19)\n\n\n### Features\n\n* AutoBuild as its own feature ([b98a191](https://github.com/awslabs/aws-delivlib/commit/b98a19188032f752f03f26d9058cdea5861aa1f3))\n\n### [10.0.7](https://github.com/awslabs/aws-delivlib/compare/v10.0.6...v10.0.7) (2020-11-18)\n\n### [10.0.6](https://github.com/awslabs/aws-delivlib/compare/v10.0.5...v10.0.6) (2020-11-17)\n\n### 
[10.0.5](https://github.com/awslabs/aws-delivlib/compare/v10.0.4...v10.0.5) (2020-11-16)\n\n### [10.0.4](https://github.com/awslabs/aws-delivlib/compare/v10.0.3...v10.0.4) (2020-11-12)\n\n### [10.0.3](https://github.com/awslabs/aws-delivlib/compare/v10.0.2...v10.0.3) (2020-11-11)\n\n### [10.0.2](https://github.com/awslabs/aws-delivlib/compare/v10.0.1...v10.0.2) (2020-11-10)\n\n### [10.0.1](https://github.com/awslabs/aws-delivlib/compare/v10.0.0...v10.0.1) (2020-11-09)\n\n## [10.0.0](https://github.com/awslabs/aws-delivlib/compare/v9.2.0...v10.0.0) (2020-11-08)\n\n\n### ⚠ BREAKING CHANGES\n\n* `pipeline.addShellable` now returns the shellable in addition to the action. Use `.action` to retrieve the action \n\n### Features\n\n* expose underlying codebuild projects ([5e53749](https://github.com/awslabs/aws-delivlib/commit/5e537498b683442579c3925de647b3aded60ffb4))\n\n## [9.2.0](https://github.com/awslabs/aws-delivlib/compare/v9.1.3...v9.2.0) (2020-11-07)\n\n\n### Features\n\n* **pipeline:** allow more details from slack notifications ([#553](https://github.com/awslabs/aws-delivlib/issues/553)) ([531308c](https://github.com/awslabs/aws-delivlib/commit/531308c6407abb3180dc6e3dfb3a1e89cfbf4d64))\n\n### [9.1.3](https://github.com/awslabs/aws-delivlib/compare/v9.1.2...v9.1.3) (2020-11-06)\n\n### [9.1.2](https://github.com/awslabs/aws-delivlib/compare/v9.1.1...v9.1.2) (2020-11-05)\n\n### [9.1.1](https://github.com/awslabs/aws-delivlib/compare/v9.1.0...v9.1.1) (2020-11-04)\n\n## [9.1.0](https://github.com/awslabs/aws-delivlib/compare/v9.0.4...v9.1.0) (2020-11-03)\n\n\n### Features\n\n* **pipeline:** notify failure on slack ([3bb46a3](https://github.com/awslabs/aws-delivlib/commit/3bb46a3d61b3187accfde7c914f04eeeae507a35))\n\n### [9.0.4](https://github.com/awslabs/aws-delivlib/compare/v9.0.3...v9.0.4) (2020-11-02)\n\n### [9.0.3](https://github.com/awslabs/aws-delivlib/compare/v9.0.2...v9.0.3) (2020-10-30)\n\n### 
[9.0.2](https://github.com/awslabs/aws-delivlib/compare/v9.0.1...v9.0.2) (2020-10-29)\n\n### [9.0.1](https://github.com/awslabs/aws-delivlib/compare/v9.0.0...v9.0.1) (2020-10-28)\n\n## [9.0.0](https://github.com/awslabs/aws-delivlib/compare/v8.6.0...v9.0.0) (2020-10-27)\n\n\n### ⚠ BREAKING CHANGES\n\n* From this release on, `aws-delivlib` has a dependency\non `monocdk` instead of `monocdk-experiment`.\n\n### Bug Fixes\n\n* untracked files preventing checkout ([#530](https://github.com/awslabs/aws-delivlib/issues/530)) ([77a7bc2](https://github.com/awslabs/aws-delivlib/commit/77a7bc237f66ceb66244b95f20859bf25bef6b52))\n\n\n* upgrade to `monocdk` 1.70.0 ([7b7e9dd](https://github.com/awslabs/aws-delivlib/commit/7b7e9dd0a0c006f159c8cd669519c8f829d50a94))\n\n## [8.6.0](https://github.com/awslabs/aws-delivlib/compare/v8.4.8...v8.6.0) (2020-10-26)\n\n\n### Features\n\n* **bump:** skip push & PR if head is behind base ([4af1774](https://github.com/awslabs/aws-delivlib/commit/4af1774886ff05c74e8c781526f13c43665aaf24))\n\n\n### Bug Fixes\n\n* **bump:** a branch named '<branch>' already exists ([ba5e600](https://github.com/awslabs/aws-delivlib/commit/ba5e60030c6d52473db3108a3e33c4f6c6f11984))\n* **bump:** specify remote branch name in 'git push' ([53ea149](https://github.com/awslabs/aws-delivlib/commit/53ea149027c497350a91dafb0b6ea1695afafc7c))\n\n## [8.5.0](https://github.com/awslabs/aws-delivlib/compare/v8.4.8...v8.5.0) (2020-10-26)\n\n\n### Features\n\n* **bump:** skip push & PR if head is behind base ([4af1774](https://github.com/awslabs/aws-delivlib/commit/4af1774886ff05c74e8c781526f13c43665aaf24))\n\n\n### Bug Fixes\n\n* **bump:** a branch named '<branch>' already exists ([ba5e600](https://github.com/awslabs/aws-delivlib/commit/ba5e60030c6d52473db3108a3e33c4f6c6f11984))\n* **bump:** specify remote branch name in 'git push' ([53ea149](https://github.com/awslabs/aws-delivlib/commit/53ea149027c497350a91dafb0b6ea1695afafc7c))\n\n### 
[8.4.8](https://github.com/awslabs/aws-delivlib/compare/v8.4.7...v8.4.8) (2020-10-22)\n\n### [8.4.7](https://github.com/awslabs/aws-delivlib/compare/v8.4.6...v8.4.7) (2020-10-21)\n\n### [8.4.6](https://github.com/awslabs/aws-delivlib/compare/v8.4.5...v8.4.6) (2020-10-20)\n\n### [8.4.5](https://github.com/awslabs/aws-delivlib/compare/v8.4.4...v8.4.5) (2020-10-19)\n\n### [8.4.4](https://github.com/awslabs/aws-delivlib/compare/v8.4.3...v8.4.4) (2020-10-16)\n\n### [8.4.3](https://github.com/awslabs/aws-delivlib/compare/v8.4.2...v8.4.3) (2020-10-15)\n\n### [8.4.2](https://github.com/awslabs/aws-delivlib/compare/v8.4.1...v8.4.2) (2020-10-14)\n\n### [8.4.1](https://github.com/awslabs/aws-delivlib/compare/v8.4.0...v8.4.1) (2020-10-13)\n\n## [8.4.0](https://github.com/awslabs/aws-delivlib/compare/v8.3.18...v8.4.0) (2020-10-12)\n\n\n### Features\n\n* build timeout ([82df7bf](https://github.com/awslabs/aws-delivlib/commit/82df7bf9ce336e0ae65f8834d7e620040fe6f885))\n\n### [8.3.18](https://github.com/awslabs/aws-delivlib/compare/v8.3.17...v8.3.18) (2020-10-12)\n\n### [8.3.17](https://github.com/awslabs/aws-delivlib/compare/v8.3.16...v8.3.17) (2020-10-09)\n\n### [8.3.16](https://github.com/awslabs/aws-delivlib/compare/v8.3.15...v8.3.16) (2020-10-08)\n\n### [8.3.15](https://github.com/awslabs/aws-delivlib/compare/v8.3.14...v8.3.15) (2020-10-07)\n\n### [8.3.14](https://github.com/awslabs/aws-delivlib/compare/v8.3.13...v8.3.14) (2020-10-06)\n\n### [8.3.13](https://github.com/awslabs/aws-delivlib/compare/v8.3.12...v8.3.13) (2020-10-05)\n\n### [8.3.12](https://github.com/awslabs/aws-delivlib/compare/v8.3.11...v8.3.12) (2020-10-01)\n\n### [8.3.11](https://github.com/awslabs/aws-delivlib/compare/v8.3.10...v8.3.11) (2020-09-30)\n\n### [8.3.10](https://github.com/awslabs/aws-delivlib/compare/v8.3.9...v8.3.10) (2020-09-28)\n\n### [8.3.9](https://github.com/awslabs/aws-delivlib/compare/v8.3.8...v8.3.9) (2020-09-25)\n\n### 
[8.3.8](https://github.com/awslabs/aws-delivlib/compare/v8.3.7...v8.3.8) (2020-09-21)\n\n### [8.3.7](https://github.com/awslabs/aws-delivlib/compare/v8.3.6...v8.3.7) (2020-09-18)\n\n### [8.3.6](https://github.com/awslabs/aws-delivlib/compare/v8.3.5...v8.3.6) (2020-09-17)\n\n### [8.3.5](https://github.com/awslabs/aws-delivlib/compare/v8.3.4...v8.3.5) (2020-09-16)\n\n### [8.3.4](https://github.com/awslabs/aws-delivlib/compare/v8.3.3...v8.3.4) (2020-09-15)\n\n### [8.3.3](https://github.com/awslabs/aws-delivlib/compare/v8.3.2...v8.3.3) (2020-09-14)\n\n### [8.3.2](https://github.com/awslabs/aws-delivlib/compare/v8.3.1...v8.3.2) (2020-09-11)\n\n### [8.3.1](https://github.com/awslabs/aws-delivlib/compare/v8.3.0...v8.3.1) (2020-09-09)\n\n\n### Bug Fixes\n\n* auto bump does not work for delivlib's own pipeline ([dcc6d5c](https://github.com/awslabs/aws-delivlib/commit/dcc6d5c443c94138da8b7cdbda81a54a5bda9372))\n\n## [8.3.0](https://github.com/awslabs/aws-delivlib/compare/v8.2.7...v8.3.0) (2020-09-09)\n\n\n### Features\n\n* support assume role profiles ([1565302](https://github.com/awslabs/aws-delivlib/commit/1565302fe48e8189cb68a5af3bc1398c9fd473f2))\n\n\n### Bug Fixes\n\n* **pr:** AutoPR handle repos without master branch ([#451](https://github.com/awslabs/aws-delivlib/issues/451)) ([9adf11e](https://github.com/awslabs/aws-delivlib/commit/9adf11e94578f1e8031a7ef667eeb9717080ed23))\n\n### [8.2.7](https://github.com/awslabs/aws-delivlib/compare/v8.2.6...v8.2.7) (2020-08-23)\n\n### [8.2.6](https://github.com/awslabs/aws-delivlib/compare/v8.2.5...v8.2.6) (2020-08-21)\n\n### [8.2.5](https://github.com/awslabs/aws-delivlib/compare/v8.2.4...v8.2.5) (2020-08-19)\n\n### [8.2.4](https://github.com/awslabs/aws-delivlib/compare/v8.2.3...v8.2.4) (2020-08-18)\n\n### [8.2.3](https://github.com/awslabs/aws-delivlib/compare/v8.2.2...v8.2.3) (2020-08-15)\n\n### [8.2.2](https://github.com/awslabs/aws-delivlib/compare/v8.2.1...v8.2.2) (2020-08-13)\n\n### 
[8.2.1](https://github.com/awslabs/aws-delivlib/compare/v8.2.0...v8.2.1) (2020-08-11)\n\n\n### Bug Fixes\n\n* make changes to RSA key construct path backwards compatible  ([#431](https://github.com/awslabs/aws-delivlib/issues/431)) ([7ac1675](https://github.com/awslabs/aws-delivlib/commit/7ac1675bb177e9b4edc4a92af4f7c17d50e6bd97))\n\n## [8.2.0](https://github.com/awslabs/aws-delivlib/compare/v8.1.12...v8.2.0) (2020-08-11)\n\n\n### Features\n\n* expose STS endpoint type ([#377](https://github.com/awslabs/aws-delivlib/issues/377)) ([3f83f12](https://github.com/awslabs/aws-delivlib/commit/3f83f12b42f3a8273eb3856257c98c16ea649a41))\n\n### [8.1.12](https://github.com/awslabs/aws-delivlib/compare/v8.1.11...v8.1.12) (2020-08-08)\n\n### [8.1.11](https://github.com/awslabs/aws-delivlib/compare/v8.1.10...v8.1.11) (2020-08-06)\n\n### [8.1.10](https://github.com/awslabs/aws-delivlib/compare/v8.1.9...v8.1.10) (2020-08-01)\n\n### [8.1.9](https://github.com/awslabs/aws-delivlib/compare/v8.1.8...v8.1.9) (2020-07-31)\n\n### [8.1.8](https://github.com/awslabs/aws-delivlib/compare/v8.1.7...v8.1.8) (2020-07-29)\n\n### [8.1.7](https://github.com/awslabs/aws-delivlib/compare/v8.1.6...v8.1.7) (2020-07-24)\n\n### [8.1.6](https://github.com/awslabs/aws-delivlib/compare/v8.1.5...v8.1.6) (2020-07-23)\n\n### [8.1.5](https://github.com/awslabs/aws-delivlib/compare/v8.1.4...v8.1.5) (2020-07-21)\n\n### [8.1.4](https://github.com/awslabs/aws-delivlib/compare/v8.1.3...v8.1.4) (2020-07-19)\n\n### [8.1.3](https://github.com/awslabs/aws-delivlib/compare/v8.1.2...v8.1.3) (2020-07-17)\n\n### [8.1.2](https://github.com/awslabs/aws-delivlib/compare/v8.1.1...v8.1.2) (2020-07-14)\n\n### [8.1.1](https://github.com/awslabs/aws-delivlib/compare/v8.1.0...v8.1.1) (2020-07-12)\n\n## [8.1.0](https://github.com/awslabs/aws-delivlib/compare/v8.0.3...v8.1.0) (2020-07-10)\n\n\n### Features\n\n* add support for reports in  buildspec ([#401](https://github.com/awslabs/aws-delivlib/issues/401)) 
([1570ab5](https://github.com/awslabs/aws-delivlib/commit/1570ab58f53c1f05b857ac0c590806073aad0638))\n\n### [8.0.3](https://github.com/awslabs/aws-delivlib/compare/v8.0.2...v8.0.3) (2020-07-08)\n\n### [8.0.2](https://github.com/awslabs/aws-delivlib/compare/v8.0.1...v8.0.2) (2020-07-07)\n\n### [8.0.1](https://github.com/awslabs/aws-delivlib/compare/v8.0.0...v8.0.1) (2020-07-06)\n\n## [8.0.0](https://github.com/awslabs/aws-delivlib/compare/v7.6.4...v8.0.0) (2020-07-05)\n\n\n### ⚠ BREAKING CHANGES\n\n* **bump:** `AutoBump` API has significantly changed. Refer to the docstrings for guidance. \n\n* **bump:** refactor AutoBump to support more use-case more cleanly ([#324](https://github.com/awslabs/aws-delivlib/issues/324)) ([6a754bb](https://github.com/awslabs/aws-delivlib/commit/6a754bb1f222b0189453c4a598c794ed0f05552a)), closes [/github.com/awslabs/aws-delivlib/blob/master/lib/bump/bump.ts#L254](https://github.com/awslabs//github.com/awslabs/aws-delivlib/blob/master/lib/bump/bump.ts/issues/L254)\n\n### [7.6.4](https://github.com/awslabs/aws-delivlib/compare/v7.6.3...v7.6.4) (2020-07-03)\n\n### [7.6.3](https://github.com/awslabs/aws-delivlib/compare/v7.6.2...v7.6.3) (2020-07-02)\n\n### [7.6.2](https://github.com/awslabs/aws-delivlib/compare/v7.6.1...v7.6.2) (2020-07-01)\n\n### [7.6.1](https://github.com/awslabs/aws-delivlib/compare/v7.6.0...v7.6.1) (2020-06-25)\n\n## [7.6.0](https://github.com/awslabs/aws-delivlib/compare/v7.5.3...v7.6.0) (2020-06-24)\n\n\n### Features\n\n* allow setting a timeout for Shellable ([#375](https://github.com/awslabs/aws-delivlib/issues/375)) ([e729021](https://github.com/awslabs/aws-delivlib/commit/e729021e26baae864c7c6506322cb0a2dae87d0f))\n\n### [7.5.3](https://github.com/awslabs/aws-delivlib/compare/v7.5.2...v7.5.3) (2020-06-23)\n\n### [7.5.2](https://github.com/awslabs/aws-delivlib/compare/v7.5.1...v7.5.2) (2020-06-11)\n\n### [7.5.1](https://github.com/awslabs/aws-delivlib/compare/v7.5.0...v7.5.1) (2020-06-10)\n\n## 
[7.5.0](https://github.com/awslabs/aws-delivlib/compare/v7.4.0...v7.5.0) (2020-06-09)\n\n\n### Features\n\n* rename \"env\" to \"environment\" in BuildEnvironmentProps ([#359](https://github.com/awslabs/aws-delivlib/issues/359)) ([836e5e0](https://github.com/awslabs/aws-delivlib/commit/836e5e032e0f85afe513c8e59111be8df6d16bca))\n\n## [7.4.0](https://github.com/awslabs/aws-delivlib/compare/v7.3.2...v7.4.0) (2020-06-07)\n\n\n### Features\n\n* **auto-build:** delete previous links to build logs ([#301](https://github.com/awslabs/aws-delivlib/issues/301)) ([6607d6a](https://github.com/awslabs/aws-delivlib/commit/6607d6a09a5d1b550208c0c00f82bc748e3b920f))\n\n### [7.3.2](https://github.com/awslabs/aws-delivlib/compare/v7.3.1...v7.3.2) (2020-06-05)\n\n### [7.3.1](https://github.com/awslabs/aws-delivlib/compare/v7.3.0...v7.3.1) (2020-06-02)\n\n## [7.3.0](https://github.com/awslabs/aws-delivlib/compare/v7.2.2...v7.3.0) (2020-05-29)\n\n\n### Features\n\n* recurring event support for change controller ([#333](https://github.com/awslabs/aws-delivlib/issues/333)) ([8bc157a](https://github.com/awslabs/aws-delivlib/commit/8bc157afe63bbb32394162103beb94f400867264)), closes [#331](https://github.com/awslabs/aws-delivlib/issues/331)\n\n### [7.2.2](https://github.com/awslabs/aws-delivlib/compare/v7.2.1...v7.2.2) (2020-05-28)\n\n### [7.2.1](https://github.com/awslabs/aws-delivlib/compare/v7.2.0...v7.2.1) (2020-05-23)\n\n## [7.2.0](https://github.com/awslabs/aws-delivlib/compare/v7.1.26...v7.2.0) (2020-05-22)\n\n\n### Features\n\n* **autobump:** expose more config options ([#337](https://github.com/awslabs/aws-delivlib/issues/337)) ([6587579](https://github.com/awslabs/aws-delivlib/commit/6587579a0e53b5d0e0913191941874f2317086aa))\n\n### [7.1.26](https://github.com/awslabs/aws-delivlib/compare/v7.1.25...v7.1.26) (2020-05-21)\n\n### [7.1.25](https://github.com/awslabs/aws-delivlib/compare/v7.1.24...v7.1.25) (2020-05-19)\n\n### 
[7.1.24](https://github.com/awslabs/aws-delivlib/compare/v7.1.23...v7.1.24) (2020-05-14)\n\n### [7.1.23](https://github.com/awslabs/aws-delivlib/compare/v7.1.22...v7.1.23) (2020-05-08)\n\n### [7.1.22](https://github.com/awslabs/aws-delivlib/compare/v7.1.21...v7.1.22) (2020-05-07)\n\n### [7.1.21](https://github.com/awslabs/aws-delivlib/compare/v7.1.20...v7.1.21) (2020-05-06)\n\n### [7.1.20](https://github.com/awslabs/aws-delivlib/compare/v7.1.19...v7.1.20) (2020-05-05)\n\n### [7.1.19](https://github.com/awslabs/aws-delivlib/compare/v7.1.18...v7.1.19) (2020-05-02)\n\n### [7.1.18](https://github.com/awslabs/aws-delivlib/compare/v7.1.17...v7.1.18) (2020-05-01)\n\n### [7.1.17](https://github.com/awslabs/aws-delivlib/compare/v7.1.16...v7.1.17) (2020-04-30)\n\n### [7.1.16](https://github.com/awslabs/aws-delivlib/compare/v7.1.15...v7.1.16) (2020-04-29)\n\n### [7.1.15](https://github.com/awslabs/aws-delivlib/compare/v7.1.14...v7.1.15) (2020-04-27)\n\n### [7.1.14](https://github.com/awslabs/aws-delivlib/compare/v7.1.13...v7.1.14) (2020-04-25)\n\n### [7.1.13](https://github.com/awslabs/aws-delivlib/compare/v7.1.12...v7.1.13) (2020-04-22)\n\n### [7.1.12](https://github.com/awslabs/aws-delivlib/compare/v7.1.11...v7.1.12) (2020-04-21)\n\n### [7.1.11](https://github.com/awslabs/aws-delivlib/compare/v7.1.10...v7.1.11) (2020-04-20)\n\n### [7.1.10](https://github.com/awslabs/aws-delivlib/compare/v7.1.9...v7.1.10) (2020-04-14)\n\n### [7.1.9](https://github.com/awslabs/aws-delivlib/compare/v7.1.8...v7.1.9) (2020-04-10)\n\n### [7.1.8](https://github.com/awslabs/aws-delivlib/compare/v7.1.7...v7.1.8) (2020-04-09)\n\n### [7.1.7](https://github.com/awslabs/aws-delivlib/compare/v7.1.6...v7.1.7) (2020-04-08)\n\n### [7.1.6](https://github.com/awslabs/aws-delivlib/compare/v7.1.5...v7.1.6) (2020-04-04)\n\n### [7.1.5](https://github.com/awslabs/aws-delivlib/compare/v7.1.4...v7.1.5) (2020-04-03)\n\n### [7.1.4](https://github.com/awslabs/aws-delivlib/compare/v7.1.3...v7.1.4) (2020-04-02)\n\n### 
[7.1.3](https://github.com/awslabs/aws-delivlib/compare/v7.1.2...v7.1.3) (2020-03-30)\n\n### [7.1.2](https://github.com/awslabs/aws-delivlib/compare/v7.1.1...v7.1.2) (2020-03-27)\n\n### [7.1.1](https://github.com/awslabs/aws-delivlib/compare/v7.1.0...v7.1.1) (2020-03-26)\n\n## [7.1.0](https://github.com/awslabs/aws-delivlib/compare/v7.0.7...v7.1.0) (2020-03-25)\n\n\n### Features\n\n* **pipeline:** configure projectName and environment for AutoBuild ([#274](https://github.com/awslabs/aws-delivlib/issues/274)) ([f857464](https://github.com/awslabs/aws-delivlib/commit/f85746407992c27bf275426a0219a87408b6294c))\n\n### [7.0.7](https://github.com/awslabs/aws-delivlib/compare/v7.0.6...v7.0.7) (2020-03-19)\n\n### [7.0.6](https://github.com/awslabs/aws-delivlib/compare/v7.0.5...v7.0.6) (2020-03-17)\n\n### [7.0.5](https://github.com/awslabs/aws-delivlib/compare/v7.0.4...v7.0.5) (2020-03-14)\n\n### [7.0.4](https://github.com/awslabs/aws-delivlib/compare/v7.0.3...v7.0.4) (2020-03-12)\n\n### [7.0.3](https://github.com/awslabs/aws-delivlib/compare/v7.0.2...v7.0.3) (2020-03-10)\n\n\n### Bug Fixes\n\n* **pipeline:** cyclic dependency between CodePipeline::Pipeline and CodeBuild::Project ([#261](https://github.com/awslabs/aws-delivlib/issues/261)) ([6b42d0f](https://github.com/awslabs/aws-delivlib/commit/6b42d0f6a21082dfe100f0ccc5b34023173ff1cf))\n\n### [7.0.2](https://github.com/awslabs/aws-delivlib/compare/v7.0.1...v7.0.2) (2020-03-07)\n\n### [7.0.1](https://github.com/awslabs/aws-delivlib/compare/v7.0.0...v7.0.1) (2020-03-05)\n\n## [7.0.0](https://github.com/awslabs/aws-delivlib/compare/v6.1.1...v7.0.0) (2020-03-01)\n\n\n### ⚠ BREAKING CHANGES\n\n* to use this version, your application must depend on\nmonocdk-experiment.\n\n### Features\n\n* switch to MonoCDK (-experiment) ([#245](https://github.com/awslabs/aws-delivlib/issues/245)) ([f0863de](https://github.com/awslabs/aws-delivlib/commit/f0863de0029a50d4c1d9bb35254e44eb14a766fe))\n\n### 
[6.1.1](https://github.com/awslabs/aws-delivlib/compare/v6.1.0...v6.1.1) (2020-02-27)\n\n\n### Bug Fixes\n\n* Grant env var access in shellable when needed ([#253](https://github.com/awslabs/aws-delivlib/issues/253)) ([99b7288](https://github.com/awslabs/aws-delivlib/commit/99b7288b648409b3631db917689b2af4ea0e611b))\n\n## [6.1.0](https://github.com/awslabs/aws-delivlib/compare/v6.0.2...v6.1.0) (2020-02-27)\n\n\n### Features\n\n* Support Env Var Types in Shellable ([#251](https://github.com/awslabs/aws-delivlib/issues/251)) ([329abd5](https://github.com/awslabs/aws-delivlib/commit/329abd583dd14a29acaba9be858c3d6be6cfc94a))\n\n### [6.0.2](https://github.com/awslabs/aws-delivlib/compare/v6.0.1...v6.0.2) (2020-02-26)\n\n### [6.0.1](https://github.com/awslabs/aws-delivlib/compare/v6.0.0...v6.0.1) (2020-02-20)\n\n## [6.0.0](https://github.com/awslabs/aws-delivlib/compare/v5.7.3...v6.0.0) (2020-02-19)\n\n\n### ⚠ BREAKING CHANGES\n\n* **bump:** `repo.token` is now `tokenSecretArn` to enforce that the token is kept in AWS SecretsManager. 
Also, the API for `IRepo.createBuildSource` was modified.\n\n### Features\n\n* **bump:** create pull request for bumps ([#247](https://github.com/awslabs/aws-delivlib/issues/247)) ([4b00fa3](https://github.com/awslabs/aws-delivlib/commit/4b00fa34229db30e2a2f601f308a27f101ade69b))\n\n### [5.7.3](https://github.com/awslabs/aws-delivlib/compare/v5.7.2...v5.7.3) (2020-02-14)\n\n### [5.7.2](https://github.com/awslabs/aws-delivlib/compare/v5.7.1...v5.7.2) (2020-02-13)\n\n\n### Bug Fixes\n\n* public build logs are broken ([#243](https://github.com/awslabs/aws-delivlib/issues/243)) ([114abfc](https://github.com/awslabs/aws-delivlib/commit/114abfcafb23376f0f915f754025ce82a56c26c3))\n\n### [5.7.1](https://github.com/awslabs/aws-delivlib/compare/v5.7.0...v5.7.1) (2020-02-12)\n\n\n### Bug Fixes\n\n* **chime:** make Chime notifier actually deploy ([#240](https://github.com/awslabs/aws-delivlib/issues/240)) ([15c8b41](https://github.com/awslabs/aws-delivlib/commit/15c8b41cea0ee42aa5d82cc700230e8062c40bbc))\n\n## [5.7.0](https://github.com/awslabs/aws-delivlib/compare/v5.6.0...v5.7.0) (2020-02-12)\n\n\n### Features\n\n* **chime:** make the Chime notifier look up the failing action ([#239](https://github.com/awslabs/aws-delivlib/issues/239)) ([66152b3](https://github.com/awslabs/aws-delivlib/commit/66152b3c9ba1d71802bb87fa5c252b89d2fc16ea))\n\n## [5.6.0](https://github.com/awslabs/aws-delivlib/compare/v5.5.0...v5.6.0) (2020-02-09)\n\n\n### Features\n\n* add Chime notifications on failure ([#237](https://github.com/awslabs/aws-delivlib/issues/237)) ([8b95c62](https://github.com/awslabs/aws-delivlib/commit/8b95c625108e1897c667c6c8e511c1f296df104f))\n\n## [5.5.0](https://github.com/awslabs/aws-delivlib/compare/v5.4.2...v5.5.0) (2020-02-08)\n\n\n### Features\n\n* Change Maven Container to Superchain ([#224](https://github.com/awslabs/aws-delivlib/issues/224)) ([e0eeceb](https://github.com/awslabs/aws-delivlib/commit/e0eeceb6c4c991b9de8d3fbc7f3adb5b6508c4dc)), closes 
[#217](https://github.com/awslabs/aws-delivlib/issues/217)\n\n### [5.4.2](https://github.com/awslabs/aws-delivlib/compare/v5.4.1...v5.4.2) (2020-02-07)\n\n### [5.4.1](https://github.com/awslabs/aws-delivlib/compare/v5.4.0...v5.4.1) (2020-01-30)\n\n## [5.4.0](https://github.com/awslabs/aws-delivlib/compare/v5.3.0...v5.4.0) (2020-01-29)\n\n\n### Features\n\n* upgrade Custom Resources to use Lambda's Node10 runtime ([#226](https://github.com/awslabs/aws-delivlib/issues/226)) ([67c18e0](https://github.com/awslabs/aws-delivlib/commit/67c18e0693da8b9813374ffd40967ce561104a09)), closes [#220](https://github.com/awslabs/aws-delivlib/issues/220)\n\n## [5.3.0](https://github.com/awslabs/aws-delivlib/compare/v5.2.0...v5.3.0) (2020-01-24)\n\n\n### Features\n\n* try switching dependabot to automatic merging ([75b686e](https://github.com/awslabs/aws-delivlib/commit/75b686e19f9541b3cfac03d6f0ab8169f2a4e26a))\n\n## [5.2.0](https://github.com/awslabs/aws-delivlib/compare/v5.1.0...v5.2.0) (2020-01-10)\n\n\n### Features\n\n* allow branch-filtering on Pipeline auto-builds ([#210](https://github.com/awslabs/aws-delivlib/issues/210)) ([37ea238](https://github.com/awslabs/aws-delivlib/commit/37ea2380131acbc72920bcffa01cf06abbdf1611))\n\n## [5.1.0](https://github.com/awslabs/aws-delivlib/compare/v5.0.0...v5.1.0) (2020-01-04)\n\n\n### Features\n\n* update to CDK version 1.19.0 ([#209](https://github.com/awslabs/aws-delivlib/issues/209)) ([5e7cad2](https://github.com/awslabs/aws-delivlib/commit/5e7cad2))\n\n## [5.0.0](https://github.com/awslabs/aws-delivlib/compare/v4.6.0...v5.0.0) (2020-01-03)\n\n\n### ⚠ BREAKING CHANGES\n\n* AutoBuildOptions.buildSpec is now of type codebuild.BuildSpec\n* CanaryProps.scheduleExpression is now of type events.Schedule, and was renamed to 'schedule'\n* ChangeControllerProps.scheduleExpression is now of type events.Schedule, and was renamed to 'schedule'\n* RsaPrivateKeySecretProps.deletionPolicy has been renamed to 'removalPolicy'\n* PipelineProps.buildSpec 
is now of type codebuild.BuildSpec\n* GitHubRepo.tokenParameterName is now of type cdk.SecretValue, and was renamed to 'token'\n* ShellableOptions.alarmPeriodSec is now of type cdk.Duration, and was renamed to 'alarmPeriod'\n\n### Features\n\n* migrate library to General Availability CDK version ([e6602c1](https://github.com/awslabs/aws-delivlib/commit/e6602c1))\n\n## [4.6.0](https://github.com/awslabs/aws-delivlib/compare/v4.5.1...v4.6.0) (2019-12-11)\n\n\n### Features\n\n* migrate library to General Availability CDK version ([#184](https://github.com/awslabs/aws-delivlib/issues/184)) ([65a707d](https://github.com/awslabs/aws-delivlib/commit/65a707d))\n\n### [4.5.1](https://github.com/awslabs/aws-delivlib/compare/v4.5.0...v4.5.1) (2019-10-26)\n\n## [4.5.0](https://github.com/awslabs/aws-delivlib/compare/v4.4.4...v4.5.0) (2019-10-23)\n\n\n### Features\n\n* configure buildspec for AutoBuild project ([#169](https://github.com/awslabs/aws-delivlib/issues/169)) ([c9066f8](https://github.com/awslabs/aws-delivlib/commit/c9066f8))\n\n### [4.4.4](https://github.com/awslabs/aws-delivlib/compare/v4.4.3...v4.4.4) (2019-10-17)\n\n### [4.4.3](https://github.com/awslabs/aws-delivlib/compare/v4.4.2...v4.4.3) (2019-10-01)\n\n### [4.4.2](https://github.com/awslabs/aws-delivlib/compare/v4.4.1...v4.4.2) (2019-09-30)\n\n### [4.4.1](https://github.com/awslabs/aws-delivlib/compare/v4.4.0...v4.4.1) (2019-09-30)\n\n\n### Bug Fixes\n\n* **nuget:** migrate to snupkg symbols package format ([#153](https://github.com/awslabs/aws-delivlib/issues/153)) ([422c512](https://github.com/awslabs/aws-delivlib/commit/422c512))\n\n## [4.4.0](https://github.com/awslabs/aws-delivlib/compare/v4.3.0...v4.4.0) (2019-09-23)\n\n\n### Features\n\n* **maven:** allow configuring Maven endpoint ([#151](https://github.com/awslabs/aws-delivlib/issues/151)) ([d659f9c](https://github.com/awslabs/aws-delivlib/commit/d659f9c))\n\n## [4.3.0](https://github.com/awslabs/aws-delivlib/compare/v4.2.0...v4.3.0) 
(2019-09-13)\n\n\n### Bug Fixes\n\n* **bump:** add known_hosts before pushing to github ([047ca55](https://github.com/awslabs/aws-delivlib/commit/047ca55))\n\n\n### Features\n\n* auto-build (with public logs) ([4cbc8ab](https://github.com/awslabs/aws-delivlib/commit/4cbc8ab)), closes [#42](https://github.com/awslabs/aws-delivlib/issues/42)\n* update \"github-codebuild-logs\" sar app to 1.0.4 ([1d0a90d](https://github.com/awslabs/aws-delivlib/commit/1d0a90d))\n\n## 4.2.0 (2019-09-12)\n\n\n### Features\n\n* auto-build (with public logs) ([#146](https://github.com/awslabs/aws-delivlib/issues/146)) ([c3cac7e](https://github.com/awslabs/aws-delivlib/commit/c3cac7e)), closes [#42](https://github.com/awslabs/aws-delivlib/issues/42)\n\n## [4.1.0](https://github.com/awslabs/aws-delivlib/compare/v4.0.0...v4.1.0) (2019-08-26)\n\n\n### Features\n\n* use mono's signcode to sign .NET assemblies ([#133](https://github.com/awslabs/aws-delivlib/issues/133)) ([630f3c6](https://github.com/awslabs/aws-delivlib/commit/630f3c6))\n\n## [4.0.0](https://github.com/awslabs/aws-delivlib/compare/v3.9.5...v4.0.0) (2019-08-06)\n\n\n### ⚠ BREAKING CHANGES\n\n* The `Superchain` construct was removed. 
The default\nbuild image was changed to `jsii/superchain` instead of being a bundled\nimage staged in an ECR registry.\n\n### Features\n\n* use `jsii/superchain` image instead of bundling own ([#121](https://github.com/awslabs/aws-delivlib/issues/121)) ([59aeb80](https://github.com/awslabs/aws-delivlib/commit/59aeb80)), closes [aws/jsii#653](https://github.com/aws/jsii/issues/653)\n\n### [3.9.5](https://github.com/awslabs/aws-delivlib/compare/v3.9.4...v3.9.5) (2019-07-17)\n\n\n\n### [3.9.4](https://github.com/awslabs/aws-delivlib/compare/v3.9.3...v3.9.4) (2019-07-16)\n\n\n\n### [3.9.3](https://github.com/awslabs/aws-delivlib/compare/v3.9.2...v3.9.3) (2019-06-06)\n\n\n\n## [3.9.2](https://github.com/awslabs/aws-delivlib/compare/v3.9.1...v3.9.2) (2019-06-04)\n\n\n\n## [3.9.1](https://github.com/awslabs/aws-delivlib/compare/v3.9.0...v3.9.1) (2019-06-03)\n\n\n### Bug Fixes\n\n* Stop pulling GPG keys from the internets ([#96](https://github.com/awslabs/aws-delivlib/issues/96)) ([87db0da](https://github.com/awslabs/aws-delivlib/commit/87db0da))\n\n\n\n# [3.9.0](https://github.com/awslabs/aws-delivlib/compare/v3.8.2...v3.9.0) (2019-05-29)\n\n\n### Features\n\n* **shellable:** support privileged mode ([#95](https://github.com/awslabs/aws-delivlib/issues/95)) ([2558c6e](https://github.com/awslabs/aws-delivlib/commit/2558c6e))\n\n\n\n## [3.8.2](https://github.com/awslabs/aws-delivlib/compare/v3.8.0...v3.8.2) (2019-05-21)\n\n\n\n# [3.8.1](https://github.com/awslabs/aws-delivlib/compare/v3.8.0...v3.8.1) (2019-05-20)\n\n### Build\n\n* Upgraded contents of Superchain Docker image\n\n\n# [3.8.0](https://github.com/awslabs/aws-delivlib/compare/v3.7.0...v3.8.0) (2019-04-11)\n\n\n### Features\n\n* support npm disttags ([#91](https://github.com/awslabs/aws-delivlib/issues/91)) ([90aa1d0](https://github.com/awslabs/aws-delivlib/commit/90aa1d0))\n\n\n\n<a name=\"3.7.1\"></a>\n# [3.7.1](https://github.com/awslabs/aws-delivlib/compare/v3.7.0...v3.7.1) (2019-04-11)\n\n\n### Bug Fixes\n\n* 
**nuget-sign:** Use  osslsigncode for now, so SHA256 signatures can be used ([#92](https://github.com/awslabs/aws-delivlib/pull/92)) ([e2855af](https://github.com/awslabs/aws-delivlib/commit/e2855af))\n\n\n\n<a name=\"3.7.0\"></a>\n# [3.7.0](https://github.com/awslabs/aws-delivlib/compare/v3.6.3...v3.7.0) (2019-04-10)\n\n\n### Features\n\n* upgrade superchain to dotnet to 2.2.202 ([#87](https://github.com/awslabs/aws-delivlib/issues/87)) ([1b74842](https://github.com/awslabs/aws-delivlib/commit/1b74842))\n\n\n\n<a name=\"3.6.3\"></a>\n## [3.6.3](https://github.com/awslabs/aws-delivlib/compare/v3.6.2...v3.6.3) (2019-04-09)\n\n\n### Bug Fixes\n\n* **autobump:** stop AutoBump from releasing 0 changes ([#89](https://github.com/awslabs/aws-delivlib/issues/89)) ([a271016](https://github.com/awslabs/aws-delivlib/commit/a271016))\n\n\n\n<a name=\"3.6.2\"></a>\n## [3.6.2](https://github.com/awslabs/aws-delivlib/compare/v3.6.1...v3.6.2) (2019-04-09)\n\n\n\n<a name=\"3.6.1\"></a>\n## [3.6.1](https://github.com/awslabs/aws-delivlib/compare/v3.6.0...v3.6.1) (2019-04-09)\n\n\n\n<a name=\"3.6.0\"></a>\n# [3.6.0](https://github.com/awslabs/aws-delivlib/compare/v3.5.18...v3.6.0) (2019-04-09)\n\n\n### Features\n\n* shellable alarm configuration ([#88](https://github.com/awslabs/aws-delivlib/issues/88)) ([4beddad](https://github.com/awslabs/aws-delivlib/commit/4beddad)), closes [awslabs/cdk-ops#329](https://github.com/awslabs/cdk-ops/issues/329)\n\n\n\n<a name=\"3.5.18\"></a>\n## [3.5.18](https://github.com/awslabs/aws-delivlib/compare/v3.5.17...v3.5.18) (2019-04-08)\n\n\n\n<a name=\"3.5.17\"></a>\n## [3.5.17](https://github.com/awslabs/aws-delivlib/compare/v3.5.16...v3.5.17) (2019-04-07)\n\n\n\n<a name=\"3.5.16\"></a>\n## [3.5.16](https://github.com/awslabs/aws-delivlib/compare/v3.5.15...v3.5.16) (2019-04-06)\n\n\n\n<a name=\"3.5.15\"></a>\n## [3.5.15](https://github.com/awslabs/aws-delivlib/compare/v3.5.14...v3.5.15) (2019-04-05)\n\n\n\n<a name=\"3.5.14\"></a>\n## 
[3.5.14](https://github.com/awslabs/aws-delivlib/compare/v3.5.13...v3.5.14) (2019-04-04)\n\n\n\n<a name=\"3.5.13\"></a>\n## [3.5.13](https://github.com/awslabs/aws-delivlib/compare/v3.5.12...v3.5.13) (2019-04-03)\n\n\n\n<a name=\"3.5.12\"></a>\n## [3.5.12](https://github.com/awslabs/aws-delivlib/compare/v3.5.11...v3.5.12) (2019-04-02)\n\n\n\n<a name=\"3.5.11\"></a>\n## [3.5.11](https://github.com/awslabs/aws-delivlib/compare/v3.5.10...v3.5.11) (2019-04-01)\n\n\n\n<a name=\"3.5.10\"></a>\n## [3.5.10](https://github.com/awslabs/aws-delivlib/compare/v3.5.9...v3.5.10) (2019-03-31)\n\n\n\n<a name=\"3.5.9\"></a>\n## [3.5.9](https://github.com/awslabs/aws-delivlib/compare/v3.5.8...v3.5.9) (2019-03-30)\n\n\n\n<a name=\"3.5.8\"></a>\n## [3.5.8](https://github.com/awslabs/aws-delivlib/compare/v3.5.7...v3.5.8) (2019-03-29)\n\n\n\n<a name=\"3.5.7\"></a>\n## [3.5.7](https://github.com/awslabs/aws-delivlib/compare/v3.5.6...v3.5.7) (2019-03-28)\n\n\n\n<a name=\"3.5.6\"></a>\n## [3.5.6](https://github.com/awslabs/aws-delivlib/compare/v3.5.5...v3.5.6) (2019-03-27)\n\n\n\n<a name=\"3.5.5\"></a>\n## [3.5.5](https://github.com/awslabs/aws-delivlib/compare/v3.5.4...v3.5.5) (2019-03-26)\n\n\n\n<a name=\"3.5.4\"></a>\n## [3.5.4](https://github.com/awslabs/aws-delivlib/compare/v3.5.3...v3.5.4) (2019-03-25)\n\n\n\n<a name=\"3.5.3\"></a>\n## [3.5.3](https://github.com/awslabs/aws-delivlib/compare/v3.5.2...v3.5.3) (2019-03-24)\n\n\n\n<a name=\"3.5.2\"></a>\n## [3.5.2](https://github.com/awslabs/aws-delivlib/compare/v3.5.1...v3.5.2) (2019-03-23)\n\n\n\n<a name=\"3.5.1\"></a>\n## [3.5.1](https://github.com/awslabs/aws-delivlib/compare/v3.5.0...v3.5.1) (2019-03-22)\n\n\n\n<a name=\"3.5.0\"></a>\n# [3.5.0](https://github.com/awslabs/aws-delivlib/compare/v3.4.9...v3.5.0) (2019-03-21)\n\n\n### Features\n\n* PyPI publisher ([#84](https://github.com/awslabs/aws-delivlib/issues/84)) ([9ccce36](https://github.com/awslabs/aws-delivlib/commit/9ccce36))\n\n\n\n<a name=\"3.4.9\"></a>\n## 
[3.4.9](https://github.com/awslabs/aws-delivlib/compare/v3.4.8...v3.4.9) (2019-03-20)\n\n\n\n<a name=\"3.4.8\"></a>\n## [3.4.8](https://github.com/awslabs/aws-delivlib/compare/v3.4.7...v3.4.8) (2019-03-19)\n\n\n\n<a name=\"3.4.7\"></a>\n## [3.4.7](https://github.com/awslabs/aws-delivlib/compare/v3.4.6...v3.4.7) (2019-03-18)\n\n\n\n<a name=\"3.4.6\"></a>\n## [3.4.6](https://github.com/awslabs/aws-delivlib/compare/v3.4.5...v3.4.6) (2019-03-17)\n\n\n\n<a name=\"3.4.5\"></a>\n## [3.4.5](https://github.com/awslabs/aws-delivlib/compare/v3.4.4...v3.4.5) (2019-03-16)\n\n\n\n<a name=\"3.4.4\"></a>\n## [3.4.4](https://github.com/awslabs/aws-delivlib/compare/v3.4.3...v3.4.4) (2019-03-15)\n\n\n\n<a name=\"3.4.3\"></a>\n## [3.4.3](https://github.com/awslabs/aws-delivlib/compare/v3.4.2...v3.4.3) (2019-03-14)\n\n\n\n<a name=\"3.4.2\"></a>\n## [3.4.2](https://github.com/awslabs/aws-delivlib/compare/v3.4.1...v3.4.2) (2019-03-13)\n\n\n\n<a name=\"3.4.1\"></a>\n## [3.4.1](https://github.com/awslabs/aws-delivlib/compare/v3.4.0...v3.4.1) (2019-03-12)\n\n\n\n<a name=\"3.4.0\"></a>\n# [3.4.0](https://github.com/awslabs/aws-delivlib/compare/v3.2.13...v3.4.0) (2019-03-11)\n\n\n### Features\n\n* **s3:** make S3 publisher idempotent ([#81](https://github.com/awslabs/aws-delivlib/issues/81)) ([d8bc2d8](https://github.com/awslabs/aws-delivlib/commit/d8bc2d8))\n\n\n\n<a name=\"3.3.0\"></a>\n# [3.3.0](https://github.com/awslabs/aws-delivlib/compare/v3.2.13...v3.3.0) (2019-03-11)\n\n\n### Features\n\n* **s3:** make S3 publisher idempotent ([#81](https://github.com/awslabs/aws-delivlib/issues/81)) ([d8bc2d8](https://github.com/awslabs/aws-delivlib/commit/d8bc2d8))\n\n\n\n<a name=\"3.2.13\"></a>\n## [3.2.13](https://github.com/awslabs/aws-delivlib/compare/v3.2.12...v3.2.13) (2019-03-10)\n\n\n\n<a name=\"3.2.12\"></a>\n## [3.2.12](https://github.com/awslabs/aws-delivlib/compare/v3.2.11...v3.2.12) (2019-03-09)\n\n\n\n<a name=\"3.2.11\"></a>\n## 
[3.2.11](https://github.com/awslabs/aws-delivlib/compare/v3.2.10...v3.2.11) (2019-03-08)\n\n\n\n<a name=\"3.2.10\"></a>\n## [3.2.10](https://github.com/awslabs/aws-delivlib/compare/v3.2.9...v3.2.10) (2019-03-07)\n\n\n\n<a name=\"3.2.9\"></a>\n## [3.2.9](https://github.com/awslabs/aws-delivlib/compare/v3.2.8...v3.2.9) (2019-03-06)\n\n\n\n<a name=\"3.2.8\"></a>\n## [3.2.8](https://github.com/awslabs/aws-delivlib/compare/v3.2.7...v3.2.8) (2019-03-05)\n\n\n\n<a name=\"3.2.7\"></a>\n## [3.2.7](https://github.com/awslabs/aws-delivlib/compare/v3.2.6...v3.2.7) (2019-03-04)\n\n\n\n<a name=\"3.2.6\"></a>\n## [3.2.6](https://github.com/awslabs/aws-delivlib/compare/v3.2.5...v3.2.6) (2019-03-03)\n\n\n\n<a name=\"3.2.5\"></a>\n## [3.2.5](https://github.com/awslabs/aws-delivlib/compare/v3.2.4...v3.2.5) (2019-03-02)\n\n\n\n<a name=\"3.2.4\"></a>\n## [3.2.4](https://github.com/awslabs/aws-delivlib/compare/v3.2.3...v3.2.4) (2019-03-01)\n\n\n\n<a name=\"3.2.3\"></a>\n## [3.2.3](https://github.com/awslabs/aws-delivlib/compare/v3.2.2...v3.2.3) (2019-02-28)\n\n\n\n<a name=\"3.2.2\"></a>\n## [3.2.2](https://github.com/awslabs/aws-delivlib/compare/v3.2.1...v3.2.2) (2019-02-27)\n\n\n\n<a name=\"3.2.1\"></a>\n## [3.2.1](https://github.com/awslabs/aws-delivlib/compare/v3.2.0...v3.2.1) (2019-02-26)\n\n\n\n<a name=\"3.2.0\"></a>\n# [3.2.0](https://github.com/awslabs/aws-delivlib/compare/v3.0.0...v3.2.0) (2019-02-25)\n\n\n### Features\n\n* **superchain:** add MSBuild to Superchain image ([#76](https://github.com/awslabs/aws-delivlib/issues/76)) ([b2f1dfa](https://github.com/awslabs/aws-delivlib/commit/b2f1dfa))\n* automatic bumps ([#12](https://github.com/awslabs/aws-delivlib/issues/12)) ([39ea8a0](https://github.com/awslabs/aws-delivlib/commit/39ea8a0)), closes [awslabs/cdk-ops#103](https://github.com/awslabs/cdk-ops/issues/103)\n* make it possible to add arbitrary processing steps ([#77](https://github.com/awslabs/aws-delivlib/issues/77)) 
([f2ceb8a](https://github.com/awslabs/aws-delivlib/commit/f2ceb8a))\n\n\n\n<a name=\"3.1.0\"></a>\n# [3.1.0](https://github.com/awslabs/aws-delivlib/compare/v3.0.0...v3.1.0) (2019-02-25)\n\n\n### Features\n\n* make it possible to add arbitrary processing steps ([#77](https://github.com/awslabs/aws-delivlib/issues/77)) ([f2ceb8a](https://github.com/awslabs/aws-delivlib/commit/f2ceb8a))\n* **superchain:** add MSBuild to Superchain image ([#76](https://github.com/awslabs/aws-delivlib/issues/76)) ([b2f1dfa](https://github.com/awslabs/aws-delivlib/commit/b2f1dfa))\n\n\n\n<a name=\"3.0.0\"></a>\n## [3.0.0](https://github.com/awslabs/aws-delivlib/compare/v2.0.1...v3.0.0) (2019-02-20)\n\n\n### Bug Fixes\n\n* Correctly set environment before using gpg ([#69](https://github.com/awslabs/aws-delivlib/issues/69)) ([19aeed5](https://github.com/awslabs/aws-delivlib/commit/19aeed5))\n* Don't attempt deleting OpenPGP keys' secrets ([#70](https://github.com/awslabs/aws-delivlib/issues/70)) ([de02f7c](https://github.com/awslabs/aws-delivlib/commit/de02f7c))\n* Upgrade npm if 'npm ci' is unsupported ([#72](https://github.com/awslabs/aws-delivlib/issues/72)) ([e8a19ca](https://github.com/awslabs/aws-delivlib/commit/e8a19ca))\n\n\n### Features\n\n* Rename PGPSecret to OpenPGPKeyPair ([#67](https://github.com/awslabs/aws-delivlib/issues/67)) ([c540def](https://github.com/awslabs/aws-delivlib/commit/c540def))\n* Support Change Control Policies ([#71](https://github.com/awslabs/aws-delivlib/issues/71)) ([82acca9](https://github.com/awslabs/aws-delivlib/commit/82acca9)), closes [awslabs/cdk-ops#231](https://github.com/awslabs/cdk-ops/issues/231)\n\n\n### BREAKING CHANGES\n\n* The `PGPSecret` class was renamed to `OpenPGPKeyPair`.\n\n\n\n<a name=\"2.0.1\"></a>\n## [2.0.1](https://github.com/awslabs/aws-delivlib/compare/v2.0.0...v2.0.1) (2019-02-11)\n\n### Bug Fixes\n\n* Add missing permission to PGPSecret CustomResource\n\n<a name=\"2.0.0\"></a>\n## 
[2.0.0](https://github.com/awslabs/aws-delivlib/compare/v1.0.0...v2.0.0) (2019-02-11)\n\n\n### Features\n\n* Create OpenPGP Public Key parameter using SSM resource ([#63](https://github.com/awslabs/aws-delivlib/issues/63)) ([a3510f1](https://github.com/awslabs/aws-delivlib/commit/a3510f1))\n* Move permission grant function to PGPSecret ([#62](https://github.com/awslabs/aws-delivlib/issues/62)) ([7c6809a](https://github.com/awslabs/aws-delivlib/commit/7c6809a))\n\n### BREAKING CHANGES\n\n* `ICredentialPair` now conveys `ssm.IStringParameter` and `secretsManager.ISecret` instead of the ARNs and related attributes of those.\n\n\n<a name=\"1.0.0\"></a>\n## [1.0.0](https://github.com/awslabs/aws-delivlib/compare/v0.4.0...v1.0.0) (2019-01-29)\n\n\n### Bug Fixes\n\n* Correctly model accepted/required attributes ([#35](https://github.com/awslabs/aws-delivlib/issues/35)) ([52bdccb](https://github.com/awslabs/aws-delivlib/commit/52bdccb))\n* pgp-secret did not store passphrase in secrets manager ([#45](https://github.com/awslabs/aws-delivlib/issues/45)) ([d8f9dbc](https://github.com/awslabs/aws-delivlib/commit/d8f9dbc))\n* Stop surfacing and using secret VersionIds ([#33](https://github.com/awslabs/aws-delivlib/issues/33)) ([afbd204](https://github.com/awslabs/aws-delivlib/commit/afbd204))\n\n\n### Code Refactoring\n\n* improvements to shellable, testable and canary ([#46](https://github.com/awslabs/aws-delivlib/issues/46)) ([2446bd1](https://github.com/awslabs/aws-delivlib/commit/2446bd1))\n\n\n### Features\n\n* wrap the superchain image in a Superchain construct. 
([#38](https://github.com/awslabs/aws-delivlib/issues/38)) ([5713727](https://github.com/awslabs/aws-delivlib/commit/5713727))\n* **shellable:** assume-role ([#47](https://github.com/awslabs/aws-delivlib/issues/47)) ([1b9ef5d](https://github.com/awslabs/aws-delivlib/commit/1b9ef5d))\n\n\n### BREAKING CHANGES\n\n* `Testable` has been removed, `environmentVariables`\nhas been renamed to `env` and changed schema; `pipeline.env` renamed to `environment`.\n\n\n\n<a name=\"0.5.0\"></a>\n## [0.5.0](https://github.com/awslabs/aws-delivlib/compare/v0.4.0...v0.5.0) (2019-01-15)\n\n\n### Bug Fixes\n\n* Correctly model accepted/required attributes ([#35](https://github.com/awslabs/aws-delivlib/issues/35)) ([52bdccb](https://github.com/awslabs/aws-delivlib/commit/52bdccb))\n* Stop surfacing and using secret VersionIds ([#33](https://github.com/awslabs/aws-delivlib/issues/33)) ([afbd204](https://github.com/awslabs/aws-delivlib/commit/afbd204))\n\n\n### Features\n\n* wrap the superchain image in a Superchain construct. 
([#38](https://github.com/awslabs/aws-delivlib/issues/38)) ([5713727](https://github.com/awslabs/aws-delivlib/commit/5713727))\n\n\n\n<a name=\"0.4.0\"></a>\n## [0.4.0](https://github.com/awslabs/aws-delivlib/compare/v0.3.2...v0.4.0) (2019-01-07)\n\n### Features\n\n* Allow update of PGPSecret and PrivateKey ([#20](https://github.com/awslabs/aws-delivlib/issues/20)) ([bfc6225](https://github.com/awslabs/aws-delivlib/commit/bfc6225))\n\n### BREAKING CHANGES\n\n* This changes the API of the PGPSecret and CodeSigningCertificate constructs to offer a consistent API for accessing the name\nand ARNs of the secret and parameters associated with the secrets, through the `ICredentialPair` interface.\n\n\n<a name=\"0.3.2\"></a>\n## [0.3.2](https://github.com/awslabs/aws-delivlib/compare/v0.3.1...v0.3.2) (2018-12-20)\n\n\n### Bug Fixes\n\n* upgrade changelog parser ([#28](https://github.com/awslabs/aws-delivlib/issues/28)) ([813e837](https://github.com/awslabs/aws-delivlib/commit/813e837))\n\n\n<a name=\"0.3.1\"></a>\n## [0.3.1](https://github.com/awslabs/aws-delivlib/compare/v0.3.0...v0.3.1) (2018-12-19)\n\n### Bug Fixes\n\n* do not assume executable permissions on publishing scripts ([#25](https://github.com/awslabs/aws-delivlib/issues/25)) ([6832ebe](https://github.com/awslabs/aws-delivlib/commit/6832ebe))\n\n### Features\n\n* **pgp-secret:** Surface parameterName attribute ([#17](https://github.com/awslabs/aws-delivlib/issues/17)) ([972a1c9](https://github.com/awslabs/aws-delivlib/commit/972a1c9))\n\n<a name=\"0.3.0\"></a>\n## 0.3.0 (2018-12-18)\n\n\n### Bug Fixes\n\n* Correctly import requests ([#15](https://github.com/awslabs/aws-delivlib/issues/15)) ([637290e](https://github.com/awslabs/aws-delivlib/commit/637290e))\n* Custom resource behavior ([40885c0](https://github.com/awslabs/aws-delivlib/commit/40885c0))\n* Logger reference in CSC custom resources ([#14](https://github.com/awslabs/aws-delivlib/issues/14)) 
([4c0bca6](https://github.com/awslabs/aws-delivlib/commit/4c0bca6))\n\n\n### Features\n\n* **gh-pages-publisher:** force-push without history ([#7](https://github.com/awslabs/aws-delivlib/issues/7)) ([e062ab7](https://github.com/awslabs/aws-delivlib/commit/e062ab7))\n* **github-releases:** if changelog doesn't exist, don't include release notes ([#8](https://github.com/awslabs/aws-delivlib/issues/8)) ([ab0d58c](https://github.com/awslabs/aws-delivlib/commit/ab0d58c))\n* **pipeline:** concurrency limit ([#9](https://github.com/awslabs/aws-delivlib/issues/9)) ([268a128](https://github.com/awslabs/aws-delivlib/commit/268a128))\n* **pipeline:** send email notifications on any action failure ([#10](https://github.com/awslabs/aws-delivlib/issues/10)) ([dab2348](https://github.com/awslabs/aws-delivlib/commit/dab2348))\n* expose failure alarm to allow developers to configure hooks ([#18](https://github.com/awslabs/aws-delivlib/issues/18)) ([2ed0f16](https://github.com/awslabs/aws-delivlib/commit/2ed0f16))\n* NuGet assemblies code signing ([#2](https://github.com/awslabs/aws-delivlib/issues/2)) ([e715c65](https://github.com/awslabs/aws-delivlib/commit/e715c65))\n\n\n\n# Change log\n\n## [0.2.1](https://github.com/awslabs/aws-cdk/compare/v0.2.0...v0.2.1) (2018-12-17)\n\n### Fixes\n\n* **code-signing-certificate**: fix behavior of custom resources ([#15](https://github.com/awslabs/aws-delivlib/pull/15) and [40885c0](https://github.com/awslabs/aws-delivlib/commit/40885c01b0a75fd9a41e64264fce7afcc1337194))\n\n## [0.2.0](https://github.com/awslabs/aws-cdk/compare/v0.1.2...v0.2.0) (2018-12-13)\n\n### Features\n\n* **pipeline**: concurrency limit ([#9](https://github.com/awslabs/aws-delivlib/pull/9))\n* **gh-pages-publisher**: force-push without history ([#7](https://github.com/awslabs/aws-delivlib/pull/7))\n* **pipeline**: send email notifications on any action failure ([#10](https://github.com/awslabs/aws-delivlib/pull/10))\n* **github-releases**: if changelog doesn't exist, 
don't include release notes ([#8](https://github.com/awslabs/aws-delivlib/pull/8))\n* **pipeline**: raise an alarm when any stages are in a Failed state ([#6](https://github.com/awslabs/aws-delivlib/pull/6))\n\n## [0.1.2](https://github.com/awslabs/aws-cdk/compare/v0.1.1...v0.1.2) (2018-12-12)\n\n### Features\n\n* NuGet publisher now supports X509 code signing ([#2](https://github.com/awslabs/aws-delivlib/pull/2)) ([e715c65](https://github.com/awslabs/aws-delivlib/commit/e715c65))\n* The CodePipeline can be physical-named ([#3](https://github.com/awslabs/aws-delivlib/pull/3)) ([f38a8a3](https://github.com/awslabs/aws-delivlib/commit/f38a8a3))\n"
  },
  {
    "path": "CODE_OF_CONDUCT.md",
    "content": "## Code of Conduct\nThis project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).\nFor more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact\nopensource-codeofconduct@amazon.com with any additional questions or comments.\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "# Contributing Guidelines\n\nThank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional\ndocumentation, we greatly value feedback and contributions from our community.\n\nPlease read through this document before submitting any issues or pull requests to ensure we have all the necessary\ninformation to effectively respond to your bug report or contribution.\n\n## Reporting Bugs/Feature Requests\n\nWe welcome you to use the GitHub issue tracker to report bugs or suggest features.\n\nWhen filing an issue, please check [existing open](https://github.com/awslabs/aws-delivlib/issues), or [recently closed](https://github.com/awslabs/aws-delivlib/issues?utf8=%E2%9C%93&q=is%3Aissue%20is%3Aclosed%20), issues to make sure somebody else hasn't already\nreported the issue. Please try to include as much information as you can. Details like these are incredibly useful:\n\n* A reproducible test case or series of steps\n* The version of our code being used\n* Any modifications you've made relevant to the bug\n* Anything unusual about your environment or deployment\n\n## Contributing via Pull Requests\n\nContributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:\n\n1. You are working against the latest source on the *main* branch.\n2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.\n3. You open an issue to discuss any significant work - we would hate for your time to be wasted.\n\nTo send us a pull request, please:\n\n1. Fork the repository.\n2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.\n3. Ensure local tests pass.\n4. Commit to your fork using clear commit messages.\n5. Send us a pull request, answering any default questions in the pull request interface.\n6. 
Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.\n\nGitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and\n[creating a pull request](https://help.github.com/articles/creating-a-pull-request/).\n\n## Finding contributions to work on\n\nLooking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](https://github.com/awslabs/aws-delivlib/labels/help%20wanted) issues is a great place to start.\n\n## Code of Conduct\n\nThis project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).\nFor more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact\nopensource-codeofconduct@amazon.com with any additional questions or comments.\n\n## Security issue notifications\n\nIf you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue.\n\n## Development Environment\n\nTo set up a development environment:\n\n1. Clone the repo\n2. Run `yarn install`\n3. Run `yarn build` (or `yarn watch`) to compile typescript\n4. Run `yarn test`\n\n## Testing\n\nWe have good coverage of unit tests that should be testing the bulk of the logic in delivlib. 
For every contribution and change,\nwe expect them to be covered by unit tests, where appropriate.\n\nYou can run the tests by executing:\n\n```console\nyarn compile\nyarn test\n```\n\nBesides this, there is a delivlib instance deployed to an AWS account (712950704752) that configures a delivlib pipeline for\nthe package [aws-delivlib-sample](https://github.com/awslabs/aws-delivlib-sample).\n\nYou should use this code to validate more elaborate changes to the Delivlib code base. To do this,\n\n1. Setup credentials to our AWS account: 712950704752\n2. Execute `yarn integ:update`. This will update the delivlib instance.\n\nAt this point, you will find the resources created by delivlib in the stack whose ARN is printed to the console. Wait for the\ndeployment to complete, and are then free to test and verify that your changes had the intended effect.\n\n> NOTE: you might need to manually replicate the CDKlabs credentials to the publishing account. We don't do\n> this often enough to make it worthwhile investing into automation for it.\n\n## Releasing a New Version\n\nEvery commit pushed to main will be picked up by the build & release pipeline automatically,\nso there's nothing manual you need to do to release a new version.\n\n## Licensing\n\nSee the [LICENSE](https://github.com/awslabs/aws-delivlib/blob/main/LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.\n\nWe may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes.\n"
  },
  {
    "path": "LICENSE",
    "content": "\n                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright [yyyy] [name of copyright owner]\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "NOTICE",
    "content": "AWS Delivlib\nCopyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. \n"
  },
  {
    "path": "README.md",
    "content": "## aws-delivlib\n\n[![experimental](http://badges.github.io/stability-badges/dist/experimental.svg)](http://github.com/badges/stability-badges)\n\n**aws-delivlib** is a fabulous library for defining continuous pipelines for\nbuilding, testing and publishing code libraries through AWS CodeBuild and AWS\nCodePipeline.\n\n**aws-delivlib** is used by the [AWS Cloud Development Kit](https://github.com/awslabs/aws-cdk) and was\ndesigned to support simultaneous delivery of the AWS CDK in multiple programming languages\npackaged via [jsii](https://github.com/awslabs/jsii).\n\n## Pipeline Structure\n\nA delivlib pipeline consists of the following sequential stages. Each stage will\nexecute all tasks concurrently:\n\n```\n+-----------+     +-----------+     +-----------+     +----------------+\n|  Source   +---->+   Build   +---->+   Test    +---->+    Publish     |\n+-----------+     +-----------+     +-----+-----+     +-------+--------+\n                                          |                   |\n                                          v                   v\n                                    +-----+-----+     +-------+-------+\n                                    |   Test1   |     |      npm      |\n                                    +-----------+     +---------------+\n                                    |   Test2   |     |     NuGet     |\n                                    +-----------+     +---------------+\n                                    |   Test3   |     | Maven Central |\n                                    +-----------+     +---------------+\n                                    |    ...    
|     |     PyPI      |\n                                    +-----------+     +---------------+\n                                                      |  GitHub Pages |\n                                                      +---------------+\n                                                      |GitHub Releases|\n                                                      +---------------+\n```\n\nThe following sections describe each stage and the configuration options\navailable:\n\n- [aws-delivlib](#aws-delivlib)\n- [Pipeline Structure](#pipeline-structure)\n- [Installation](#installation)\n- [Source](#source)\n  - [`repo`: Source Repository (required)](#repo-source-repository-required)\n  - [`branch`: Source Control Branch (optional)](#branch-source-control-branch-optional)\n- [Pull Request Builds](#pull-request-builds)\n- [Build](#build)\n  - [`buildSpec`: Build Script (optional)](#buildspec-build-script-optional)\n  - [`buildImage`: Build container image (optional)](#buildimage-build-container-image-optional)\n  - [`env`: Build environment variables (optional)](#env-build-environment-variables-optional)\n  - [Other Build Options](#other-build-options)\n- [Tests](#tests)\n- [Publish](#publish)\n  - [npm.js (JavaScript)](#npmjs-javascript)\n  - [NuGet (.NET)](#nuget-net)\n  - [Maven Central (Java)](#maven-central-java)\n  - [PyPI (Python)](#pypi-python)\n  - [GitHub Releases](#github-releases)\n  - [GitHub Pages](#github-pages)\n- [Metrics](#metrics)\n- [Automatic Bumps and Pull Request Builds](#automatic-bumps-and-pull-request-builds)\n  - [GitHub Access](#github-access)\n  - [Automatic Bumps](#automatic-bumps)\n- [Failure Notifications](#failure-notifications)\n- [ECR Mirror](#ecr-mirror)\n- [Contributing](#contributing)\n- [License](#license)\n\n\n## Installation\n\nTo install, use npm / yarn:\n\n```console\n$ npm i aws-delivlib\n```\n\nor:\n\n```console\n$ yarn add aws-delivlib\n```\n\nand import the library to your project:\n\n```ts\nimport delivlib = 
require('aws-delivlib');\n```\n\nThe next step is to add a pipeline to your app. When you define a pipeline, the\nminimum requirement is to specify the source repository. All other settings are\noptional.\n\n```ts\nconst pipeline = new delivlib.Pipeline(this, 'MyPipeline', {\n  // options\n});\n```\n\nThe following sections will describe the various options available in your\npipeline.\n\nYou can also take a look at the\n[pipeline definition releasing the delivlib library itself](pipeline/delivlib.ts)\nfor a real-world, working example.\n\n## Source\n\nThe only required option when defining a pipeline is to specify a source\nrepository for your project.\n\n### `repo`: Source Repository (required)\n\nThe `repo` option specifies your source code repository for your project. You\ncould use either CodeCommit or GitHub.\n\n#### CodeCommit\n\nTo use an existing repository:\n\n```ts\nimport codecommit = require('@aws-cdk/aws-codecommit');\n\n// import an existing repository\nconst myRepo = codecommit.Repository.fromRepositoryName(this, 'TestRepo',\n  'delivlib-test-repo');\n\n// ...or define a new repository (probably not what you want)\nconst myRepo = new codecommit.Repository(this, 'TestRepo');\n\n// create a delivlib pipeline associated with this codebuild repo\nnew delivlib.Pipeline(this, 'MyPipeline', {\n  repo: new delivlib.CodeCommitRepo(myRepo),\n  // ...\n});\n```\n\n#### GitHub\n\nTo connect to GitHub, you will need to store a [Personal GitHub Access\nToken](https://github.com/settings/tokens) as an SSM Parameter and provide the\nname of the SSM parameter.\n\n```ts\nimport cdk = require('@aws-cdk/core');\n\nnew delivlib.Pipeline(this, 'MyPipeline', {\n  repo: new delivlib.GitHubRepo({\n    repository: 'cdklabs/aws-delivlib',\n    token: cdk.SecretValue.secretsManager('my-github-token'),\n  }),\n  // ...\n})\n```\n\n### `branch`: Source Control Branch (optional)\n\nThe `branch` option can be used to specify the git branch to build from. 
The\ndefault is `master`.\n\n```ts\nnew delivlib.Pipeline(this, 'MyPipeline', {\n  repo: // ...\n  branch: 'dev',\n})\n```\n\n## Pull Request Builds\n\nPull Request Builds can be used to validate if changes submitted via a pull request\nsuccessfully build and pass tests. They are triggered automatically by GitHub or\nCodeCommit when pull requests are submitted or updated.\n\nKnown in delivlib as AutoBuild, they can be enabled on the Pipeline and further\nconfigured -\n\n```ts\nnew delivlib.Pipeline(this, 'MyPipeline', {\n  // ...\n  autoBuild: true,\n  autoBuildOptions: {\n    publicLogs: true,\n  },\n});\n```\n\nDelivlib also separately exports the `AutoBuild` construct that can be used to configure\nAutoBuild on a project that doesn't have a pipeline associated, or for jobs that can be\nrun outside of a pipeline.\n\n```ts\nnew delivlib.AutoBuild(this, 'MyAutoBuild', {\n  repo: // ...\n});\n```\n\n## Build\n\nThe second stage of a pipeline is to build your code. The following options\nallow you to customize your build environment and scripts:\n\n### `buildSpec`: Build Script (optional)\n\nThe default behavior will use the `buildspec.yaml` file from the root of your\nsource repository to determine the build steps.\n\nSee the [buildspec reference documentation](https://docs.aws.amazon.com/codebuild/latest/userguide/build-spec-ref.html)\nin the CodeBuild User Guide.\n\nNote that if you don't have an \"__artifacts__\" section in your buildspec, you won't\nbe able to run any tests against the build outputs or publish them to package\nmanagers.\n\nIf you wish, you can use the `buildSpec` option, in which case CodeBuild will not\nuse the checked-in `buildspec.yaml`:\n\n```ts\nimport codebuild = require('@aws-cdk/aws-codebuild');\n\nnew delivlib.Pipeline(this, 'MyPipeline', {\n  // ...\n  buildSpec: codebuild.BuildSpec.fromObject({\n    version: '0.2',\n    phases: {\n      build: {\n        commands: [\n          'echo \"Hello, world!\"'\n        ]\n      }\n    
},\n    artifacts: {\n      files: [ '**/*' ],\n      'base-directory': 'dist'\n    }\n  }),\n});\n```\n\n### `buildImage`: Build container image (optional)\n\nThe Docker image to use for the build container.\n\nDefault: the default image (if none is specified) is a custom Docker image which\nis provided as part of the [jsii] distribution called [jsii/superchain]. It is\nan environment that supports building libraries that target all programming\nlanguages supported by [jsii]. Find more information on the contents of the\n[jsii/superchain] image on the [jsii] homepage.\n\n[jsii]: https://github.com/aws/jsii\n[jsii/superchain]: https://hub.docker.com/r/jsii/superchain\n\nYou can use the AWS CodeBuild API to specify any Linux/Windows Docker image for\nyour build. Here are some examples:\n\n* `codebuild.LinuxBuildImage.fromDockerRegistry('golang:1.11')` - use an image from Docker Hub\n* `codebuild.LinuxBuildImage.UBUNTU_14_04_OPEN_JDK_9` - OpenJDK 9 available from AWS CodeBuild\n* `codebuild.WindowsBuildImage.WIN_SERVER_CORE_2016_BASE` - Windows Server Core 2016 available from AWS CodeBuild\n* `codebuild.LinuxBuildImage.fromEcrRepository(myRepo)` - use an image from an ECR repository\n\n### `env`: Build environment variables (optional)\n\nAllows adding environment variables to the build environment:\n\n```ts\nnew delivlib.Pipeline(this, 'MyPipeline', {\n  // ...\n  environment: {\n    FOO: 'bar'\n  }\n});\n```\n\n### Other Build Options\n\n* `computeType`: size of the AWS CodeBuild compute capacity (default: SMALL)\n* `privileged`: run in privileged mode (default: `false`)\n\n## Tests\n\nThe third stage of a delivlib pipeline is to execute tests. Tests are executed\nin parallel only after a successful build and can access build artifacts as\ndefined in your `buildspec.yaml`.\n\nThe `pipeline.addTest` method can be used to add tests to your pipeline. 
Test\nscripts are packaged as part of your delivlib CDK app.\n\n```ts\ndelivlib.addTest('MyTest', {\n  platform: delivlib.ShellPlatform.LinuxUbuntu(), // or `ShellPlatform.Windows()`\n  scriptDirectory: 'path/to/local/directory/with/tests',\n  entrypoint: 'run.sh',\n});\n```\n\n`scriptDirectory` refers to a directory on the local file system which must\ncontain the `entrypoint` file.\nPreferably make this path relative to the current file using `path.join(__dirname, ...)`.\n\nThe test container will be populated with the build output artifacts as well as all\nthe files from the test directory.\n\nThen, the entry-point will be executed. If it fails, the test failed.\n\n## Publish\n\nThe last step of the pipeline is to publish your artifacts to one or more\npackage managers. Delivlib is shipped with a bunch of built-in publishing\ntasks, but you could add your own if you like.\n\nTo add a publishing target to your pipeline, you can either use the\n`pipeline.addPublish(publisher)` method or one of the built-in\n`pipeline.publishToXxx` methods. The first option is useful if you wish to\ndefine your own publisher, which is a class that implements the\n`delivlib.IPublisher` interface.\n\nBuilt-in publishers are designed to be idempotent: if the artifacts version is\nalready published to the package manager, the publisher __will succeed__. This\nmeans that in order to publish a new version, all you need to do is bump the\nversion of your package artifact (e.g. change `package.json`) and the publisher\nwill kick in.\n\nYou can use the `dryRun: true` option when creating a publisher to tell the\npublisher to do as much as it can without actually making the package publicly\navailable. 
This is useful for testing.\n\nThe following sections describe how to use each one of the built-in publishers.\n\n### npm.js (JavaScript)\n\nThe method `pipeline.publishToNpm` will add a publisher to your pipeline which\ncan publish JavaScript modules to [npmjs](https://www.npmjs.com/).\n\nThe publisher will search for `js/*.tgz` in your build artifacts and will `npm\npublish` each of them.\n\nTo create npm tarballs, you can use `npm pack` as part of your build and emit\nthem to the `js/` directory in your build artifacts. The version of the module\nis deduced from the name of the tarball.\n\nTo use this publisher, you will first need to store an [npm.js publishing\ntoken](https://docs.npmjs.com/creating-and-viewing-authentication-tokens) in AWS\nSecrets Manager and supply the secret ARN when you add the publisher.\n\n```ts\npipeline.publishToNpm({\n  npmTokenSecret: { secretArn: 'my-npm-token-secret-arn' }\n});\n```\n\n### NuGet (.NET)\n\nThis publisher can publish .NET NuGet packages to [nuget.org](https://www.nuget.org/).\n\nThe publisher will search `dotnet/**/*.nuget` in your build artifacts and will\npublish each package to NuGet. To create .nupkg files, see [Creating NuGet\nPackages](https://docs.microsoft.com/en-us/nuget/create-packages/creating-a-package).\nMake sure you output the artifacts under the `dotnet/` directory.\n\nTo use this publisher, you will first need to store a [NuGet API\nKey](https://www.nuget.org/account/apikeys) with \"Push\" permissions in AWS\nSecrets Manager and supply the secret ARN when you add the publisher.\n\nUse `pipeline.publishToNuGet` will add a publisher to your pipeline:\n\n```ts\npipeline.publishToNuGet({\n  nugetApiKeySecret: { secretArn: 'my-nuget-token-secret-arn' }\n});\n```\n\n#### Assembly Signature\n\n**Important:** Limitations in the `mono` tools restrict the hash algorithms that\ncan be used in the signature to `SHA-1`. 
This limitation will be removed in the\nfuture.\n\nYou can enable digital signatures for the `.dll` files enclosed in your NuGet\npackages. In order to do so, you need to procure a Code-Signing Certificate\n(also known as a Software Publisher Certificate, or SPC). If you don't have one\nyet, you can refer to\n[Obtaining a new Code Signing Certificate](#obtaining-a-new-code-signing-certificate)\nfor a way to create a new certificate entirely in the Cloud.\n\nIn order to enable code signature, change the way the NuGet publisher is added\nby adding an `ICodeSigningCertificate` for the `codeSign` key (it could be a\n`CodeSigningCertificate` construct, or you may bring your own implementation if\nyou wish to use a pre-existing certificate):\n\n```ts\npipeline.publishToNuGet({\n  nugetApiKeySecret: { secretArn: 'my-nuget-token-secret-arn' },\n  codeSign: codeSigningCertificate\n});\n```\n\n##### Obtaining a new Code Signing Certificate\n\nIf you want to create a new certificate, the `CodeSigningCertificate` construct\nwill provision a new RSA Private Key and emit a Certificate Signing Request in\nan `Output` so you can pass it to your Certificate Authority (CA) of choice:\n1. Add a `CodeSigningCertificate` to your stack:\n    ```ts\n    new delivlib.CodeSigningCertificate(stack, 'CodeSigningCertificate', {\n      distinguishedName: {\n        commonName: '<a name your customers would recognize>',\n        emailAddress: '<your@email.address>',\n        country: '<two-letter ISO country code>',\n        stateOrProvince: '<state or province>',\n        locality: '<city>',\n        organizationName: '<name of your company or organization>',\n        organizationalUnitName: '<name of your department within the organization>',\n      }\n    });\n    ```\n2. 
Deploy the stack:\n    ```console\n    $ cdk deploy $stack_name\n    ...\n    Outputs:\n    $stack_name.CodeSigningCertificateXXXXXX = -----BEGIN CERTIFICATE REQUEST-----\n    ...\n    -----END CERTIFICATE REQUEST-----\n    ```\n3. Forward the Certificate Signing Request (the value of the stack output that\n   starts with `-----BEGIN CERTIFICATE REQUEST-----` and ends with\n   `-----END CERTIFICATE REQUEST-----`) to a Certificate Authority, so they can\n   provide you with a signed certificate.\n4. Update your stack with the signed certificate obtained from the CA. The below\n   example assumes you placed the PEM-encoded certificate in a file named\n   `certificate.pem` that is in the same folder as the file that uses the code:\n    ```ts\n    // Import utilities at top of file:\n    import fs = require('fs');\n    import path = require('path');\n    // ...\n    new delivlib.CodeSigningCertificate(stack, 'CodeSigningCertificate', {\n      distinguishedName: {\n        commonName: '<a name your customers would recognize>',\n        emailAddress: '<your@email.address>',\n        country: '<two-letter ISO country code>',\n        stateOrProvince: '<state or province>',\n        locality: '<city>',\n        organizationName: '<name of your company or organization>',\n        organizationalUnitName: '<name of your department within the organization>',\n      },\n      // Adding the signed certificate\n      pemCertificate: fs.readFileSync(path.join(__dirname, 'certificate.pem'))\n    });\n    ```\n5. Redeploy your stack, so the self-signed certificate is replaced with the one\n   received from your CA:\n    ```console\n    $ cdk deploy $stackName\n    ```\n\n### Maven Central (Java)\n\nThis publisher can publish Java packages to [Maven\nCentral](https://search.maven.org/).\n\nThis publisher expects to find a local maven repository under the `java/`\ndirectory in your build output artifacts. 
You can create one using the\n`altDeploymentRepository` option for `mvn deploy` (this assumes `dist` is the\nroot of your artifacts tree):\n\n```console\n$ mvn deploy -D altDeploymentRepository=local::default::file://${PWD}/dist/java\n```\n\nUse `pipeline.publishToMaven` to add this publisher to your pipeline:\n\n```ts\npipeline.publishToMaven({\n  mavenLoginSecret: { secretArn: 'my-maven-credentials-secret-arn' },\n  signingKey: mavenSigningKey,\n  stagingProfileId: '11a33451234521'\n});\n```\n\nIn order to configure the Maven publisher, you will need at least three pieces\nof information:\n\n1. __Maven Central credentials__ (`mavenLoginSecret`) stored in AWS Secrets Manager\n2. __GPG signing key__ (`signingKey`) to sign your Maven packages\n3. __Staging profile ID__ (`stagingProfileId`) assigned to your account in Maven Central.\n\nThe following sections will describe how to obtain this information.\n\n#### GPG Signing Key\n\nSince Maven Central requires that you sign your packages, you will need to\ncreate a GPG key pair and publish its public key to a well-known server:\n\nThis library includes a GPG key construct:\n\n```ts\nconst mavenSigningKey = new delivlib.OpenPGPKeyPair(this, 'MavenCodeSign', {\n  email: 'your-email@domain.com',\n  identity: 'your-identity',\n  secretName: 'maven-code-sign',\n  pubKeyParameterName: 'mavenPublicKey',\n  keySizeBits: 4096,\n  expiry: '1y',\n  version: 1.0\n});\n```\n\nAfter you've deployed your stack once, you can go to the SSM Parameter Store\nconsole and copy the public key from the new parameter created by your stack\nunder the specified secret name. Then, you should paste this key to any of the\nsupported key servers (recommended: https://keyserver.ubuntu.com).\n\n#### Sonatype Credentials\n\nIn order to publish to Maven Central, you'll need to follow the instructions in\nMaven Central's [OSSRH Guide](http://central.sonatype.org/pages/ossrh-guide.html)\nand create a Sonatype account and project via JIRA:\n\n1. 
[Create JIRA\n   account](https://issues.sonatype.org/secure/Signup!default.jspa)\n2. [Create new project\n   ticket](https://issues.sonatype.org/secure/CreateIssue.jspa?issuetype=21&pid=10134)\n3. Once you have the user name and password of your Sonatype account, create an\n   AWS Secrets Manager secret with `username` and `password` key/value fields\n   that correspond to your account's credentials.\n\n#### Staging Profile ID\n\nAfter you've obtained a Sonatype account and Maven Central project:\n\n1. Log into https://oss.sonatype.org\n2. Select \"Staging Profiles\" from the side bar (under \"Build Promotion\")\n3. Click on the \"Releases\" staging profile that you registered\n4. The URL of the page should change and include your profile ID. For example: `https://oss.sonatype.org/#stagingProfiles;11a33451234521`\n\nThis is the value you should assign to the `stagingProfileId` option.\n\n### PyPI (Python)\n\nThis publisher can publish modules to [PyPI](https://pypi.org/).\n\nThis publisher will publish all files under the `python/` directory in your\nbuild output artifacts to PyPI using the following command:\n\n```sh\ntwine upload --skip-existing python/**\n```\n\nTo use this publisher, you will need an\n[account](https://pypi.org/account/register/) with PyPI. Then store your\ncredentials in an AWS Secrets Manager secret, under the `username` and\n`password` fields.\n\nNow, use `pipeline.publishToPyPi` to add this publisher to your pipeline:\n\n```ts\npipeline.publishToPyPi({\n  loginSecret: { secretArn: 'my-pypi-credentials-secret-arn' }\n});\n```\n\n### GitHub Releases\n\nThis publisher can package all your build artifacts, sign them and publish them\nto the \"Releases\" section of a GitHub project.\n\nThis publisher relies on two files to produce the release:\n\n- `build.json` a manifest that contains metadata about the release.\n- `CHANGELOG.md` (optional) the changelog of your project, from which the\n  release notes are extracted. 
If not provided, no release notes are added\n  to the release.\n\n<a id=\"manifest\"/>\n\nThe file `build.json` is read from the root of your artifact tree. It should\ninclude the following fields:\n\n```json\n{\n  \"name\": \"<project name>\",\n  \"version\": \"<project version>\",\n  \"commit\": \"<sha of commit>\"\n}\n```\n\nThis publisher does the following:\n\n1. Create a zip archive that contains the entire build artifacts tree under the\n   name `${name}-${version}.zip`.\n2. Sign the archive using a GPG key and store it under\n   `${name}-${version}.zip.sig`\n3. Check if there is already a git tag with `v${version}` in the GitHub\n   repository. If there is, bail out successfully.\n4. If there's a `CHANGELOG.md` file, extract the release notes for\n   `${version}` (uses [changelog-parser](https://www.npmjs.com/package/changelog-parser))\n5. Create a GitHub release named `v${version}`, tag the specified `${commit}`\n   with the release notes from the changelog.\n6. Attach the zip archive and signature to the release.\n\nTo add a GitHub release publisher to your pipeline, use the\n`pipeline.publishToGitHub` method:\n\n```ts\npipeline.publishToGitHub({\n  githubRepo: targetRepository,\n  signingKey: releaseSigningKey\n});\n```\n\nThe publisher requires the following information:\n\n- The target GitHub project (`githubRepo`): see [instructions](#github) on how to connect\n  to a GitHub repository. It doesn't have to be the same repository as the source repository,\n  but it can be.\n- A GPG signing key (`signingKey`): a `delivlib.SigningKey` object used to sign the\n  zip bundle. Make sure to publish the public key to a well-known server so your users\n  can validate the authenticity of your release (see [GPG Signing Key](#gpg-signing-key) for\n  details on how to create a signing key pair and extract its public key). 
You can either use\n\n### GitHub Pages\n\nThis publisher allows you to publish versioned static web-site content to GitHub Pages.\n\nThe publisher commits the entire contents of the `docs/` directory into the root of the specified\nGitHub repository, and also under the `${version}/` directory of the repo (which allows users\nto access old versions of the docs if they wish).\n\nNOTE: static website content can grow big. Therefore, this publisher will always force-push\nto the branch without history (history is preserved via the `versions/` directory). Make sure\nyou don't protect this branch against force-pushing or otherwise the publisher will fail.\n\nThis publisher depends on the following artifacts:\n\n1. `build.json`: build manifest (see [schema](#manifest) above)\n2. `docs/**`: the static website contents\n\nThis is how this publisher works:\n\n1. Read the `version` field from `build.json`\n2. Clone the `gh-pages` branch of the target repository to a local working directory\n3. Rsync the contents of `docs/**` both to `versions/${version}` and to `/` of the working copy.\n4. Commit and push to the `gh-pages` branch on GitHub\n\n> NOTE: if `docs/` contains a fully rendered static website, you should also include\n> a `.nojekyll` file to [bypass](https://blog.github.com/2009-12-29-bypassing-jekyll-on-github-pages/)\n> Jekyll rendering.\n\nTo add this publisher to your pipeline, use the `pipeline.publishToGitHubPages` method:\n\n```ts\npipeline.publishToGitHubPages({\n  githubRepo,\n  sshKeySecret: { secretArn: 'github-ssh-key-secret-arn' },\n  commitEmail: 'foo@bar.com',\n  commitUsername: 'foobar',\n  branch: 'gh-pages' // default\n});\n```\n\nIn order to publish to GitHub Pages, you will need the following pieces of information:\n\n1. The target GitHub repository (`githubRepo`). See [instructions](#github) on\n   how to connect to a GitHub repository. It doesn't have to be the same\n   repository as the source repository, but it can be.\n2. 
SSH private key (`sshKeySecret`) for pushing to that repository stored in AWS\n   Secrets Manager which is configured in your GitHub repository as a deploy key\n   with write permissions.\n3. Committer email (`commitEmail`) and username (`commitUsername`).\n\nTo create an ssh deploy key for your repository:\n\n1. Follow [this\n   guide](https://developer.github.com/v3/guides/managing-deploy-keys/#deploy-keys)\n   to produce a private/public key pair on your machine.\n1. Add the deploy key to your repository with write permissions.\n1. Create an AWS Secrets Manager secret and paste the private key as plaintext\n   (not key/value).\n1. Use the name of the AWS Secrets Manager secret in the `sshKeySecret` option.\n\n## Metrics\n\nThe `Pipeline` construct automatically creates the following metrics in CloudWatch\nfor the configured pipelines. These are published under the namespace 'CDK/Delivlib'.\n\n- Execution Failures: The number of failures of the pipeline execution.\n  When a pipeline execution fails, a '1' is recorded and for every success, a '0' is\n  recorded.\n\n  Metric Name: *Failures*\n  Dimensions:\n  - *Pipeline*: The pipeline name in CodePipeline.\n\n- Action Failures: The number of failures per action per pipeline. An execution\n  failure can be due to multiple actions failing.\n  For every action failure, a '1' is recorded and for every success, a '0' is recorded.\n\n  Metric Name: *Failures*\n  Dimensions:\n  - *Pipeline*: The pipeline name in CodePipeline.\n  - *Action*: The name of the action that succeeded or failed.\n\n## Automatic Bumps and Pull Request Builds\n\n### GitHub Access\n\nIf your source repository is GitHub, in order to enable these features you will\nneed to manually connect AWS CodeBuild to your GitHub account. 
Otherwise, you\nwill receive the following error message:\n\n```\nNo Access token found, please visit AWS CodeBuild console to connect to GitHub\n(Service: AWSCodeBuild; Status Code: 400; Error Code: InvalidInputException;\nRequest ID: ab458603-6fd4-11e8-9310-ff116e0423f9)\n```\n\nTo connect, go to the AWS CodeBuild console, click \"Create Project\", select a\nGitHub source and hit \"Connect\". There is no need to save the new project. This\nneeds to be done once per account/region.\n\n### Automatic Bumps\n\nA bump is the process of incrementing the version number of the project. When\nthe version number is incremented and a commit is pushed to the master branch,\nthe publishing actions will release the new version to all repositories.\n\nThis feature enables achieving full continuous delivery for libraries.\n\nTo enable automatic bumps, you will first need to determine how to perform a\nbump in your repository. What command should be executed in order to increment\nthe version number, update change log, etc.\n\nThe bump command is expected to perform the bump and issue a **commit** and a\n**tag** to the local repository with the version number.\n\nFor JavaScript projects, the\n[standard-version](https://github.com/conventional-changelog/standard-version)\ntool will do exactly that, so it is the recommended mechanism for such projects.\n\nOnce a bump is committed, the commit will be pushed either to a dedicated branch\ncalled `bumps/VERSION` or to a branch of your choosing such as `master`.\n\nTo set up bumps, simply call `autoBump` on your pipeline. 
The following example\nsets up a bump on the default schedule (12pm UTC daily) which will automatically\npush to \"master\" (which will trigger a release).\n\n```ts\nconst bump = pipeline.autoBump({\n  bumpCommand: 'npm i && npm run bump',\n  branch: 'master'\n});\n```\n\nYou can customize the environment used for running the bump script.\n\nIf a bump fails, the `bump.alarm` CloudWatch alarm will be triggered.\n\nNOTE: there is currently no way for the bump command to indicate to the\nsystem that a bump is not needed (i.e. no changes have been made to the\nlibrary).\n\n## Failure Notifications\n\nPipelines can be configured with notifications that will be sent on any failure in pipeline's stages. Notifications can\nbe sent to either a Slack channel or a Chime room. The following code configures one of each -\n\n```ts\n// Slack\nconst teamChannel = new chatbot.SlackChannelConfiguration(this, {\n  // ...\n});\npipeline.notifyOnFailure(PipelineNotification.slack({\n  channels: [teamChannel]\n}));\n\n// Chime\nconst teamRoomWebhook = 'https://hooks.chime.aws/incomingwebhooks/1c3588c7-623d-4799-af9b-8b1818fca779?token=cUMzOVA4OXl8MXxCaHJlZ0RUVm03TmZVMkpoTzlwa3NVbXJCam8tNWF3UGdzemVqZndsZERV';\npipeline.notifyOnFailure(PipelineNotification.chime({\n  webhookUrl: [ teamRoomWebhook ]\n}));\n```\n\n## ECR Mirror\n\nBuilds commonly use Docker images from DockerHub as their base image. In fact, delivlib defaults its build\nimage to `jsii/superchain`. However, DockerHub has throttles in place for the volume of unauthenticated and\nauthenticated pulls. 
This can cause CodeBuild jobs that run frequently to fail from DockerHub's throttling.\n\nThe `EcrMirror` construct can be used to synchronize, on a specific schedule, Docker images between DockerHub and\na local ECR registry in the AWS account.\n\n```ts\nnew EcrMirror(this, 'RegistrySync', {\n  sources: [\n    MirrorSource.fromDockerHub('jsii/superchain:1-bullseye-slim'),\n    MirrorSource.fromDockerHub('python:3.6'),\n  ],\n  dockerhubCredentials: // ...\n  schedule: events.Schedule.cron( ... ),\n})\n```\n\nYou can also use the `MirrorSource.fromDirectory()` API if you would like to build a new Docker image based on a\nDockerfile. The Dockerfile should be placed at the top level of the specified directory.\n\nIn addition to this, an `EcrMirrorAspect` is available that can walk the construct tree and replace all occurrences\nof Docker images in CodeBuild projects with ECR equivalents if they are found in the provided `EcrMirror` construct.\nThis can be applied to an entire stack as so -\n\n```ts\nconst stack = new MyStack(...);\n// ...\nAspects.of(stack).add(new EcrMirrorAspect(ecrMirrorStack.mirror));\n```\n\n## Package Integrity\n\nTo ensure the artifacts published into package managers exactly correspond to your source code, delivlib offers a `PackageIntegrityValidation` construct.\nIt will perform periodic integrity checks, comparing the published artifact against an artifact directly built from source code.\n\nThis can help detect scenarios where your publishing platform may have been compromised, and your packages no longer contain the expected bits.\n\n```ts\n// first import the secret containing your github token secret.\n// the secret value should be the token in plain text.\nconst token = sm.Secret.fromSecretCompleteArn(stack, 'GitHubSecret', '<secret-arn>');\n\n// validate integrity of your package, hosted in a github repository.\nnew PackageIntegrityValidation(stack, 'PackageValidation', {\n  repository: '<repository-slug>',\n  buildImage: 
codebuild.LinuxBuildImage.fromDockerRegistry('<docker-image>'),\n  githubTokenSecret: token,\n});\n```\n\nAt a high level, the validation is performed like so:\n\n1. Clone the GitHub repository and checkout to the latest tag.\n2. Build the repository to produce local artifacts from the source code.\n3. Download the corresponding artifacts from package managers.\n4. Compare.\n\nBy default the validation will run once a day, but you can configure its schedule using the `schedule` option.\nIf the validation fails, a CloudWatch alarm will be triggered, which is accessible via the `failureAlarm` property.\n\n## Contributing\n\nSee the [contribution guide](./CONTRIBUTING.md) for details on how to submit\nissues, pull requests, setup a development environment and publish new releases\nof this library.\n\n## License\n\nThis library is licensed under the Apache 2.0 License.\n\n"
  },
  {
    "path": "build-custom-resource-handlers.sh",
    "content": "#!/bin/bash\nset -euo pipefail\n\ncompile=\"tsc --alwaysStrict\n             --inlineSourceMap\n             --lib ES2017\n             --module CommonJS\n             --moduleResolution Node\n             --noFallthroughCasesInSwitch\n             --noImplicitAny\n             --noImplicitReturns\n             --noImplicitThis\n             --noUnusedLocals\n             --noUnusedParameters\n             --removeComments\n             --strict\n             --target ES2017\n             --types node\"\n\nfor handler in pgp-secret private-key certificate-signing-request\ndo\n  echo \"Building CustomResource handler ${handler}\"\n  ${compile}                                                                    \\\n    --incremental                                                               \\\n    --tsBuildInfoFile \"./lib/custom-resource-handlers/src/${handler}.tsbuildinfo\"   \\\n    --outDir \"./lib/custom-resource-handlers/bin/${handler}\"                        \\\n    \"./lib/custom-resource-handlers/src/${handler}.ts\"                              \\\n    ./lib/custom-resource-handlers/src/_*.ts\n  cp \"./lib/custom-resource-handlers/bin/${handler}/${handler}.js\" \"./lib/custom-resource-handlers/bin/${handler}/index.js\"\ndone\n"
  },
  {
    "path": "cdk.json",
    "content": "{\n  \"context\": {\n    \"@aws-cdk/core:newStyleStackSynthesis\": \"true\"\n  }\n}\n"
  },
  {
    "path": "lib/__tests__/auto-build.test.ts",
    "content": "import { App, Stack } from 'aws-cdk-lib';\nimport { Template } from 'aws-cdk-lib/assertions';\nimport { Artifacts } from 'aws-cdk-lib/aws-codebuild';\nimport { Bucket } from 'aws-cdk-lib/aws-s3';\nimport { AutoBuild, GitHubRepo } from '../../lib';\n\nlet app: App;\nlet stack: Stack;\nbeforeEach(() => {\n  app = new App();\n  stack = new Stack(app, 'Stack');\n});\n\ntest('webhooks are enabled by default', () => {\n  new AutoBuild(stack, 'AutoBuild', {\n    repo: new GitHubRepo({\n      repository: 'some-owner/some-repo',\n      tokenSecretArn: 'arn:aws:secretsmanager:someregion:someaccount:secret:sometoken',\n    }),\n  });\n  const template = Template.fromStack(stack);\n\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Triggers: {\n      FilterGroups: [\n        [\n          {\n            Pattern: 'PUSH, PULL_REQUEST_CREATED, PULL_REQUEST_UPDATED',\n            Type: 'EVENT',\n          },\n        ],\n      ],\n      Webhook: true,\n    },\n  });\n});\n\ntest('webhooks for a single branch', () => {\n  new AutoBuild(stack, 'AutoBuild', {\n    repo: new GitHubRepo({\n      repository: 'some-owner/some-repo',\n      tokenSecretArn: 'arn:aws:secretsmanager:someregion:someaccount:secret:sometoken',\n    }),\n    branch: 'banana',\n  });\n  const template = Template.fromStack(stack);\n\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Triggers: {\n      FilterGroups: [\n        [\n          {\n            Pattern: 'PUSH',\n            Type: 'EVENT',\n          },\n          {\n            Pattern: '^refs/heads/banana$',\n            Type: 'HEAD_REF',\n          },\n        ],\n        [\n          {\n            Pattern: 'PULL_REQUEST_CREATED, PULL_REQUEST_UPDATED',\n            Type: 'EVENT',\n          },\n          {\n            Pattern: '^refs/heads/banana$',\n            Type: 'BASE_REF',\n          },\n        ],\n      ],\n      Webhook: true,\n    },\n  });\n});\n\ntest('webhooks for multiple branches', () 
=> {\n  new AutoBuild(stack, 'AutoBuild', {\n    repo: new GitHubRepo({\n      repository: 'some-owner/some-repo',\n      tokenSecretArn: 'arn:aws:secretsmanager:someregion:someaccount:secret:sometoken',\n    }),\n    branches: ['banana', 'grapefruit'],\n  });\n  const template = Template.fromStack(stack);\n\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Triggers: {\n      FilterGroups: [\n        [\n          {\n            Pattern: 'PUSH',\n            Type: 'EVENT',\n          },\n          {\n            Pattern: '^refs/heads/banana$|^refs/heads/grapefruit$',\n            Type: 'HEAD_REF',\n          },\n        ],\n        [\n          {\n            Pattern: 'PULL_REQUEST_CREATED, PULL_REQUEST_UPDATED',\n            Type: 'EVENT',\n          },\n          {\n            Pattern: '^refs/heads/banana$|^refs/heads/grapefruit$',\n            Type: 'BASE_REF',\n          },\n        ],\n      ],\n      Webhook: true,\n    },\n  });\n});\n\ntest('can disable webhooks', () => {\n  new AutoBuild(stack, 'AutoBuild', {\n    repo: new GitHubRepo({\n      repository: 'some-owner/some-repo',\n      tokenSecretArn: 'arn:aws:secretsmanager:someregion:someaccount:secret:sometoken',\n    }),\n    webhook: false,\n  });\n  const template = Template.fromStack(stack);\n\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Triggers: {\n      Webhook: false,\n    },\n  });\n});\n\ntest('can enable artifacts', () => {\n  new AutoBuild(stack, 'AutoBuild', {\n    repo: new GitHubRepo({\n      repository: 'some-owner/some-repo',\n      tokenSecretArn: 'arn:aws:secretsmanager:someregion:someaccount:secret:sometoken',\n    }),\n    artifacts: Artifacts.s3({\n      bucket: new Bucket(stack, 'artifactBucket'),\n      name: 'artifact-name',\n    }),\n  });\n  const template = Template.fromStack(stack);\n\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Artifacts: {\n      Location: { Ref: 'artifactBucket27548F83' },\n      Name: 
'artifact-name',\n      NamespaceType: 'BUILD_ID',\n      Packaging: 'ZIP',\n      Type: 'S3',\n    },\n  });\n});\n"
  },
  {
    "path": "lib/__tests__/build-spec.test.ts",
    "content": "import * as delivlib from '../../lib';\n\ntest('buildspec single artifact goes to \"artifacts\"', () => {\n  const bs = delivlib.BuildSpec.simple({\n    build: ['echo hello > foo/file.txt'],\n    artifactDirectory: 'foo',\n  });\n\n  const rendered = bs.render();\n\n  expect(rendered).toEqual({\n    artifacts: {\n      'base-directory': 'foo',\n      'files': [\n        '**/*',\n      ],\n    },\n    phases: {\n      build: {\n        commands: [\n          'echo hello > foo/file.txt',\n        ],\n      },\n    },\n    version: '0.2',\n  });\n});\n\ntest('buildspec multiple artifacts all go into \"secondary-artifacts\"', () => {\n  const bs = delivlib.BuildSpec.simple({\n    build: ['echo hello > foo/file.txt'],\n    artifactDirectory: 'foo',\n    additionalArtifactDirectories: {\n      artifact2: 'boo',\n    },\n  });\n\n  const rendered = bs.render({ primaryArtifactName: 'primrose' });\n\n  expect(rendered).toEqual({\n    artifacts: {\n      'secondary-artifacts': {\n        primrose: {\n          'base-directory': 'foo',\n          'files': [\n            '**/*',\n          ],\n        },\n        artifact2: {\n          'base-directory': 'boo',\n          'files': [\n            '**/*',\n          ],\n        },\n      },\n    },\n    phases: {\n      build: {\n        commands: [\n          'echo hello > foo/file.txt',\n        ],\n      },\n    },\n    version: '0.2',\n  });\n});\n\ntest('buildspec empty creates minimal structure', () => {\n  const bs = delivlib.BuildSpec.empty();\n  const rendered = bs.render();\n\n  expect(rendered).toEqual({\n    version: '0.2',\n  });\n});\n\ntest('buildspec literal accepts raw structure', () => {\n  const struct = {\n    version: '0.2' as const,\n    phases: {\n      build: {\n        commands: ['echo test'],\n      },\n    },\n  };\n\n  const bs = delivlib.BuildSpec.literal(struct);\n  const rendered = bs.render();\n\n  expect(rendered).toEqual(struct);\n});\n\ntest('buildspec simple with all phases', 
() => {\n  const bs = delivlib.BuildSpec.simple({\n    install: ['npm install'],\n    preBuild: ['npm run lint'],\n    build: ['npm run build'],\n    artifactDirectory: 'dist',\n  });\n\n  const rendered = bs.render();\n\n  expect(rendered).toEqual({\n    version: '0.2',\n    phases: {\n      install: {\n        commands: ['npm install'],\n      },\n      pre_build: {\n        commands: ['npm run lint'],\n      },\n      build: {\n        commands: ['npm run build'],\n      },\n    },\n    artifacts: {\n      'base-directory': 'dist',\n      'files': ['**/*'],\n    },\n  });\n});\n\ntest('buildspec simple with reports', () => {\n  const bs = delivlib.BuildSpec.simple({\n    build: ['npm test'],\n    reports: {\n      jest: {\n        'files': ['coverage/clover.xml'],\n        'file-format': 'CucumberJson',\n      },\n    },\n  });\n\n  const rendered = bs.render();\n\n  expect(rendered.reports).toEqual({\n    jest: {\n      'files': ['coverage/clover.xml'],\n      'file-format': 'CucumberJson',\n    },\n  });\n});\n\ntest('additionalArtifactNames returns correct names', () => {\n  const bs = delivlib.BuildSpec.simple({\n    build: ['echo test'],\n    artifactDirectory: 'dist',\n    additionalArtifactDirectories: {\n      docs: 'documentation',\n      assets: 'static',\n    },\n  });\n\n  expect(bs.additionalArtifactNames).toEqual(['docs', 'assets']);\n});\n\ntest('additionalArtifactNames excludes PRIMARY', () => {\n  const bs = delivlib.BuildSpec.simple({\n    build: ['echo test'],\n    artifactDirectory: 'dist',\n  });\n\n  expect(bs.additionalArtifactNames).toEqual([]);\n});\n\ntest('merge combines two buildspecs', () => {\n  const bs1 = delivlib.BuildSpec.simple({\n    install: ['npm install'],\n    build: ['npm run build'],\n  });\n\n  const bs2 = delivlib.BuildSpec.simple({\n    preBuild: ['npm run lint'],\n    build: ['npm run test'],\n  });\n\n  const merged = bs1.merge(bs2);\n  const rendered = merged.render();\n\n  expect(rendered.phases).toEqual({\n    
install: {\n      commands: ['npm install'],\n    },\n    pre_build: {\n      commands: ['npm run lint'],\n    },\n    build: {\n      commands: ['npm run build', 'npm run test'],\n    },\n  });\n});\n\ntest('merge throws on duplicate artifact names', () => {\n  const bs1 = delivlib.BuildSpec.simple({\n    additionalArtifactDirectories: { docs: 'docs1' },\n  });\n\n  const bs2 = delivlib.BuildSpec.simple({\n    additionalArtifactDirectories: { docs: 'docs2' },\n  });\n\n  expect(() => bs1.merge(bs2)).toThrow('There is already an artifact with name docs');\n});\n\ntest('merge throws on duplicate report names', () => {\n  const bs1 = delivlib.BuildSpec.simple({\n    reports: { test: { files: ['test1.xml'] } },\n  });\n\n  const bs2 = delivlib.BuildSpec.simple({\n    reports: { test: { files: ['test2.xml'] } },\n  });\n\n  expect(() => bs1.merge(bs2)).toThrow('Reports must have unique names');\n});\n\ntest('render throws when PRIMARY artifact name not supplied', () => {\n  const bs = delivlib.BuildSpec.simple({\n    artifactDirectory: 'dist',\n    additionalArtifactDirectories: { docs: 'documentation' },\n  });\n\n  expect(() => bs.render()).toThrow('Replacement name for PRIMARY artifact not supplied');\n});\n\ntest('merge handles environment variables', () => {\n  const bs1 = delivlib.BuildSpec.literal({\n    version: '0.2',\n    env: {\n      variables: { NODE_ENV: 'production' },\n    },\n  });\n\n  const bs2 = delivlib.BuildSpec.literal({\n    version: '0.2',\n    env: {\n      variables: { DEBUG: 'true' },\n    },\n  });\n\n  const merged = bs1.merge(bs2);\n  const rendered = merged.render();\n\n  expect(rendered.env?.variables).toEqual({\n    NODE_ENV: 'production',\n    DEBUG: 'true',\n  });\n});\n\ntest('merge handles cache paths', () => {\n  const bs1 = delivlib.BuildSpec.literal({\n    version: '0.2',\n    cache: { paths: ['node_modules/**/*'] },\n  });\n\n  const bs2 = delivlib.BuildSpec.literal({\n    version: '0.2',\n    cache: { paths: ['.npm/**/*'] },\n 
 });\n\n  const merged = bs1.merge(bs2);\n  const rendered = merged.render();\n\n  expect(rendered.cache?.paths).toEqual(['node_modules/**/*', '.npm/**/*']);\n});\n\ntest('merge handles install phase runtime-versions', () => {\n  const bs1 = delivlib.BuildSpec.literal({\n    version: '0.2',\n    phases: {\n      install: {\n        'commands': ['echo install'],\n        'runtime-versions': { nodejs: '18' },\n      },\n    },\n  });\n\n  const bs2 = delivlib.BuildSpec.literal({\n    version: '0.2',\n    phases: {\n      install: {\n        'commands': ['npm install'],\n        'runtime-versions': { python: '3.9' },\n      },\n    },\n  });\n\n  const merged = bs1.merge(bs2);\n  const rendered = merged.render();\n\n  expect(rendered.phases?.install).toEqual({\n    'commands': ['echo install', 'npm install'],\n    'runtime-versions': { nodejs: '18', python: '3.9' },\n  });\n});\n"
  },
  {
    "path": "lib/__tests__/bump.test.ts",
    "content": "// tslint:disable: max-line-length\nimport * as cdk from 'aws-cdk-lib';\nimport { Template } from 'aws-cdk-lib/assertions';\nimport { AutoBump, WritableGitHubRepo } from '../../lib';\n\nconst Stack = cdk.Stack;\n\nconst MOCK_REPO = new WritableGitHubRepo({\n  sshKeySecret: { secretArn: 'ssh-key-secret-arn' },\n  commitUsername: 'user',\n  commitEmail: 'email@email',\n  repository: 'owner/repo',\n  tokenSecretArn: 'token-secret-arn',\n});\n\ntest('autoBump', () => {\n  // GIVEN\n  const stack = new Stack(new cdk.App(), 'TestStack');\n\n  // WHEN\n  new AutoBump(stack, 'MyAutoBump', {\n    repo: MOCK_REPO,\n  });\n  const template = Template.fromStack(stack);\n\n  // THEN\n\n  // build project\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Triggers: {\n      Webhook: false,\n    },\n    Source: {\n      Type: 'GITHUB',\n      GitCloneDepth: 0,\n      Location: 'https://github.com/owner/repo.git',\n      ReportBuildStatus: false,\n      BuildSpec: JSON.stringify({\n        version: '0.2',\n        phases: {\n          pre_build: {\n            commands: [\n              'git config --global user.email \"email@email\"',\n              'git config --global user.name \"user\"',\n            ],\n          },\n          build: {\n            commands: [\n              'export SKIP=false',\n              '$SKIP || { aws secretsmanager get-secret-value --secret-id \"ssh-key-secret-arn\" --output=text --query=SecretString > ~/.ssh/id_rsa ; }',\n              '$SKIP || { mkdir -p ~/.ssh ; }',\n              '$SKIP || { chmod 0600 ~/.ssh/id_rsa ~/.ssh/config ; }',\n              '$SKIP || { ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts ; }',\n              '$SKIP || { ls .git && { echo \".git directory exists\";  } || { echo \".git directory doesnot exist - cloning...\" && git init . 
&& git remote add origin git@github.com:owner/repo.git && git fetch && git reset --hard origin/master && git branch -M master && git clean -fqdx; } ; }',\n              \"$SKIP || { git describe --exact-match master && { echo 'Skip condition is met, skipping...' && export SKIP=true; } || { echo 'Skip condition is not met, continuing...' && export SKIP=false; } ; }\",\n              '$SKIP || { export GITHUB_TOKEN=$(aws secretsmanager get-secret-value --secret-id \"token-secret-arn\" --output=text --query=SecretString) ; }',\n              '$SKIP || { git rev-parse --verify origin/bump/$VERSION && { git checkout bump/$VERSION && git merge master && /bin/sh ./bump.sh && export VERSION=$(git describe) && echo Finished running user commands;  } || { git checkout master && git checkout -b temp && /bin/sh ./bump.sh && export VERSION=$(git describe) && echo Finished running user commands && git branch -M bump/$VERSION; } ; }',\n              '$SKIP || { git merge-base --is-ancestor bump/$VERSION origin/master && { echo \"Skipping: bump/$VERSION is an ancestor of origin/master\"; export SKIP=true; } || { echo \"Pushing: bump/$VERSION is ahead of origin/master\"; export SKIP=false; } ; }',\n              '$SKIP || { git remote add origin_ssh git@github.com:owner/repo.git ; }',\n              '$SKIP || { git push --atomic --follow-tags origin_ssh bump/$VERSION:bump/$VERSION ; }',\n              \"$SKIP || { curl --fail -X POST -o pr.json --header \\\"Authorization: token $GITHUB_TOKEN\\\" --header \\\"Content-Type: application/json\\\" -d \\\"{\\\\\\\"title\\\\\\\":\\\\\\\"chore(release): $VERSION\\\\\\\",\\\\\\\"base\\\\\\\":\\\\\\\"master\\\\\\\",\\\\\\\"head\\\\\\\":\\\\\\\"bump/$VERSION\\\\\\\"}\\\" https://api.github.com/repos/owner/repo/pulls && export PR_NUMBER=$(node -p 'require(\\\"./pr.json\\\").number') ; }\",\n              '$SKIP || { curl --fail -X PATCH --header \"Authorization: token $GITHUB_TOKEN\" --header \"Content-Type: application/json\" -d 
\"{\\\\\"body\\\\\":\\\\\"See [CHANGELOG](https://github.com/owner/repo/blob/bump/$VERSION/CHANGELOG.md)\\\\\"}\" https://api.github.com/repos/owner/repo/pulls/$PR_NUMBER ; }',\n            ],\n          },\n        },\n      }, undefined, 2),\n    },\n  });\n\n});\n\ntest('autoBump with schedule', () => {\n\n  const stack = new Stack(new cdk.App(), 'TestStack');\n\n  // WHEN\n  new AutoBump(stack, 'MyAutoBump', {\n    repo: MOCK_REPO,\n    scheduleExpression: 'cron(0 12 * * ? *)',\n  });\n  const template = Template.fromStack(stack);\n\n  // default schedule\n  template.hasResourceProperties('AWS::Events::Rule', {\n    ScheduleExpression: 'cron(0 12 * * ? *)',\n  });\n\n});\n\ntest('autoBump with custom cloneDepth', () => {\n  // GIVEN\n  const stack = new Stack(new cdk.App(), 'TestStack');\n\n  // WHEN\n  new AutoBump(stack, 'MyAutoBump', {\n    repo: MOCK_REPO,\n    cloneDepth: 10,\n  });\n  const template = Template.fromStack(stack);\n\n  // THEN\n\n  // build project\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Triggers: {\n      Webhook: false,\n    },\n    Source: {\n      Type: 'GITHUB',\n      GitCloneDepth: 10,\n      Location: 'https://github.com/owner/repo.git',\n      ReportBuildStatus: false,\n      BuildSpec: JSON.stringify({\n        version: '0.2',\n        phases: {\n          pre_build: {\n            commands: [\n              'git config --global user.email \"email@email\"',\n              'git config --global user.name \"user\"',\n            ],\n          },\n          build: {\n            commands: [\n              'export SKIP=false',\n              '$SKIP || { aws secretsmanager get-secret-value --secret-id \"ssh-key-secret-arn\" --output=text --query=SecretString > ~/.ssh/id_rsa ; }',\n              '$SKIP || { mkdir -p ~/.ssh ; }',\n              '$SKIP || { chmod 0600 ~/.ssh/id_rsa ~/.ssh/config ; }',\n              '$SKIP || { ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts ; }',\n              '$SKIP || { 
ls .git && { echo \".git directory exists\";  } || { echo \".git directory doesnot exist - cloning...\" && git init . && git remote add origin git@github.com:owner/repo.git && git fetch && git reset --hard origin/master && git branch -M master && git clean -fqdx; } ; }',\n              \"$SKIP || { git describe --exact-match master && { echo 'Skip condition is met, skipping...' && export SKIP=true; } || { echo 'Skip condition is not met, continuing...' && export SKIP=false; } ; }\",\n              '$SKIP || { export GITHUB_TOKEN=$(aws secretsmanager get-secret-value --secret-id \"token-secret-arn\" --output=text --query=SecretString) ; }',\n              '$SKIP || { git rev-parse --verify origin/bump/$VERSION && { git checkout bump/$VERSION && git merge master && /bin/sh ./bump.sh && export VERSION=$(git describe) && echo Finished running user commands;  } || { git checkout master && git checkout -b temp && /bin/sh ./bump.sh && export VERSION=$(git describe) && echo Finished running user commands && git branch -M bump/$VERSION; } ; }',\n              '$SKIP || { git merge-base --is-ancestor bump/$VERSION origin/master && { echo \"Skipping: bump/$VERSION is an ancestor of origin/master\"; export SKIP=true; } || { echo \"Pushing: bump/$VERSION is ahead of origin/master\"; export SKIP=false; } ; }',\n              '$SKIP || { git remote add origin_ssh git@github.com:owner/repo.git ; }',\n              '$SKIP || { git push --atomic --follow-tags origin_ssh bump/$VERSION:bump/$VERSION ; }',\n              \"$SKIP || { curl --fail -X POST -o pr.json --header \\\"Authorization: token $GITHUB_TOKEN\\\" --header \\\"Content-Type: application/json\\\" -d \\\"{\\\\\\\"title\\\\\\\":\\\\\\\"chore(release): $VERSION\\\\\\\",\\\\\\\"base\\\\\\\":\\\\\\\"master\\\\\\\",\\\\\\\"head\\\\\\\":\\\\\\\"bump/$VERSION\\\\\\\"}\\\" https://api.github.com/repos/owner/repo/pulls && export PR_NUMBER=$(node -p 'require(\\\"./pr.json\\\").number') ; }\",\n              '$SKIP || { curl --fail 
-X PATCH --header \"Authorization: token $GITHUB_TOKEN\" --header \"Content-Type: application/json\" -d \"{\\\\\"body\\\\\":\\\\\"See [CHANGELOG](https://github.com/owner/repo/blob/bump/$VERSION/CHANGELOG.md)\\\\\"}\" https://api.github.com/repos/owner/repo/pulls/$PR_NUMBER ; }',\n            ],\n          },\n        },\n      }, undefined, 2),\n    },\n  });\n});\n\ntest('autoBump with schedule disabled', () => {\n  // GIVEN\n  const stack = new Stack(new cdk.App(), 'TestStack');\n\n  // WHEN\n  new AutoBump(stack, 'MyAutoBump', {\n    repo: MOCK_REPO,\n    scheduleExpression: 'disable',\n  });\n  const template = Template.fromStack(stack);\n\n  // THEN\n  template.hasResourceProperties('AWS::Events::Rule', {\n    ScheduleExpression: 'disable',\n  });\n});\n\ntest('autoBump with push only', () => {\n  // GIVEN\n  const stack = new Stack(new cdk.App(), 'TestStack');\n  const repo = new WritableGitHubRepo({\n    sshKeySecret: { secretArn: 'ssh-key-secret-arn' },\n    commitUsername: 'user',\n    commitEmail: 'email@email',\n    repository: 'owner/repo',\n    tokenSecretArn: 'token-secret-arn',\n  });\n\n  // WHEN\n  new AutoBump(stack, 'MyAutoBump', {\n    repo,\n    pushOnly: true,\n  });\n  const template = Template.fromStack(stack);\n\n  // THEN\n\n  // build project\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Triggers: {\n      Webhook: false,\n    },\n    Source: {\n      Type: 'GITHUB',\n      GitCloneDepth: 0,\n      Location: 'https://github.com/owner/repo.git',\n      ReportBuildStatus: false,\n      BuildSpec: JSON.stringify({\n        version: '0.2',\n        phases: {\n          pre_build: {\n            commands: [\n              'git config --global user.email \"email@email\"',\n              'git config --global user.name \"user\"',\n            ],\n          },\n          build: {\n            commands: [\n              'export SKIP=false',\n              '$SKIP || { aws secretsmanager get-secret-value --secret-id 
\"ssh-key-secret-arn\" --output=text --query=SecretString > ~/.ssh/id_rsa ; }',\n              '$SKIP || { mkdir -p ~/.ssh ; }',\n              '$SKIP || { chmod 0600 ~/.ssh/id_rsa ~/.ssh/config ; }',\n              '$SKIP || { ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts ; }',\n              '$SKIP || { ls .git && { echo \".git directory exists\";  } || { echo \".git directory doesnot exist - cloning...\" && git init . && git remote add origin git@github.com:owner/repo.git && git fetch && git reset --hard origin/master && git branch -M master && git clean -fqdx; } ; }',\n              \"$SKIP || { git describe --exact-match master && { echo 'Skip condition is met, skipping...' && export SKIP=true; } || { echo 'Skip condition is not met, continuing...' && export SKIP=false; } ; }\",\n              '$SKIP || { git rev-parse --verify origin/bump/$VERSION && { git checkout bump/$VERSION && git merge master && /bin/sh ./bump.sh && export VERSION=$(git describe) && echo Finished running user commands;  } || { git checkout master && git checkout -b temp && /bin/sh ./bump.sh && export VERSION=$(git describe) && echo Finished running user commands && git branch -M bump/$VERSION; } ; }',\n              '$SKIP || { git merge-base --is-ancestor bump/$VERSION origin/master && { echo \"Skipping: bump/$VERSION is an ancestor of origin/master\"; export SKIP=true; } || { echo \"Pushing: bump/$VERSION is ahead of origin/master\"; export SKIP=false; } ; }',\n              '$SKIP || { git remote add origin_ssh git@github.com:owner/repo.git ; }',\n              '$SKIP || { git push --atomic --follow-tags origin_ssh bump/$VERSION:bump/$VERSION ; }',\n            ],\n          },\n        },\n      }, undefined, 2),\n    },\n  });\n});\n\ntest('autoBump with pull request with custom options', () => {\n  // GIVEN\n  const stack = new Stack(new cdk.App(), 'TestStack');\n\n  // WHEN\n  new AutoBump(stack, 'MyAutoBump', {\n    repo: MOCK_REPO,\n\n    title: 'custom title',\n    body: 
'custom body',\n    base: {\n      name: 'release',\n    },\n\n  });\n  const template = Template.fromStack(stack);\n\n  // THEN\n\n  // build project\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Triggers: {\n      Webhook: false,\n    },\n    Source: {\n      Type: 'GITHUB',\n      GitCloneDepth: 0,\n      Location: 'https://github.com/owner/repo.git',\n      ReportBuildStatus: false,\n      BuildSpec: JSON.stringify({\n        version: '0.2',\n        phases: {\n          pre_build: {\n            commands: [\n              'git config --global user.email \"email@email\"',\n              'git config --global user.name \"user\"',\n            ],\n          },\n          build: {\n            commands: [\n              'export SKIP=false',\n              '$SKIP || { aws secretsmanager get-secret-value --secret-id \"ssh-key-secret-arn\" --output=text --query=SecretString > ~/.ssh/id_rsa ; }',\n              '$SKIP || { mkdir -p ~/.ssh ; }',\n              '$SKIP || { chmod 0600 ~/.ssh/id_rsa ~/.ssh/config ; }',\n              '$SKIP || { ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts ; }',\n              '$SKIP || { ls .git && { echo \".git directory exists\";  } || { echo \".git directory doesnot exist - cloning...\" && git init . && git remote add origin git@github.com:owner/repo.git && git fetch && git reset --hard origin/release && git branch -M release && git clean -fqdx; } ; }',\n              \"$SKIP || { git describe --exact-match release && { echo 'Skip condition is met, skipping...' && export SKIP=true; } || { echo 'Skip condition is not met, continuing...' 
&& export SKIP=false; } ; }\",\n              '$SKIP || { export GITHUB_TOKEN=$(aws secretsmanager get-secret-value --secret-id \"token-secret-arn\" --output=text --query=SecretString) ; }',\n              '$SKIP || { git rev-parse --verify origin/bump/$VERSION && { git checkout bump/$VERSION && git merge release && /bin/sh ./bump.sh && export VERSION=$(git describe) && echo Finished running user commands;  } || { git checkout release && git checkout -b temp && /bin/sh ./bump.sh && export VERSION=$(git describe) && echo Finished running user commands && git branch -M bump/$VERSION; } ; }',\n              '$SKIP || { git merge-base --is-ancestor bump/$VERSION origin/release && { echo \"Skipping: bump/$VERSION is an ancestor of origin/release\"; export SKIP=true; } || { echo \"Pushing: bump/$VERSION is ahead of origin/release\"; export SKIP=false; } ; }',\n              '$SKIP || { git remote add origin_ssh git@github.com:owner/repo.git ; }',\n              '$SKIP || { git push --atomic --follow-tags origin_ssh bump/$VERSION:bump/$VERSION ; }',\n              \"$SKIP || { curl --fail -X POST -o pr.json --header \\\"Authorization: token $GITHUB_TOKEN\\\" --header \\\"Content-Type: application/json\\\" -d \\\"{\\\\\\\"title\\\\\\\":\\\\\\\"custom title\\\\\\\",\\\\\\\"base\\\\\\\":\\\\\\\"release\\\\\\\",\\\\\\\"head\\\\\\\":\\\\\\\"bump/$VERSION\\\\\\\"}\\\" https://api.github.com/repos/owner/repo/pulls && export PR_NUMBER=$(node -p 'require(\\\"./pr.json\\\").number') ; }\",\n              '$SKIP || { curl --fail -X PATCH --header \"Authorization: token $GITHUB_TOKEN\" --header \"Content-Type: application/json\" -d \"{\\\\\"body\\\\\":\\\\\"custom body\\\\\"}\" https://api.github.com/repos/owner/repo/pulls/$PR_NUMBER ; }',\n            ],\n          },\n        },\n      }, undefined, 2),\n    },\n  });\n});\n\ntest('autoBump with pull request fails when head=base', () => {\n  // GIVEN\n  const stack = new Stack(new cdk.App(), 'TestStack');\n\n  // WHEN\n  expect(() 
=> new AutoBump(stack, 'MyAutoBump', {\n    repo: MOCK_REPO,\n    base: {\n      name: 'master',\n    },\n    head: {\n      name: 'master',\n    },\n  })).toThrow();\n});\n"
  },
  {
    "path": "lib/__tests__/canary.test.ts",
    "content": "import * as path from 'path';\nimport { App, Stack, aws_events as events } from 'aws-cdk-lib';\nimport { Template } from 'aws-cdk-lib/assertions';\nimport { Canary } from '../../lib';\n\n\nconst testDir = path.join(__dirname, 'delivlib-tests', 'linux');\n\ntest('correctly creates canary', () => {\n  // GIVEN\n  const stack = new Stack(new App(), 'TestStack');\n  // WHEN\n  new Canary(stack, 'Canary', {\n    schedule: events.Schedule.expression('rate(1 minute)'),\n    scriptDirectory: testDir,\n    entrypoint: 'test.sh',\n  });\n  const template = Template.fromStack(stack);\n  // THEN\n  template.hasResourceProperties('AWS::CloudWatch::Alarm', {\n    ComparisonOperator: 'GreaterThanOrEqualToThreshold',\n    EvaluationPeriods: 1,\n    Threshold: 1,\n    Dimensions: [{\n      Name: 'ProjectName',\n      Value: {\n        Ref: 'CanaryShellableA135E79C',\n      },\n    }],\n    MetricName: 'FailedBuilds',\n    Namespace: 'AWS/CodeBuild',\n    Statistic: 'Sum',\n    TreatMissingData: 'ignore',\n    Period: 300,\n  });\n\n  template.hasResourceProperties('AWS::Events::Rule', {\n    ScheduleExpression: 'rate(1 minute)',\n    State: 'ENABLED',\n    Targets: [{\n      Arn: {\n        'Fn::GetAtt': [\n          'CanaryShellableA135E79C',\n          'Arn',\n        ],\n      },\n      Id: 'Target0',\n      RoleArn: {\n        'Fn::GetAtt': [\n          'CanaryShellableEventsRoleC4030D0D',\n          'Arn',\n        ],\n      },\n    }],\n  });\n\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Artifacts: {\n      Type: 'NO_ARTIFACTS',\n    },\n    Environment: {\n      ComputeType: 'BUILD_GENERAL1_MEDIUM',\n      Image: 'aws/codebuild/standard:7.0',\n      PrivilegedMode: false,\n      Type: 'LINUX_CONTAINER',\n      EnvironmentVariables: [\n        {\n          Name: 'SCRIPT_S3_BUCKET',\n          Type: 'PLAINTEXT',\n          Value: {\n            'Fn::Sub': 'cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}',\n          },\n        
},\n        {\n          Name: 'SCRIPT_S3_KEY',\n          Type: 'PLAINTEXT',\n          Value: '3d34b07ba871989d030649c646b3096ba7c78ca531897bcdb0670774d2f9d3e4.zip',\n        },\n        {\n          Name: 'IS_CANARY',\n          Type: 'PLAINTEXT',\n          Value: 'true',\n        },\n      ],\n    },\n    ServiceRole: {\n      'Fn::GetAtt': [\n        'CanaryShellableRole063BC07D',\n        'Arn',\n      ],\n    },\n    Source: {\n      // tslint:disable-next-line:max-line-length\n      BuildSpec: '{\\n  \"version\": \"0.2\",\\n  \"phases\": {\\n    \"install\": {\\n      \"commands\": [\\n        \"command -v yarn > /dev/null || npm install --global yarn\"\\n      ]\\n    },\\n    \"pre_build\": {\\n      \"commands\": [\\n        \"echo \\\\\"Downloading scripts from s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY}\\\\\"\",\\n        \"aws s3 cp s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY} /tmp\",\\n        \"mkdir -p /tmp/scriptdir\",\\n        \"unzip /tmp/$(basename $SCRIPT_S3_KEY) -d /tmp/scriptdir\"\\n      ]\\n    },\\n    \"build\": {\\n      \"commands\": [\\n        \"export SCRIPT_DIR=/tmp/scriptdir\",\\n        \"echo \\\\\"Running test.sh\\\\\"\",\\n        \"/bin/bash /tmp/scriptdir/test.sh\"\\n      ]\\n    }\\n  }\\n}',\n    },\n  });\n});\n"
  },
  {
    "path": "lib/__tests__/change-control-lambda/disable-transition.test.ts",
    "content": "// eslint-disable-next-line @typescript-eslint/no-require-imports\n\n\nconst pipelineName = 'MyPipeline';\nconst stageName = 'MyStage';\n\nconst mockCodePipelineClient = {\n  disableStageTransition: jest.fn().mockName('CodePipeline.disableStageTransition'),\n  enableStageTransition: jest.fn().mockName('CodePipeline.enableStageTransition'),\n\n};\n\njest.mock('@aws-sdk/client-codepipeline', () => {\n  return {\n    CodePipeline: jest.fn().mockImplementation(() => {\n      return mockCodePipelineClient;\n    }),\n  };\n});\n\nbeforeEach(() => {\n  mockCodePipelineClient.disableStageTransition.mockImplementation(() => Promise.resolve({}));\n  mockCodePipelineClient.enableStageTransition.mockImplementation(() => Promise.resolve({}));\n});\n\ndescribe('disableTransition', () => {\n  // eslint-disable-next-line @typescript-eslint/no-require-imports\n  const disableTransition = require('../../change-control-lambda/disable-transition').disableTransition;\n\n  test('with a simple reason', async () => {\n    // GIVEN\n    const reason = 'Just Because';\n    // WHEN\n    await expect(disableTransition(pipelineName, stageName, reason))\n      .resolves.toBeUndefined();\n    // THEN\n    expect(mockCodePipelineClient.disableStageTransition)\n      .toHaveBeenCalledWith({ pipelineName, stageName, reason, transitionType: 'Inbound' });\n  });\n\n  test('with a reason that needs cleaning up', async () => {\n    // GIVEN\n    const reason = 'It\\'s so cool!';\n    // WHEN\n    await expect(disableTransition(pipelineName, stageName, reason))\n      .resolves.toBeUndefined();\n    // THEN\n    const cleanReason = reason.replace(/[^a-zA-Z0-9!@ \\(\\)\\.\\*\\?\\-]/g, '-');\n    expect(mockCodePipelineClient.disableStageTransition)\n      .toHaveBeenCalledWith({ pipelineName, stageName, reason: cleanReason, transitionType: 'Inbound' });\n  });\n\n  test('with a reason that is too long', async () => {\n    // GIVEN\n    const reason = 'Reason '.repeat(300);\n    // WHEN\n  
  await expect(disableTransition(pipelineName, stageName, reason))\n      .resolves.toBeUndefined();\n    // THEN\n    const cleanReason = reason.slice(0, 300);\n    expect(mockCodePipelineClient.disableStageTransition)\n      .toHaveBeenCalledWith({ pipelineName, stageName, reason: cleanReason, transitionType: 'Inbound' });\n  });\n});\n\ntest('enableTransition', async () => {\n  // GIVEN\n  // eslint-disable-next-line @typescript-eslint/no-require-imports\n  const enableTransition = require('../../change-control-lambda/disable-transition').enableTransition;\n  // WHEN\n  expect(() => enableTransition(pipelineName, stageName))\n    .not.toThrow();\n  // THEN\n  expect(mockCodePipelineClient.enableStageTransition)\n    .toHaveBeenCalledWith({ pipelineName, stageName, transitionType: 'Inbound' });\n});\n"
  },
  {
    "path": "lib/__tests__/change-control-lambda/handler.test.ts",
    "content": "import type * as timeWindow from '../../change-control-lambda/time-window';\n\n// _____                                _   _\n// |  __ \\                              | | (_)\n// | |__) | __ ___ _ __   __ _ _ __ __ _| |_ _  ___  _ __\n// |  ___/ '__/ _ \\ '_ \\ / _` | '__/ _` | __| |/ _ \\| '_ \\\n// | |   | | |  __/ |_) | (_| | | | (_| | |_| | (_) | | | |\n// |_|   |_|  \\___| .__/ \\__,_|_|  \\__,_|\\__|_|\\___/|_| |_|\n//                | |\n//                |_|\n\nconst mockS3Client = {\n  getObject: jest.fn().mockName('S3.GetObject'),\n};\n\njest.mock('@aws-sdk/client-s3', () => {\n  return {\n    S3: jest.fn().mockImplementation(() => {\n      return mockS3Client;\n    }),\n  };\n});\n\njest.mock('../../change-control-lambda/disable-transition');\njest.mock('../../change-control-lambda/time-window');\n\n// eslint-disable-next-line @typescript-eslint/no-require-imports\nconst transitions = require('../../change-control-lambda/disable-transition');\n// eslint-disable-next-line @typescript-eslint/no-require-imports\nconst timeWindowModule = require('../../change-control-lambda/time-window');\n\nconst mockEnableTransition =\n  jest.fn((_pipeline: string, _stage: string) => Promise.resolve(undefined))\n    .mockName('enableTransition');\n\nconst mockDisableTransition =\n  jest.fn((_pipeline: string, _stage: string, _reason: string) => Promise.resolve(undefined))\n    .mockName('disableTransition');\n\nconst mockShouldBlockPipeline = jest.fn((_icsData: string | Buffer, _now?: Date): timeWindow.CalendarEvent | undefined => undefined)\n  .mockName('shouldBlockPipeline');\n\nconst initialEnv = process.env;\nbeforeEach(() => {\n  jest.restoreAllMocks();\n  process.env = { ...testEnv };\n  transitions.enableTransition = mockEnableTransition;\n  transitions.disableTransition = mockDisableTransition;\n  timeWindowModule.shouldBlockPipeline = mockShouldBlockPipeline;\n});\n\nconst mockConsoleLog = jest.fn().mockName('console.log');\nconsole.log = 
mockConsoleLog;\n\nconst bucketName = 'BucketName';\nconst objectKey = 'ObjectKey';\nconst stageName = 'StageName';\nconst pipelineName = 'PipelineName';\nconst testEnv = {\n  CHANGE_CONTROL_BUCKET_NAME: bucketName,\n  CHANGE_CONTROL_OBJECT_KEY: objectKey,\n  STAGE_NAME: stageName,\n  PIPELINE_NAME: pipelineName,\n};\n\n// _______        _\n// |__   __|      | |\n//    | | ___  ___| |_ ___\n//    | |/ _ \\/ __| __/ __|\n//    | |  __/\\__ \\ |_\\__ \\\n//    |_|\\___||___/\\__|___/\n\ndescribe('handler', () => {\n  // eslint-disable-next-line @typescript-eslint/no-require-imports\n  const handler = require('../../change-control-lambda/index').handler;\n\n  describe('failing conditions', () => {\n    for (const variable of Object.keys(testEnv)) {\n      test(`when ${variable} is not set`, () => {\n        // GIVEN\n        delete process.env[variable];\n\n        // THEN\n        return expect(handler())\n          .rejects.toThrow(`Environment variable \"${variable}\" is required`);\n      });\n    }\n\n    test('when S3 access fails', async () => {\n      // GIVEN\n      const e = new Error('S3 Not Working - the apocalypse has begun');\n      mockS3Client.getObject.mockImplementationOnce(() => Promise.reject(e));\n\n      // THEN\n      return expect(handler()).rejects.toThrow(e);\n    });\n  });\n\n  for (const cause of ['NoSuchKey', 'NoSuchBucket']) {\n    test(`when no calendar is found (due to ${cause})`, async () => {\n      // GIVEN\n      mockS3Client.getObject.mockImplementationOnce(() => Promise.reject({ code: cause, message: cause }));\n      mockShouldBlockPipeline.mockReturnValueOnce({\n        summary: 'Blocked by default',\n        // Other properties - values irrelevant\n        start: new Date(),\n        end: new Date(),\n        dtstamp: new Date(),\n        type: 'VEVENT',\n        datetype: 'date-time',\n        params: [],\n      });\n\n      // WHEN\n      await expect(handler()).resolves.toBeUndefined();\n\n      // THEN\n      
expect(mockS3Client.getObject)\n        .toHaveBeenCalledWith({ Bucket: bucketName, Key: objectKey });\n\n      expect(mockShouldBlockPipeline)\n        .toHaveBeenCalledWith(expect.stringContaining('No change control calendar was found'),\n          expect.any(Date));\n\n      return expect(mockDisableTransition)\n        .toHaveBeenCalledWith(pipelineName, stageName, 'Blocked by default');\n    });\n  }\n\n  test('when the window is open', async () => {\n    // GIVEN\n    const iCalBody = 'Some iCal document (obviously, this is a fake one!)';\n    mockS3Client.getObject.mockImplementationOnce(() => Promise.resolve({ Body: iCalBody }));\n    mockShouldBlockPipeline.mockReturnValueOnce(undefined);\n\n    // WHEN\n    await expect(handler()).resolves.toBeUndefined();\n\n    // THEN\n    expect(mockS3Client.getObject)\n      .toHaveBeenCalledWith({ Bucket: bucketName, Key: objectKey });\n\n    expect(mockShouldBlockPipeline)\n      .toHaveBeenCalledWith(iCalBody, expect.any(Date));\n\n    return expect(mockEnableTransition)\n      .toHaveBeenCalledWith(pipelineName, stageName);\n  });\n});\n\nafterAll(() => {\n  process.env = initialEnv;\n});\n"
  },
  {
    "path": "lib/__tests__/change-control-lambda/time-window.test.ts",
    "content": "import { shouldBlockPipeline } from '../../change-control-lambda/time-window';\n// tslint:disable:no-console\n\nconst ics = `\nBEGIN:VCALENDAR\nVERSION:2.0\nPRODID:-//Events Calendar//iCal4j 1.0//EN\n\nBEGIN:VEVENT\nUID:2017-04-12T07:00:00.000Z to 2017-04-19T06:59:59.000Z\nDTSTAMP:20190114T161956Z\nDTSTART:20170412T070000Z\nDTEND:20170419T065959Z\nSUMMARY:Block1\nEND:VEVENT\n\nBEGIN:VEVENT\nUID:2017-11-23T08:00:00.000Z to 2017-11-27T08:00:00.000Z\nDTSTAMP:20190114T161956Z\nDTSTART:20171123T080000Z\nDTEND:20171127T080000Z\nSUMMARY:Block2\nEND:VEVENT\n\nBEGIN:VEVENT\nUID:2019-02-03T08:00:00.000Z to 2019-02-04T07:59:00.000Z\nDTSTAMP:20190114T161956Z\nDTSTART:20190203T080000Z\nDTEND:20190204T075900Z\nSUMMARY:Block3\nEND:VEVENT\n\nEND:VCALENDAR\n`;\n\n/**\n * An event with an 'RRULE' property will be recurring. Events following the\n * initial event are calculated based on the RRULE specified and the\n * initial event.\n *\n * https://icalendar.org/iCalendar-RFC-5545/3-3-10-recurrence-rule.html\n *\n * Example:\n *\n * BEGIN:VEVENT\n * RRULE:FREQ=WEEKLY;INTERVAL=1  <--- Weekly recurrence, every 1 week. 
If we set\n *                                    the INTERVAL=2, it would be every 2 weeks.\n * DTSTART:20200501T220000Z      <--- Start datetime of the initial event in the series.\n * DTEND:20200504T170000Z        <--- End datetime of the initial event in the series.\n * SUMMARY:RecurringBlock1\n * DTSTAMP:20200501T163641Z\n * SEQUENCE:0\n * END:VEVENT\n *\n */\nconst recurringIcs = `\nBEGIN:VCALENDAR\nVERSION:2.0\nPRODID:-//Events Calendar//iCal4j 1.0//EN\n\nBEGIN:VEVENT\nRRULE:FREQ=WEEKLY;INTERVAL=1\nDTSTART:20200501T220000Z\nDTEND:20200504T170000Z\nDTSTAMP:20200501T163641Z\nSUMMARY:RecurringBlock1\nSEQUENCE:0\nEND:VEVENT\n\nBEGIN:VEVENT\nRRULE:FREQ=WEEKLY;INTERVAL=1\nDTSTART:20200505T220000Z\nDTEND:20200506T040000Z\nDTSTAMP:20200501T163641Z\nSUMMARY:RecurringBlock2\nSEQUENCE:0\nEND:VEVENT\n\nEND:VCALENDAR\n`;\n\n// The tests below can only run if TZ=UTC due to an issue in `node-ical`.\nconst test_ = new Date().getTimezoneOffset() !== 0\n  ? test.skip\n  : test;\n\ntest_('non blocked time before all events', () => {\n  const x = shouldBlockPipeline(ics, new Date('2019-02-03T07:00:00.000Z'), 300);\n  expect(x).toBeUndefined();\n});\n\ntest_('non blocked time in between events', () => {\n  const x = shouldBlockPipeline(ics, new Date('2017-07-12T07:00:00.000Z'));\n  expect(x).toBeUndefined();\n});\n\ntest_('left edge', () => {\n  const x = shouldBlockPipeline(ics, new Date('2017-04-12T07:00:00.000Z'));\n  expect(x && x.summary).toBe('Block1');\n});\n\ntest_('right edge', () => {\n  const x = shouldBlockPipeline(ics, new Date('2017-11-27T08:00:00.000Z'));\n  expect(x && x.summary).toBe('Block2');\n});\n\ntest_('a blocked window starts AND finishes within margin', () => {\n  // Using 72 hours padding to widely overlap Block3\n  const x = shouldBlockPipeline(ics, new Date('2019-02-03T07:00:00.000Z'), 72 * 3_600);\n  expect(x && x.summary).toBe('Block3');\n});\n\n// Test that the initial event in a recurring series blocks the pipeline when\n// the left edge aligns 
with the current time.\ntest_('current time aligns with the left edge of the first event in a series blocks pipeline', () => {\n  const x = shouldBlockPipeline(recurringIcs, new Date('2020-05-01T22:00:00.000Z'));\n  expect(x && x.summary).toBe('RecurringBlock1');\n});\n\n// Test that a future event in a recurring series blocks the pipeline when\n// the left edge aligns with the current time.\ntest_('current time aligns with the left edge of future event in a series blocks pipeline', () => {\n  const x = shouldBlockPipeline(recurringIcs, new Date('2020-05-22T22:00:00.000Z'));\n  expect(x && x.summary).toBe('RecurringBlock1');\n});\n\n// Test that the initial event in a recurring series blocks the pipeline when\n// the right edge aligns with the current time.\ntest_('current time aligns with the right edge of the first occurrence blocks pipeline', () => {\n  const x = shouldBlockPipeline(recurringIcs, new Date('2020-05-06T04:00:00.000Z'));\n  expect(x && x.summary).toBe('RecurringBlock2');\n});\n\n\n// Test that a future event in a recurring series blocks the pipeline when\n// the right edge aligns with the current time.\ntest_('current time aligns with the right edge of future event in series blocks pipeline', () => {\n  const x = shouldBlockPipeline(recurringIcs, new Date('2020-05-27T04:00:00.000Z'));\n  expect(x && x.summary).toBe('RecurringBlock2');\n});\n\n// Test that we do not block between events in a recurring series.\ntest_('current time is between future events in recurring series does not block pipeline', () => {\n  const x = shouldBlockPipeline(recurringIcs, new Date('2020-05-14T00:00:00.000Z'));\n  expect(x).toBeUndefined();\n});\n"
  },
  {
    "path": "lib/__tests__/chime-notifier.test.ts",
    "content": "import https from 'https';\nimport {\n  App, Lazy, Stack,\n  aws_codepipeline as aws_codepipeline,\n  aws_codepipeline_actions as aws_codepipeline_actions,\n} from 'aws-cdk-lib';\nimport { Template } from 'aws-cdk-lib/assertions';\nimport { Construct } from 'constructs';\nimport { ChimeNotifier } from '../../lib';\nimport { codePipeline, handler } from '../../lib/chime-notifier/handler/notifier-handler';\n\nconst mockHttpsWrite = jest.fn();\n\nhttps.request = jest.fn().mockImplementation((_url, _options, cb) => {\n  return {\n    on: jest.fn(),\n    write: mockHttpsWrite,\n    end: () => cb({\n      statusCode: 200,\n      headers: {},\n      setEncoding: () => undefined,\n      on: (event: string, listener: () => void) => {\n        if (event === 'end') { listener(); }\n      },\n    }),\n  };\n});\n\ntest('call codepipeline and then post to webhooks', async () => {\n  codePipeline.getPipelineExecution = jest.fn().mockReturnValue(\n    Promise.resolve({\n      pipelineExecution: {\n        pipelineExecutionId: 'xyz',\n        pipelineVersion: 1,\n        pipelineName: 'xyz',\n        status: 'Succeeded',\n        artifactRevisions: [\n          {\n            revisionUrl: 'revision.com/url',\n            revisionId: '1234',\n            name: 'Source',\n            revisionSummary: 'A thing happened',\n          },\n        ],\n      },\n    }),\n  );\n\n  codePipeline.listActionExecutions = jest.fn().mockReturnValue(\n    Promise.resolve({\n      actionExecutionDetails: [\n        {\n          stageName: 'Source',\n          actionName: 'Source',\n          status: 'Succeeded',\n          output: {\n            executionResult: {\n              externalExecutionUrl: 'https://SUCCEED',\n            },\n          },\n        },\n        {\n          stageName: 'Build',\n          actionName: 'Build',\n          status: 'Failed',\n          output: {\n            executionResult: {\n              externalExecutionUrl: 'https://FAIL',\n            
},\n          },\n        },\n      ],\n    }),\n  );\n\n  await handler({\n    webhookUrls: ['https://my.url/'],\n    message: \"Pipeline '$PIPELINE' failed on '$REVISION' in '$ACTION' (see $URL)\",\n    detail: {\n      'pipeline': 'myPipeline',\n      'version': '1',\n      'state': 'FAILED',\n      'execution-id': 'abcdef',\n    },\n  });\n\n  expect(https.request).toBeCalledWith('https://my.url/', expect.objectContaining({\n    method: 'POST',\n  }), expect.any(Function));\n  expect(mockHttpsWrite).toBeCalledWith(expect.stringContaining('\"Content\"')); // Contains JSON\n\n  expect(mockHttpsWrite).toBeCalledWith(expect.stringContaining('myPipeline')); // Contains the pipeline name\n  expect(mockHttpsWrite).toBeCalledWith(expect.stringContaining('A thing happened')); // Contains the revision summary\n  expect(mockHttpsWrite).toBeCalledWith(expect.stringContaining('Build')); // Contains the failing action name\n  expect(mockHttpsWrite).toBeCalledWith(expect.stringContaining('https://FAIL')); // Contains the failing URL\n});\n\ntest('can add to stack', () => {\n  const stack = new Stack(new App(), 'TestStack');\n  const pipeline = new aws_codepipeline.Pipeline(stack, 'Pipe');\n  pipeline.addStage({ stageName: 'Source', actions: [new FakeSourceAction()] });\n  pipeline.addStage({ stageName: 'Build', actions: [new aws_codepipeline_actions.ManualApprovalAction({ actionName: 'Dummy' })] });\n\n  new ChimeNotifier(stack, 'Chime', {\n    pipeline,\n    webhookUrls: ['https://go/'],\n  });\n  const template = Template.fromStack(stack);\n\n  // EXPECT: no error\n  template.resourceCountIs('AWS::Lambda::Function', 1);\n});\n\ntest('webhook url can be a token', () => {\n  const stack = new Stack(new App(), 'TestStack');\n  const pipeline = new aws_codepipeline.Pipeline(stack, 'Pipe');\n  pipeline.addStage({ stageName: 'Source', actions: [new FakeSourceAction()] });\n  pipeline.addStage({ stageName: 'Build', actions: [new aws_codepipeline_actions.ManualApprovalAction({ 
actionName: 'Dummy' })] });\n\n  new ChimeNotifier(stack, 'Chime', {\n    pipeline,\n    webhookUrls: [Lazy.string({ produce: () => 'https://go/' })],\n  });\n  const template = Template.fromStack(stack);\n\n  template.resourceCountIs('AWS::Lambda::Function', 1);\n});\n\nexport class FakeSourceAction extends aws_codepipeline_actions.Action {\n  constructor() {\n    super({\n      actionName: 'Fake',\n      category: aws_codepipeline.ActionCategory.SOURCE,\n      provider: 'FAKE',\n      artifactBounds: {\n        minInputs: 0,\n        maxInputs: 0,\n        minOutputs: 1,\n        maxOutputs: 1,\n      },\n      outputs: [new aws_codepipeline.Artifact('bla')],\n    });\n  }\n\n  // tslint:disable-next-line: max-line-length\n  protected bound(_scope: Construct, _stage: aws_codepipeline.IStage, _options: aws_codepipeline.ActionBindOptions): aws_codepipeline.ActionConfig {\n    return {\n      configuration: { },\n    };\n  }\n}\n"
  },
  {
    "path": "lib/__tests__/code-signing-cert.test.ts",
    "content": "import {\n  App, Stack,\n  aws_kms as kms,\n} from 'aws-cdk-lib';\nimport { Template } from 'aws-cdk-lib/assertions';\nimport { Construct } from 'constructs';\nimport * as delivlib from '../../lib';\n\n\nlet app: App;\nlet stack: Stack;\nlet key: kms.Key;\nbeforeEach(() => {\n  app = new App();\n  const randomExtraContainer = new Construct(app, 'SomethingElse');\n  stack = new Stack(randomExtraContainer, 'Stack', {\n    stackName: 'ActualStackName',\n  });\n  key = new kms.Key(stack, 'Key');\n});\n\nconst distinguishedName: delivlib.DistinguishedName = {\n  commonName: 'CN',\n  country: 'Country',\n  emailAddress: 'Email',\n  locality: 'Locality',\n  organizationName: 'OrgName',\n  organizationalUnitName: 'OrgUnitName',\n  stateOrProvince: 'Province, Please',\n};\n\ntest('secret name consists of stack name and relative construct path', () => {\n  // WHEN\n  const yetAnotherParent = new Construct(stack, 'Inbetween');\n  new delivlib.CodeSigningCertificate(yetAnotherParent, 'Cert', {\n    distinguishedName,\n    pemCertificate: 'asdf',\n    secretEncryptionKey: key,\n  });\n  const template = Template.fromStack(stack);\n\n  // THEN - specifically: does not include construct names above the containing stack\n  // uses the actual stack name (and not the stack NODE name)\n  template.hasResourceProperties('Custom::RsaPrivateKeySecret', {\n    SecretName: 'ActualStackName/Inbetween/Cert/RSAPrivateKeyV2',\n  });\n});\n\n\ntest('secret name can be overridden', () => {\n  // WHEN\n  const yetAnotherParent = new Construct(stack, 'Inbetween');\n  new delivlib.CodeSigningCertificate(yetAnotherParent, 'Cert', {\n    distinguishedName,\n    pemCertificate: 'asdf',\n    secretEncryptionKey: key,\n    baseName: 'Sekrit',\n  });\n  const template = Template.fromStack(stack);\n\n  template.hasResourceProperties('Custom::RsaPrivateKeySecret', {\n    SecretName: 'Sekrit/RSAPrivateKeyV2',\n  });\n});\n"
  },
  {
    "path": "lib/__tests__/custom-resource-handlers/_cloud-formation.test.ts",
    "content": "/* eslint-disable @typescript-eslint/no-require-imports */\nimport { EventEmitter } from 'events';\nimport https = require('https');\nimport cfn = require('../../custom-resource-handlers/src/_cloud-formation');\n\nconst event: cfn.Event = {\n  RequestType: cfn.RequestType.CREATE,\n  RequestId: '00FDF1A4-69FE-4E90-A6D5-D0E97F561414',\n  ResourceType: 'Custom::Resource::Type',\n  StackId: 'Stack-1234567890',\n  LogicalResourceId: 'Resource123456',\n  ResponseURL: 'https://host.domain.tld:123/path/to/resource?query=string',\n  ResourceProperties: {},\n  PhysicalResourceId: undefined,\n};\nconst status = cfn.Status.SUCCESS;\nconst physicalId = 'Physical ID!';\nconst data = {};\nconst reason = 'I have reasons. Don\\'t ask.';\n\nconst httpsRequest = https.request = jest.fn().mockName('https.request');\n\ntest('sends the correct response to CloudFormation', () => {\n  httpsRequest.mockImplementationOnce((opts, cb) => {\n    expect(opts.headers['content-type']).toBe('');\n    expect(opts.hostname).toBe('host.domain.tld');\n    expect(opts.method).toBe('PUT');\n    expect(opts.port).toBe('123');\n    expect(opts.path).toBe('/path/to/resource?query=string');\n\n    const emitter = new EventEmitter();\n    let payload: string;\n\n    return {\n      on(evt: string, callback: (...args: any[]) => void) {\n        emitter.on(evt, callback);\n        return this;\n      },\n      once(evt: string, callback: (...args: any[]) => void) {\n        emitter.once(evt, callback);\n        return this;\n      },\n      write(str: string) {\n        payload = str;\n        return true;\n      },\n      end: jest.fn().mockImplementationOnce(() => {\n        expect(JSON.parse(payload || '{}')).toEqual({\n          Data: data,\n          LogicalResourceId: event.LogicalResourceId,\n          PhysicalResourceId: physicalId,\n          Reason: reason,\n          RequestId: event.RequestId,\n          StackId: event.StackId,\n          Status: status,\n        });\n        cb({ 
statusCode: 200 });\n      }),\n    } as any;\n  },\n  );\n\n  return expect(cfn.sendResponse(event, status, physicalId, data, reason))\n    .resolves.toBe(undefined);\n});\n\ntest('fails if the PUT request returns non-200', () => {\n  httpsRequest.mockImplementationOnce((opts, cb) => {\n    expect(opts.headers['content-type']).toBe('');\n    expect(opts.hostname).toBe('host.domain.tld');\n    expect(opts.method).toBe('PUT');\n    expect(opts.port).toBe('123');\n    expect(opts.path).toBe('/path/to/resource?query=string');\n\n    const emitter = new EventEmitter();\n    let payload: string;\n\n    return {\n      on(evt: string, callback: (...args: any[]) => void) {\n        emitter.on(evt, callback);\n        return this;\n      },\n      once(evt: string, callback: (...args: any[]) => void) {\n        emitter.once(evt, callback);\n        return this;\n      },\n      write(str: string) {\n        payload = str;\n        return true;\n      },\n      end: jest.fn().mockImplementationOnce(() => {\n        expect(JSON.parse(payload || '{}')).toEqual({\n          Data: data,\n          LogicalResourceId: event.LogicalResourceId,\n          PhysicalResourceId: physicalId,\n          Reason: reason,\n          RequestId: event.RequestId,\n          StackId: event.StackId,\n          Status: status,\n        });\n        cb({ statusCode: 500, statusMessage: 'Internal Error' });\n      }),\n    } as any;\n  });\n\n  return expect(cfn.sendResponse(event, status, physicalId, data, reason))\n    .rejects.toThrow('Unexpected error sending resopnse to CloudFormation: HTTP 500 (Internal Error)');\n});\n"
  },
  {
    "path": "lib/__tests__/custom-resource-handlers/_exec.test.ts",
    "content": "/* eslint-disable @typescript-eslint/no-require-imports */\nimport _exec = require('../../custom-resource-handlers/src/_exec');\n\ntest('forwards stdout (single-line)', () =>\n  expect(_exec('node', '-e', 'process.stdout.write(\"OKAY\")')).resolves.toBe('OKAY'),\n);\n\ntest('forwards stdout (multi-line)', () =>\n  expect(_exec('node', '-e', 'process.stdout.write(\"OKAY\\\\nGREAT\")')).resolves.toBe('OKAY\\nGREAT'),\n);\n\ntest('fails if the command exits with non-zero status', () =>\n  expect(_exec('node', '-e', 'process.exit(10)')).rejects.toThrow('Exited with status 10'),\n);\n\ntest('fails if the command is killed by a signal', () =>\n  expect(_exec('node', '-e', 'process.kill(process.pid, \"SIGKILL\")')).rejects.toThrow('Killed by SIGKILL'),\n);\n"
  },
  {
    "path": "lib/__tests__/custom-resource-handlers/_rmrf.test.ts",
    "content": "/* eslint-disable @typescript-eslint/no-require-imports */\nimport fs = require('fs');\nimport os = require('os');\nimport path = require('path');\n\nimport _rmrf = require('../../custom-resource-handlers/src/_rmrf');\n\ntest('removes a full directory', () => {\n  const dir = fs.mkdtempSync(os.tmpdir() + '/');\n  fs.writeFileSync(path.join(dir, 'exhibit-A'), 'Exhibit A');\n  return expect(_rmrf(dir).then(() => fs.existsSync(dir))).resolves.toBeFalsy();\n});\n"
  },
  {
    "path": "lib/__tests__/custom-resource-handlers/certificate-signing-request.test.ts",
    "content": "/* eslint-disable @typescript-eslint/no-require-imports */\nimport fs = require('fs');\nimport path = require('path');\nimport { PutObjectCommandInput } from '@aws-sdk/client-s3';\nimport cfn = require('../../custom-resource-handlers/src/_cloud-formation');\nimport lambda = require('../../custom-resource-handlers/src/_lambda');\n\nconst context: lambda.Context = { awsRequestId: '90E99AAE-B120-409A-9156-0C5925FDD996' } as lambda.Context;\nconst outputBucketName = 'csr-output-bucket-name';\nconst eventBase = {\n  LogicalResourceId: 'ResourceID12345689',\n  ResponseURL: 'https://response/url',\n  RequestId: '5EF100FB-0075-4716-970B-FBCA05BFE118',\n  ResourceProperties: {\n    ServiceToken: 'Service-Token (Would be the function ARN',\n    ResourceVersion: 'The hash of the function code',\n\n    DnCommonName: 'Test',\n    DnCountry: 'FR',\n    DnStateOrProvince: 'TestLand',\n    DnLocality: 'Test City',\n    DnOrganizationName: 'Test, Inc.',\n    DnOrganizationalUnitName: 'QA Department',\n    DnEmailAddress: 'test@acme.test',\n    KeyUsage: 'critical,use-the-key',\n    ExtendedKeyUsage: 'critical,abuse-the-key',\n    PrivateKeySecretId: 'arn:::private-key-secret',\n\n    OutputBucket: outputBucketName,\n  },\n  ResourceType: 'Custom::Resource::Type',\n  StackId: 'StackID-1324597',\n};\nconst mockTmpDir = '/tmp/directory/is/phony';\nconst mockPrivateKey = 'Pretend private key';\nconst mockCsr = 'Pretend CSR';\nconst mockCertificate = 'Pretend Certificate';\n\nconst csrDocument = `[ req ]\ndefault_md           = sha256\ndistinguished_name   = dn\nprompt               = no\nreq_extensions       = extensions\nstring_mask          = utf8only\nutf8                 = yes\n\n[ dn ]\nCN                   = ${eventBase.ResourceProperties.DnCommonName}\nC                    = ${eventBase.ResourceProperties.DnCountry}\nST                   = ${eventBase.ResourceProperties.DnStateOrProvince}\nL                    = ${eventBase.ResourceProperties.DnLocality}\nO       
             = ${eventBase.ResourceProperties.DnOrganizationName}\nOU                   = ${eventBase.ResourceProperties.DnOrganizationalUnitName}\nemailAddress         = ${eventBase.ResourceProperties.DnEmailAddress}\n\n[ extensions ]\nextendedKeyUsage     = ${eventBase.ResourceProperties.ExtendedKeyUsage}\nkeyUsage             = ${eventBase.ResourceProperties.KeyUsage}\nsubjectKeyIdentifier = hash`;\n\njest.spyOn(fs, 'mkdtemp').mockName('fs.mkdtemp')\n  .mockImplementation(async (_, cb) => cb(undefined as any, mockTmpDir));\nfs.readFile = jest.fn().mockName('fs.readFile')\n  .mockImplementation(async (file, opts, cb) => {\n    expect(opts.encoding).toBe('utf8');\n    switch (file) {\n      case require('path').join(mockTmpDir, 'csr.pem'):\n        return cb(undefined, mockCsr);\n      case require('path').join(mockTmpDir, 'cert.pem'):\n        return cb(undefined, mockCertificate);\n      default:\n        cb(new Error('Unexpected call!'));\n    }\n  }) as any;\nconst mockWriteFile = fs.writeFile = jest.fn().mockName('fs.writeFile')\n  .mockImplementation((_pth, _data, _opts, cb) => cb()) as any;\n\nconst mockExec = jest.fn().mockName('_exec').mockRejectedValue(new Error('Unexpected call!'));\njest.mock('../../custom-resource-handlers/src/_exec', () => mockExec);\njest.mock('../../custom-resource-handlers/src/_rmrf', () => mockRmrf);\nconst mockRmrf = jest.fn().mockName('_rmrf')\n  .mockResolvedValue(undefined);\njest.mock('../../custom-resource-handlers/src/_rmrf', () => mockRmrf);\njest.spyOn(cfn, 'sendResponse').mockName('cfn.sendResponse').mockResolvedValue(Promise.resolve());\n\nconst mockSecretsManagerClient = {\n  getSecretValue: jest.fn().mockName('SecretsManager.getSecretValue'),\n};\n\nconst mockS3Client = {\n  putObject: jest.fn().mockName('S3.putObject'),\n};\n\njest.mock('@aws-sdk/client-secrets-manager', () => {\n  return {\n    SecretsManager: jest.fn().mockImplementation(() => {\n      return mockSecretsManagerClient;\n    }),\n  
};\n});\n\njest.mock('@aws-sdk/client-s3', () => {\n  return {\n    S3: jest.fn().mockImplementation(() => {\n      return mockS3Client;\n    }),\n  };\n});\n\nbeforeEach(() => {\n  mockSecretsManagerClient.getSecretValue.mockImplementation(() => Promise.resolve({ SecretString: mockPrivateKey }));\n  mockS3Client.putObject.mockImplementation((request: PutObjectCommandInput) => {\n    expect(request.Bucket).toBe(outputBucketName);\n    expect(request.ContentType).toBe('application/x-pem-file');\n    switch (request.Key) {\n      case 'certificate-signing-request.pem':\n        expect(request.Body).toBe(mockCsr);\n        break;\n      case 'self-signed-certificate.pem':\n        expect(request.Body).toBe(mockCertificate);\n        break;\n      default:\n        return Promise.reject(`Unexpected object key requested: ${request.Key}`);\n    }\n\n    return Promise.resolve();\n  });\n});\n\ntest('Create', async () => {\n  const event: cfn.Event = {\n    RequestType: cfn.RequestType.CREATE,\n    PhysicalResourceId: undefined,\n    ...eventBase,\n  };\n\n  mockExec.mockImplementation(async (cmd: string, ...args: string[]) => {\n    expect(cmd).toBe('openssl');\n    switch (args[0]) {\n      case 'req':\n        expect(args).toEqual(['req', '-config', require('path').join(mockTmpDir, 'csr.config'),\n          '-key', require('path').join(mockTmpDir, 'private_key.pem'),\n          '-out', require('path').join(mockTmpDir, 'csr.pem'),\n          '-new']);\n        break;\n      case 'x509':\n        expect(args).toEqual(['x509', '-in', require('path').join(mockTmpDir, 'csr.pem'),\n          '-out', require('path').join(mockTmpDir, 'cert.pem'),\n          '-req',\n          '-signkey', require('path').join(mockTmpDir, 'private_key.pem'),\n          '-days', '365']);\n        break;\n      default:\n        throw new Error('Unexpected call!');\n    }\n    return '';\n  });\n\n  const { handler } = require('../../custom-resource-handlers/src/certificate-signing-request');\n  
await expect(handler(event, context)).resolves.toBe(undefined);\n\n  expect(mockWriteFile)\n    .toHaveBeenCalledWith(path.join(mockTmpDir, 'csr.config'),\n      csrDocument,\n      expect.anything(),\n      expect.any(Function));\n  expect(mockWriteFile)\n    .toHaveBeenCalledWith(path.join(mockTmpDir, 'private_key.pem'),\n      mockPrivateKey,\n      expect.anything(),\n      expect.any(Function));\n  expect(mockS3Client.putObject).toHaveBeenCalledTimes(2);\n  expect(mockRmrf).toHaveBeenCalledWith(mockTmpDir);\n  return expect(cfn.sendResponse)\n    .toHaveBeenCalledWith(event,\n      cfn.Status.SUCCESS,\n      event.LogicalResourceId,\n      { Ref: event.LogicalResourceId, CSR: `s3://${outputBucketName}/certificate-signing-request.pem`, SelfSignedCertificate: `s3://${outputBucketName}/self-signed-certificate.pem` });\n});\n\ntest('Update', async () => {\n  const event: cfn.Event = {\n    RequestType: cfn.RequestType.UPDATE,\n    PhysicalResourceId: eventBase.LogicalResourceId,\n    OldResourceProperties: eventBase.ResourceProperties,\n    ...eventBase,\n  };\n\n  mockExec.mockImplementation(async (cmd: string, ...args: string[]) => {\n    expect(cmd).toBe('openssl');\n    switch (args[0]) {\n      case 'req':\n        expect(args).toEqual(['req', '-config', require('path').join(mockTmpDir, 'csr.config'),\n          '-key', require('path').join(mockTmpDir, 'private_key.pem'),\n          '-out', require('path').join(mockTmpDir, 'csr.pem'),\n          '-new']);\n        break;\n      case 'x509':\n        expect(args).toEqual(['x509', '-in', require('path').join(mockTmpDir, 'csr.pem'),\n          '-out', require('path').join(mockTmpDir, 'cert.pem'),\n          '-req',\n          '-signkey', require('path').join(mockTmpDir, 'private_key.pem'),\n          '-days', '365']);\n        break;\n      default:\n        throw new Error('Unexpected call!');\n    }\n    return '';\n  });\n\n  const { handler } = 
require('../../custom-resource-handlers/src/certificate-signing-request');\n  await expect(handler(event, context)).resolves.toBe(undefined);\n\n  expect(mockWriteFile)\n    .toHaveBeenCalledWith(path.join(mockTmpDir, 'csr.config'),\n      csrDocument,\n      expect.anything(),\n      expect.any(Function));\n  expect(mockWriteFile)\n    .toHaveBeenCalledWith(path.join(mockTmpDir, 'private_key.pem'),\n      mockPrivateKey,\n      expect.anything(),\n      expect.any(Function));\n  expect(mockS3Client.putObject).toHaveBeenCalledTimes(2);\n  expect(mockRmrf).toHaveBeenCalledWith(mockTmpDir);\n  return expect(cfn.sendResponse)\n    .toHaveBeenCalledWith(event,\n      cfn.Status.SUCCESS,\n      event.LogicalResourceId,\n      { Ref: event.LogicalResourceId, CSR: `s3://${outputBucketName}/certificate-signing-request.pem`, SelfSignedCertificate: `s3://${outputBucketName}/self-signed-certificate.pem` });\n});\n\ntest('Delete', async () => {\n  const event: cfn.Event = {\n    RequestType: cfn.RequestType.DELETE,\n    PhysicalResourceId: eventBase.LogicalResourceId,\n    ...eventBase,\n  };\n\n  const { handler } = require('../../custom-resource-handlers/src/certificate-signing-request');\n  await expect(handler(event, context)).resolves.toBe(undefined);\n\n  return expect(cfn.sendResponse)\n    .toHaveBeenCalledWith(event,\n      cfn.Status.SUCCESS,\n      event.LogicalResourceId,\n      { Ref: event.LogicalResourceId });\n});\n"
  },
  {
    "path": "lib/__tests__/custom-resource-handlers/pgp-secret.test.ts",
    "content": "/* eslint-disable @typescript-eslint/no-require-imports */\nimport crypto = require('crypto');\nimport fs = require('fs');\nimport path = require('path');\nimport cfn = require('../../custom-resource-handlers/src/_cloud-formation');\nimport lambda = require('../../custom-resource-handlers/src/_lambda');\n\nconst context: lambda.Context = { awsRequestId: 'E3802D69-27F8-44F0-9E4C-3329A8736A4C' } as any;\nconst mockTmpDir = '/tmp/directory/is/phony';\nconst mockPrivateKey = '---BEGIN RSA FAKE PRIVATE KEY---';\nconst mockPublicKey = '---BEGIN RSA FAKE PUBLIC KEY---';\nconst mockEventBase = {\n  LogicalResourceId: 'ResourceID12345689',\n  ResponseURL: 'https://response/url',\n  RequestId: '5EF100FB-0075-4716-970B-FBCA05BFE118',\n  ResourceProperties: {\n    ServiceToken: 'Service-Token (Would be the function ARN',\n    ResourceVersion: 'The hash of the function code',\n\n    KeySizeBits: 4_096,\n    Identity: 'Test Identity',\n    Email: 'test@amazon.com',\n    Expiry: '1d',\n    SecretName: 'Secret/Name/Shhhhh',\n    KeyArn: 'alias/KmsKey',\n    Description: 'Description',\n  },\n  ResourceType: 'Custom::Resource::Type',\n  StackId: 'StackID-1324597',\n};\n\nconst secretArn = 'arn::::::secret';\n\nconst passphrase = crypto.randomBytes(32);\n\nconst keyConfig = `Key-Type: RSA\nKey-Length: ${mockEventBase.ResourceProperties.KeySizeBits}\nName-Real: ${mockEventBase.ResourceProperties.Identity}\nName-Email: ${mockEventBase.ResourceProperties.Email}\nExpire-Date: ${mockEventBase.ResourceProperties.Expiry}\nPassphrase: ${passphrase.toString('base64')}\n%commit\n%echo done`;\n\njest.spyOn(crypto, 'randomBytes').mockImplementation(() => passphrase);\njest.spyOn(fs, 'mkdtemp')\n  .mockImplementation(async (_, cb) => cb(undefined as any, mockTmpDir));\nconst writeFile = fs.writeFile = jest.fn().mockName('fs.writeFile')\n  .mockImplementation((_pth, _data, _opts, cb) => cb()) as any;\njest.mock('../../custom-resource-handlers/src/_exec', () => async (cmd: string, 
...args: string[]) => {\n  expect(cmd).toBe('gpg');\n  const process = require('process');\n  expect(process.env.GNUPGHOME).toBe(mockTmpDir);\n  expect(args).toContain('--batch');\n  if (args.indexOf('--gen-key') !== -1) {\n    expect(args[args.indexOf('--gen-key') + 1]).toBe(require('path').join(mockTmpDir, 'key.config'));\n    return '';\n  }\n  expect(args).toContain('--yes');\n  if (args.indexOf('--import') !== -1) {\n    return '';\n  }\n  expect(args).toContain('--armor');\n  if (args.indexOf('--export') !== -1) {\n    return mockPublicKey;\n  } else if (args.indexOf('--export-secret-keys') !== -1) {\n    return mockPrivateKey;\n  }\n  throw new Error('Invalid call to _exec');\n});\n\nconst mockSecretsManagerClient = {\n  createSecret: jest.fn().mockName('SecretsManager.createSecret'),\n  updateSecret: jest.fn().mockName('SecretsManager.updateSecret'),\n  getSecretValue: jest.fn().mockName('SecretsManager.getSecretValue'),\n  deleteSecret: jest.fn().mockName('SecretsManager.deleteSecret'),\n};\n\njest.mock('@aws-sdk/client-secrets-manager', () => {\n  return {\n    SecretsManager: jest.fn().mockImplementation(() => {\n      return mockSecretsManagerClient;\n    }),\n  };\n});\n\nbeforeEach(() => {\n  mockSecretsManagerClient.createSecret.mockImplementation(() => Promise.resolve({ ARN: secretArn, VersionId: 'Secret-VersionId' }));\n  mockSecretsManagerClient.updateSecret.mockImplementation(() => Promise.resolve({ ARN: secretArn }));\n  mockSecretsManagerClient.getSecretValue.mockImplementation(() => Promise.resolve({\n    SecretString: JSON.stringify({\n      PrivateKey: mockPrivateKey,\n      Passphrase: passphrase.toString('base64'),\n    }),\n  }));\n  mockSecretsManagerClient.deleteSecret.mockImplementation(() => Promise.resolve());\n});\n\nconst mockSendResponse = jest.spyOn(cfn, 'sendResponse').mockName('cfn.sendResponse').mockResolvedValue(Promise.resolve());\nconst mockRmrf = 
jest.fn().mockName('_rmrf').mockResolvedValue(undefined);\njest.mock('../../custom-resource-handlers/src/_rmrf', () => mockRmrf);\n\ntest('Create', async () => {\n  const { handler } = require('../../custom-resource-handlers/src/pgp-secret');\n  const event: cfn.Event = {\n    RequestType: cfn.RequestType.CREATE,\n    PhysicalResourceId: undefined,\n    ...mockEventBase,\n  };\n\n  await expect(handler(event, context)).resolves.toBe(undefined);\n\n  expect(writeFile)\n    .toHaveBeenCalledWith(path.join(mockTmpDir, 'key.config'),\n      keyConfig,\n      expect.anything(),\n      expect.any(Function));\n  expect(mockRmrf)\n    .toHaveBeenCalledWith(mockTmpDir);\n  expect(mockSecretsManagerClient.createSecret)\n    .toHaveBeenCalledWith({\n      ClientRequestToken: context.awsRequestId,\n      Description: event.ResourceProperties.Description,\n      KmsKeyId: event.ResourceProperties.KeyArn,\n      Name: event.ResourceProperties.SecretName,\n      SecretString: JSON.stringify({\n        PrivateKey: mockPrivateKey,\n        Passphrase: passphrase.toString('base64'),\n      }),\n    });\n  return expect(mockSendResponse)\n    .toHaveBeenCalledWith(event,\n      cfn.Status.SUCCESS,\n      secretArn,\n      {\n        Ref: secretArn,\n        SecretArn: secretArn,\n        PublicKey: mockPublicKey,\n      });\n});\n\ntest('Update', async () => {\n  const { handler } = require('../../custom-resource-handlers/src/pgp-secret');\n  const event: cfn.Event = {\n    RequestType: cfn.RequestType.UPDATE,\n    PhysicalResourceId: secretArn,\n    OldResourceProperties: {\n      ...mockEventBase.ResourceProperties,\n      Description: 'Old Description',\n      KeyArn: 'alias/OldKey',\n    },\n    ...mockEventBase,\n  };\n\n  await expect(handler(event, context)).resolves.toBe(undefined);\n  expect(mockSecretsManagerClient.updateSecret)\n    .toHaveBeenCalledWith({\n      ClientRequestToken: context.awsRequestId,\n      SecretId: secretArn,\n      Description: 
event.ResourceProperties.Description,\n      KmsKeyId: event.ResourceProperties.KeyArn,\n    });\n  expect(mockSecretsManagerClient.getSecretValue)\n    .toHaveBeenCalledWith({ SecretId: secretArn });\n  expect(mockRmrf)\n    .toHaveBeenCalledWith(mockTmpDir);\n  return expect(mockSendResponse)\n    .toHaveBeenCalledWith(event,\n      cfn.Status.SUCCESS,\n      secretArn,\n      {\n        Ref: secretArn,\n        SecretArn: secretArn,\n        PublicKey: mockPublicKey,\n      });\n});\n\ntest('Delete', async () => {\n  const { handler } = require('../../custom-resource-handlers/src/pgp-secret');\n  const event: cfn.Event = {\n    RequestType: cfn.RequestType.DELETE,\n    PhysicalResourceId: secretArn,\n    ...mockEventBase,\n  };\n\n  await expect(handler(event, context)).resolves.toBe(undefined);\n\n  expect(mockSecretsManagerClient.deleteSecret).toHaveBeenCalledWith({\n    SecretId: secretArn,\n    ForceDeleteWithoutRecovery: false,\n  });\n\n  return expect(mockSendResponse)\n    .toHaveBeenCalledWith(event, cfn.Status.SUCCESS, event.PhysicalResourceId, {\n      Ref: event.PhysicalResourceId,\n    });\n});\n"
  },
  {
    "path": "lib/__tests__/custom-resource-handlers/private-key.test.ts",
    "content": "/* eslint-disable @typescript-eslint/no-require-imports */\nimport fs = require('fs');\nimport cfn = require('../../custom-resource-handlers/src/_cloud-formation');\nimport lambda = require('../../custom-resource-handlers/src/_lambda');\n\nconst context: lambda.Context = { awsRequestId: '90E99AAE-B120-409A-9156-0C5925FDD996' } as lambda.Context;\nconst mockKeySize = 4_096;\nconst eventBase = {\n  LogicalResourceId: 'ResourceID12345689',\n  ResponseURL: 'https://response/url',\n  RequestId: '5EF100FB-0075-4716-970B-FBCA05BFE118',\n  ResourceProperties: {\n    ServiceToken: 'Service-Token (Would be the function ARN',\n    ResourceVersion: 'The hash of the function code',\n\n    Description: 'Description of my secret',\n    KeySize: 4_096,\n    KmsKeyId: 'alias/KmsKey',\n    SecretName: 'Sekret/Name/Shhhh',\n  },\n  ResourceType: 'Custom::Resource::Type',\n  StackId: 'StackID-1324597',\n};\nconst mockTmpDir = '/tmp/directory/is/phony';\nconst mockPrivateKey = 'Phony PEM-Encoded Private Key';\nconst secretArn = 'arn::::::secret';\n\ncfn.sendResponse = jest.fn().mockName('cfn.sendResponse').mockResolvedValue(undefined);\njest.mock('../../custom-resource-handlers/src/_exec', () => async (cmd: string, ...args: string[]) => {\n  expect(cmd).toBe('openssl');\n  expect(args).toEqual(['genrsa', '-out', require('path').join(mockTmpDir, 'private_key.pem'), mockKeySize]);\n  return '';\n});\njest.spyOn(fs, 'mkdtemp').mockName('fs.mkdtemp')\n  .mockImplementation(async (_, cb) => cb(undefined as any, mockTmpDir));\nfs.readFile = jest.fn().mockName('fs.readFile')\n  .mockImplementation(async (file, opts, cb) => {\n    expect(file).toBe(require('path').join(mockTmpDir, 'private_key.pem'));\n    expect(opts.encoding).toBe('utf8');\n    return cb(undefined, mockPrivateKey);\n  }) as any;\nconst mockRmrf = jest.fn().mockName('_rmrf').mockResolvedValue(undefined);\njest.mock('../../custom-resource-handlers/src/_rmrf', () => mockRmrf);\n\nbeforeEach(() => 
jest.clearAllMocks());\n\nconst mockSecretsManagerClient = {\n  createSecret: jest.fn().mockName('SecretsManager.createSecret'),\n  updateSecret: jest.fn().mockName('SecretsManager.updateSecret'),\n  deleteSecret: jest.fn().mockName('SecretsManager.deleteSecret'),\n};\n\njest.mock('@aws-sdk/client-secrets-manager', () => {\n  return {\n    SecretsManager: jest.fn().mockImplementation(() => {\n      return mockSecretsManagerClient;\n    }),\n  };\n});\n\nbeforeEach(() => {\n  mockSecretsManagerClient.createSecret.mockImplementation(() => Promise.resolve({ ARN: secretArn, VersionId: 'Secret-VersionID' }));\n  mockSecretsManagerClient.updateSecret.mockImplementation(() => Promise.resolve({ ARN: secretArn }));\n  mockSecretsManagerClient.deleteSecret.mockImplementation(() => Promise.resolve({}));\n});\n\ntest('Create', async () => {\n  const event: cfn.Event = {\n    RequestType: cfn.RequestType.CREATE,\n    PhysicalResourceId: undefined,\n    ...eventBase,\n  };\n\n  const { handler } = require('../../custom-resource-handlers/src/private-key');\n  await expect(handler(event, context)).resolves.toBe(undefined);\n\n  expect(mockSecretsManagerClient.createSecret)\n    .toHaveBeenCalledWith({\n      ClientRequestToken: context.awsRequestId,\n      Description: event.ResourceProperties.Description,\n      KmsKeyId: event.ResourceProperties.KmsKeyId,\n      Name: event.ResourceProperties.SecretName,\n      SecretString: mockPrivateKey,\n    });\n  expect(mockSecretsManagerClient.updateSecret).not.toHaveBeenCalled();\n  expect(mockSecretsManagerClient.deleteSecret).not.toHaveBeenCalled();\n  expect(mockRmrf).toHaveBeenCalledWith(mockTmpDir);\n  return expect(cfn.sendResponse)\n    .toHaveBeenCalledWith(event,\n      cfn.Status.SUCCESS,\n      secretArn,\n      { Ref: secretArn, SecretArn: secretArn });\n});\n\ntest('Update (changing KeySize)', async () => {\n  const event: cfn.Event = {\n    RequestType: cfn.RequestType.UPDATE,\n    PhysicalResourceId: secretArn,\n    
OldResourceProperties: {\n      ...eventBase.ResourceProperties,\n      KeySize: mockKeySize * 2,\n    },\n    ...eventBase,\n  };\n\n  const { handler } = require('../../custom-resource-handlers/src/private-key');\n  await expect(handler(event, context)).resolves.toBe(undefined);\n\n  expect(mockSecretsManagerClient.createSecret).not.toHaveBeenCalled();\n  expect(mockSecretsManagerClient.updateSecret).not.toHaveBeenCalled();\n  expect(mockSecretsManagerClient.deleteSecret).not.toHaveBeenCalled();\n  return expect(cfn.sendResponse)\n    .toHaveBeenCalledWith(event,\n      cfn.Status.FAILED,\n      secretArn,\n      {},\n      expect.stringContaining('The KeySize property cannot be updated'));\n});\n\ntest('Update (changing SecretName)', async () => {\n  const event: cfn.Event = {\n    RequestType: cfn.RequestType.UPDATE,\n    PhysicalResourceId: secretArn,\n    OldResourceProperties: {\n      ...eventBase.ResourceProperties,\n      SecretName: 'Old/Secret/Name',\n    },\n    ...eventBase,\n  };\n\n  const { handler } = require('../../custom-resource-handlers/src/private-key');\n  await expect(handler(event, context)).resolves.toBe(undefined);\n\n  expect(mockSecretsManagerClient.createSecret).not.toHaveBeenCalled();\n  expect(mockSecretsManagerClient.updateSecret).not.toHaveBeenCalled();\n  expect(mockSecretsManagerClient.deleteSecret).not.toHaveBeenCalled();\n  return expect(cfn.sendResponse)\n    .toHaveBeenCalledWith(event,\n      cfn.Status.FAILED,\n      secretArn,\n      {},\n      expect.stringContaining('The SecretName property cannot be updated'));\n});\n\ntest('Update (changing Description and KmsKeyId)', async () => {\n  const event: cfn.Event = {\n    RequestType: cfn.RequestType.UPDATE,\n    PhysicalResourceId: secretArn,\n    OldResourceProperties: {\n      ...eventBase.ResourceProperties,\n      Description: 'Old description',\n      KmsKeyId: 'alias/OldKmsKey',\n    },\n    ...eventBase,\n  };\n\n  const { handler } = 
require('../../custom-resource-handlers/src/private-key');\n  await expect(handler(event, context)).resolves.toBe(undefined);\n\n  expect(mockSecretsManagerClient.createSecret).not.toHaveBeenCalled();\n  expect(mockSecretsManagerClient.updateSecret)\n    .toHaveBeenCalledWith({\n      ClientRequestToken: context.awsRequestId,\n      Description: event.ResourceProperties.Description,\n      KmsKeyId: event.ResourceProperties.KmsKeyId,\n      SecretId: secretArn,\n    });\n  expect(mockSecretsManagerClient.deleteSecret).not.toHaveBeenCalled();\n  return expect(cfn.sendResponse)\n    .toHaveBeenCalledWith(event,\n      cfn.Status.SUCCESS,\n      secretArn,\n      { Ref: secretArn, SecretArn: secretArn });\n});\n\ntest('Delete', async () => {\n  const event: cfn.Event = {\n    RequestType: cfn.RequestType.DELETE,\n    PhysicalResourceId: secretArn,\n    ...eventBase,\n  };\n\n  jest.spyOn(cfn, 'customResourceHandler').mockName('cfn.customResourceHandler')\n    .mockImplementation((cb) => {\n      return async (evt: any, ctx: any) => {\n        const result = await cb(evt, ctx);\n        expect(result).toEqual({\n          Ref: event.PhysicalResourceId,\n        });\n      };\n    });\n\n  const { handler } = require('../../custom-resource-handlers/src/private-key');\n  await expect(handler(event, context)).resolves.toBe(undefined);\n\n  expect(mockSecretsManagerClient.createSecret).not.toHaveBeenCalled();\n  expect(mockSecretsManagerClient.updateSecret).not.toHaveBeenCalled();\n  expect(mockSecretsManagerClient.deleteSecret)\n    .toHaveBeenCalledWith({\n      SecretId: secretArn,\n      ForceDeleteWithoutRecovery: true,\n    });\n  return expect(cfn.sendResponse)\n    .toHaveBeenCalledWith(event,\n      cfn.Status.SUCCESS,\n      event.PhysicalResourceId,\n      { Ref: event.PhysicalResourceId });\n});\n"
  },
  {
    "path": "lib/__tests__/delivlib-tests/assume-role/test.sh",
    "content": "#!/bin/bash\nset -euo pipefail\nset -x\nidentity=\"$(aws sts get-caller-identity --output text | xargs)\"\nrole_arn=$(echo \"${identity}\" | cut -d\" \" -f 2)\n\n# role arn will look like this:\n#     arn:aws:sts::712950704752:assumed-role/delivlib-test-e486dd-AssumeMe924099BB-1B4MOTFSLDZ2N/assume-role-test\nactual_role_name=$(echo \"${role_arn}\" | cut -d\"/\" -f2)\n\n\nif [ \"${actual_role_name}\" != \"${EXPECTED_ROLE_NAME}\" ]; then\n  echo \"Actual role name was ${actual_role_name} but we expected ${EXPECTED_ROLE_NAME}\"\n  exit 1\nfi\n\n"
  },
  {
    "path": "lib/__tests__/delivlib-tests/linux/README",
    "content": "## README\n\nThis file is bundled with the test and will be deployed as part of the test environment.\n\n---------------------------------------"
  },
  {
    "path": "lib/__tests__/delivlib-tests/linux/test.sh",
    "content": "#!/bin/bash\nset -e\nscriptdir=$(cd $(dirname $0) && pwd)\n\n# Some diagnostics output\necho \"| Workdir:\"\npwd\n\necho \"| Files in workdir:\"\nfind .\n\necho \"| environmentVariables\"\nset\n\n# Verify that test artifacts are downloaded together with the test script\necho \"| Test artifact:\"\ncat ${scriptdir}/README\n\necho \"-------\"\necho \"TEST PASS\"\n"
  },
  {
    "path": "lib/__tests__/delivlib-tests/linux/void.sh",
    "content": "#!/bin/bash\nset -e\necho ALL GOOD\n"
  },
  {
    "path": "lib/__tests__/delivlib-tests/windows/README",
    "content": "Hello, first Windows test\nThis README file will be bundled with the test\n----------------------------------------------\n"
  },
  {
    "path": "lib/__tests__/delivlib-tests/windows/test.ps1",
    "content": "\"Hello, World!\"\n\n# Verify test artifacts are bundled with the test script\nGet-Content -Path $PSScriptRoot\\README\n\nDIR /s\n\n"
  },
  {
    "path": "lib/__tests__/expected.yml",
    "content": "Transform: AWS::Serverless-2016-10-31\nResources:\n  CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83:\n    Type: AWS::KMS::Key\n    Properties:\n      KeyPolicy:\n        Statement:\n          - Action: kms:*\n            Effect: Allow\n            Principal:\n              AWS:\n                Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :iam::712950704752:root\n            Resource: \"*\"\n        Version: \"2012-10-17\"\n    UpdateReplacePolicy: Delete\n    DeletionPolicy: Delete\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/ArtifactsBucketEncryptionKey/Resource\n  CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKeyAliasB1396C5D:\n    Type: AWS::KMS::Alias\n    Properties:\n      AliasName: alias/codepipeline-delivlibtestcodecommitpipelinebuildpipeline5be6878f\n      TargetKeyId:\n        Fn::GetAtt:\n          - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n          - Arn\n    UpdateReplacePolicy: Delete\n    DeletionPolicy: Delete\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/ArtifactsBucketEncryptionKeyAlias/Resource\n  CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3:\n    Type: AWS::S3::Bucket\n    Properties:\n      BucketEncryption:\n        ServerSideEncryptionConfiguration:\n          - ServerSideEncryptionByDefault:\n              KMSMasterKeyID:\n                Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                  - Arn\n              SSEAlgorithm: aws:kms\n      PublicAccessBlockConfiguration:\n        BlockPublicAcls: true\n        BlockPublicPolicy: true\n        IgnorePublicAcls: true\n        RestrictPublicBuckets: true\n    UpdateReplacePolicy: Retain\n    DeletionPolicy: Retain\n    Metadata:\n      aws:cdk:path: 
delivlib-test/CodeCommitPipeline/BuildPipeline/ArtifactsBucket/Resource\n  CodeCommitPipelineBuildPipelineArtifactsBucketPolicy97EF6204:\n    Type: AWS::S3::BucketPolicy\n    Properties:\n      Bucket:\n        Ref: CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n      PolicyDocument:\n        Statement:\n          - Action: s3:*\n            Condition:\n              Bool:\n                aws:SecureTransport: \"false\"\n            Effect: Deny\n            Principal:\n              AWS: \"*\"\n            Resource:\n              - Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                        - Arn\n                    - /*\n        Version: \"2012-10-17\"\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/ArtifactsBucket/Policy/Resource\n  CodeCommitPipelineBuildPipelineRole1843599A:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: codepipeline.amazonaws.com\n        Version: \"2012-10-17\"\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Role/Resource\n  CodeCommitPipelineBuildPipelineRoleDefaultPolicy94C30F44:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n              - s3:DeleteObject*\n              - s3:PutObject\n              - s3:PutObjectLegalHold\n              - s3:PutObjectRetention\n              - s3:PutObjectTagging\n              - s3:PutObjectVersionTagging\n              - s3:Abort*\n            Effect: Allow\n            
Resource:\n              - Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                        - Arn\n                    - /*\n          - Action:\n              - kms:Decrypt\n              - kms:DescribeKey\n              - kms:Encrypt\n              - kms:ReEncrypt*\n              - kms:GenerateDataKey*\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineBuildCodePipelineActionRoleF95CDA16\n                - Arn\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineTestTestHelloLinuxCodePipelineActionRole8FAC0642\n                - Arn\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineTestTestHelloWindowsCodePipelineActionRole9316936E\n                - Arn\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineTestTestAssumeRoleCodePipelineActionRole8A7F2D7D\n                - Arn\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineTestActionGenerateTwoArtifactsCodePipelineActionRoleD657FD04\n                - Arn\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Resource:\n 
             Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelinePublishNpmPublishCodePipelineActionRoleCAA948F0\n                - Arn\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelinePublishNuGetPublishCodePipelineActionRole515B871C\n                - Arn\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelinePublishMavenPublishCodePipelineActionRoleB41F452E\n                - Arn\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelinePublishGitHubPublishCodePipelineActionRole17D6E0C9\n                - Arn\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelinePublishGitHubPagesPublishCodePipelineActionRoleEEE32F4A\n                - Arn\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelinePublishPyPIPublishCodePipelineActionRole05AF99D5\n                - Arn\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelinePublishGolangPublishCodePipelineActionRole365FF3C7\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineBuildPipelineRoleDefaultPolicy94C30F44\n      Roles:\n        - Ref: CodeCommitPipelineBuildPipelineRole1843599A\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Role/DefaultPolicy/Resource\n  CodeCommitPipelineBuildPipeline656B8CCB:\n    Type: AWS::CodePipeline::Pipeline\n    Properties:\n      ArtifactStore:\n        
EncryptionKey:\n          Id:\n            Fn::GetAtt:\n              - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n              - Arn\n          Type: KMS\n        Location:\n          Ref: CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n        Type: S3\n      RestartExecutionOnUpdate: true\n      RoleArn:\n        Fn::GetAtt:\n          - CodeCommitPipelineBuildPipelineRole1843599A\n          - Arn\n      Stages:\n        - Actions:\n            - ActionTypeId:\n                Category: Source\n                Owner: ThirdParty\n                Provider: GitHub\n                Version: \"1\"\n              Configuration:\n                Owner: awslabs\n                Repo: aws-delivlib-sample\n                Branch: master\n                OAuthToken: \"{{resolve:secretsmanager:arn:aws:secretsmanager:us-east-1:712950704752:secret:github-token-QDP6QX:SecretString:::}}\"\n                PollForSourceChanges: false\n              Name: Pull\n              OutputArtifacts:\n                - Name: Source\n              RunOrder: 1\n          Name: Source\n        - Actions:\n            - ActionTypeId:\n                Category: Build\n                Owner: AWS\n                Provider: CodeBuild\n                Version: \"1\"\n              Configuration:\n                ProjectName:\n                  Ref: CodeCommitPipelineBuildProject9F59E8AA\n              InputArtifacts:\n                - Name: Source\n              Name: Build\n              OutputArtifacts:\n                - Name: Artifact_Build_Build\n              RoleArn:\n                Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineBuildCodePipelineActionRoleF95CDA16\n                  - Arn\n              RunOrder: 1\n          Name: Build\n        - Actions:\n            - ActionTypeId:\n                Category: Build\n                Owner: AWS\n                Provider: CodeBuild\n                Version: \"1\"\n              
Configuration:\n                ProjectName:\n                  Ref: CodeCommitPipelineHelloLinuxCB82AB68\n              InputArtifacts:\n                - Name: Artifact_Build_Build\n              Name: TestHelloLinux\n              OutputArtifacts:\n                - Name: Artifact_c883e6647f907b1eb255846397acb348c18b48b3a2\n              RoleArn:\n                Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineTestTestHelloLinuxCodePipelineActionRole8FAC0642\n                  - Arn\n              RunOrder: 1\n            - ActionTypeId:\n                Category: Build\n                Owner: AWS\n                Provider: CodeBuild\n                Version: \"1\"\n              Configuration:\n                ProjectName:\n                  Ref: CodeCommitPipelineHelloWindows61CA8F73\n              InputArtifacts:\n                - Name: Artifact_Build_Build\n              Name: TestHelloWindows\n              OutputArtifacts:\n                - Name: Artifact_c841b1bdd02c8dd629e3593235a8c4b73d361a30be\n              RoleArn:\n                Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineTestTestHelloWindowsCodePipelineActionRole9316936E\n                  - Arn\n              RunOrder: 1\n            - ActionTypeId:\n                Category: Build\n                Owner: AWS\n                Provider: CodeBuild\n                Version: \"1\"\n              Configuration:\n                ProjectName:\n                  Ref: CodeCommitPipelineAssumeRole05A76F51\n              InputArtifacts:\n                - Name: Artifact_Build_Build\n              Name: TestAssumeRole\n              OutputArtifacts:\n                - Name: Artifact_c8681ea53827139c363558663e59350c1c894a3e54\n              RoleArn:\n                Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineTestTestAssumeRoleCodePipelineActionRole8A7F2D7D\n                  - Arn\n              RunOrder: 1\n            - ActionTypeId:\n              
  Category: Build\n                Owner: AWS\n                Provider: CodeBuild\n                Version: \"1\"\n              Configuration:\n                ProjectName:\n                  Ref: CodeCommitPipelineGenerateTwoArtifactsA9DAD33B\n              InputArtifacts:\n                - Name: Artifact_Build_Build\n              Name: ActionGenerateTwoArtifacts\n              OutputArtifacts:\n                - Name: Artifact_c8e859296b521c19119769864a1f8ff14746ebd0c1\n                - Name: artifact2\n              RoleArn:\n                Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineTestActionGenerateTwoArtifactsCodePipelineActionRoleD657FD04\n                  - Arn\n              RunOrder: 1\n          Name: Test\n        - Actions:\n            - ActionTypeId:\n                Category: Build\n                Owner: AWS\n                Provider: CodeBuild\n                Version: \"1\"\n              Configuration:\n                ProjectName:\n                  Ref: CodeCommitPipelineNpm0D31AEFC\n              InputArtifacts:\n                - Name: Artifact_Build_Build\n              Name: NpmPublish\n              RoleArn:\n                Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelinePublishNpmPublishCodePipelineActionRoleCAA948F0\n                  - Arn\n              RunOrder: 1\n            - ActionTypeId:\n                Category: Build\n                Owner: AWS\n                Provider: CodeBuild\n                Version: \"1\"\n              Configuration:\n                ProjectName:\n                  Ref: CodeCommitPipelineNuGet67CE1BA7\n              InputArtifacts:\n                - Name: Artifact_Build_Build\n              Name: NuGetPublish\n              RoleArn:\n                Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelinePublishNuGetPublishCodePipelineActionRole515B871C\n                  - Arn\n              RunOrder: 1\n            - ActionTypeId:\n               
 Category: Build\n                Owner: AWS\n                Provider: CodeBuild\n                Version: \"1\"\n              Configuration:\n                ProjectName:\n                  Ref: CodeCommitPipelineMavenB7154296\n              InputArtifacts:\n                - Name: Artifact_Build_Build\n              Name: MavenPublish\n              RoleArn:\n                Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelinePublishMavenPublishCodePipelineActionRoleB41F452E\n                  - Arn\n              RunOrder: 1\n            - ActionTypeId:\n                Category: Build\n                Owner: AWS\n                Provider: CodeBuild\n                Version: \"1\"\n              Configuration:\n                ProjectName:\n                  Ref: CodeCommitPipelineGitHub0797840C\n                PrimarySource: Artifact_Build_Build\n              InputArtifacts:\n                - Name: Artifact_Build_Build\n                - Name: Artifact_c8e859296b521c19119769864a1f8ff14746ebd0c1\n                - Name: artifact2\n              Name: GitHubPublish\n              RoleArn:\n                Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelinePublishGitHubPublishCodePipelineActionRole17D6E0C9\n                  - Arn\n              RunOrder: 1\n            - ActionTypeId:\n                Category: Build\n                Owner: AWS\n                Provider: CodeBuild\n                Version: \"1\"\n              Configuration:\n                ProjectName:\n                  Ref: CodeCommitPipelineGitHubPages53B77CF6\n              InputArtifacts:\n                - Name: Artifact_Build_Build\n              Name: GitHubPagesPublish\n              RoleArn:\n                Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelinePublishGitHubPagesPublishCodePipelineActionRoleEEE32F4A\n                  - Arn\n              RunOrder: 1\n            - ActionTypeId:\n                Category: Build\n                
Owner: AWS\n                Provider: CodeBuild\n                Version: \"1\"\n              Configuration:\n                ProjectName:\n                  Ref: CodeCommitPipelinePyPI2C59CE7B\n              InputArtifacts:\n                - Name: Artifact_Build_Build\n              Name: PyPIPublish\n              RoleArn:\n                Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelinePublishPyPIPublishCodePipelineActionRole05AF99D5\n                  - Arn\n              RunOrder: 1\n            - ActionTypeId:\n                Category: Build\n                Owner: AWS\n                Provider: CodeBuild\n                Version: \"1\"\n              Configuration:\n                ProjectName:\n                  Ref: CodeCommitPipelineGolangBDFA17A1\n              InputArtifacts:\n                - Name: Artifact_Build_Build\n              Name: GolangPublish\n              RoleArn:\n                Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelinePublishGolangPublishCodePipelineActionRole365FF3C7\n                  - Arn\n              RunOrder: 1\n          Name: Publish\n    DependsOn:\n      - CodeCommitPipelineBuildPipelineRoleDefaultPolicy94C30F44\n      - CodeCommitPipelineBuildPipelineRole1843599A\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Resource\n  CodeCommitPipelineBuildPipelineSourcePullWebhookResource0898F523:\n    Type: AWS::CodePipeline::Webhook\n    Properties:\n      Authentication: GITHUB_HMAC\n      AuthenticationConfiguration:\n        SecretToken: \"{{resolve:secretsmanager:arn:aws:secretsmanager:us-east-1:712950704752:secret:github-token-QDP6QX:SecretString:::}}\"\n      Filters:\n        - JsonPath: $.ref\n          MatchEquals: refs/heads/{Branch}\n      RegisterWithThirdParty: true\n      TargetAction: Pull\n      TargetPipeline:\n        Ref: CodeCommitPipelineBuildPipeline656B8CCB\n      TargetPipelineVersion: 1\n    Metadata:\n      aws:cdk:path: 
delivlib-test/CodeCommitPipeline/BuildPipeline/Source/Pull/WebhookResource\n  CodeCommitPipelineBuildPipelineBuildCodePipelineActionRoleF95CDA16:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              AWS:\n                Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :iam::712950704752:root\n        Version: \"2012-10-17\"\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Build/Build/CodePipelineActionRole/Resource\n  CodeCommitPipelineBuildPipelineBuildCodePipelineActionRoleDefaultPolicy7735849D:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - codebuild:BatchGetBuilds\n              - codebuild:StartBuild\n              - codebuild:StopBuild\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildProject9F59E8AA\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineBuildPipelineBuildCodePipelineActionRoleDefaultPolicy7735849D\n      Roles:\n        - Ref: CodeCommitPipelineBuildPipelineBuildCodePipelineActionRoleF95CDA16\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Build/Build/CodePipelineActionRole/DefaultPolicy/Resource\n  CodeCommitPipelineBuildPipelineTestTestHelloLinuxCodePipelineActionRole8FAC0642:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              AWS:\n                Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :iam::712950704752:root\n        
Version: \"2012-10-17\"\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Test/TestHelloLinux/CodePipelineActionRole/Resource\n  CodeCommitPipelineBuildPipelineTestTestHelloLinuxCodePipelineActionRoleDefaultPolicyDD449768:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - codebuild:BatchGetBuilds\n              - codebuild:StartBuild\n              - codebuild:StopBuild\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineHelloLinuxCB82AB68\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineBuildPipelineTestTestHelloLinuxCodePipelineActionRoleDefaultPolicyDD449768\n      Roles:\n        - Ref: CodeCommitPipelineBuildPipelineTestTestHelloLinuxCodePipelineActionRole8FAC0642\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Test/TestHelloLinux/CodePipelineActionRole/DefaultPolicy/Resource\n  CodeCommitPipelineBuildPipelineTestTestHelloWindowsCodePipelineActionRole9316936E:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              AWS:\n                Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :iam::712950704752:root\n        Version: \"2012-10-17\"\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Test/TestHelloWindows/CodePipelineActionRole/Resource\n  CodeCommitPipelineBuildPipelineTestTestHelloWindowsCodePipelineActionRoleDefaultPolicyFC7988F8:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - codebuild:BatchGetBuilds\n              - codebuild:StartBuild\n              - 
codebuild:StopBuild\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineHelloWindows61CA8F73\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineBuildPipelineTestTestHelloWindowsCodePipelineActionRoleDefaultPolicyFC7988F8\n      Roles:\n        - Ref: CodeCommitPipelineBuildPipelineTestTestHelloWindowsCodePipelineActionRole9316936E\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Test/TestHelloWindows/CodePipelineActionRole/DefaultPolicy/Resource\n  CodeCommitPipelineBuildPipelineTestTestAssumeRoleCodePipelineActionRole8A7F2D7D:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              AWS:\n                Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :iam::712950704752:root\n        Version: \"2012-10-17\"\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Test/TestAssumeRole/CodePipelineActionRole/Resource\n  CodeCommitPipelineBuildPipelineTestTestAssumeRoleCodePipelineActionRoleDefaultPolicy22EE0A3D:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - codebuild:BatchGetBuilds\n              - codebuild:StartBuild\n              - codebuild:StopBuild\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineAssumeRole05A76F51\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineBuildPipelineTestTestAssumeRoleCodePipelineActionRoleDefaultPolicy22EE0A3D\n      Roles:\n        - Ref: CodeCommitPipelineBuildPipelineTestTestAssumeRoleCodePipelineActionRole8A7F2D7D\n    Metadata:\n      aws:cdk:path: 
delivlib-test/CodeCommitPipeline/BuildPipeline/Test/TestAssumeRole/CodePipelineActionRole/DefaultPolicy/Resource\n  CodeCommitPipelineBuildPipelineTestActionGenerateTwoArtifactsCodePipelineActionRoleD657FD04:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              AWS:\n                Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :iam::712950704752:root\n        Version: \"2012-10-17\"\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Test/ActionGenerateTwoArtifacts/CodePipelineActionRole/Resource\n  CodeCommitPipelineBuildPipelineTestActionGenerateTwoArtifactsCodePipelineActionRoleDefaultPolicy23313445:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - codebuild:BatchGetBuilds\n              - codebuild:StartBuild\n              - codebuild:StopBuild\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineGenerateTwoArtifactsA9DAD33B\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineBuildPipelineTestActionGenerateTwoArtifactsCodePipelineActionRoleDefaultPolicy23313445\n      Roles:\n        - Ref: CodeCommitPipelineBuildPipelineTestActionGenerateTwoArtifactsCodePipelineActionRoleD657FD04\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Test/ActionGenerateTwoArtifacts/CodePipelineActionRole/DefaultPolicy/Resource\n  CodeCommitPipelineBuildPipelinePublishNpmPublishCodePipelineActionRoleCAA948F0:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              AWS:\n     
           Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :iam::712950704752:root\n        Version: \"2012-10-17\"\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Publish/NpmPublish/CodePipelineActionRole/Resource\n  CodeCommitPipelineBuildPipelinePublishNpmPublishCodePipelineActionRoleDefaultPolicyA1E1E060:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - codebuild:BatchGetBuilds\n              - codebuild:StartBuild\n              - codebuild:StopBuild\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineNpm0D31AEFC\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineBuildPipelinePublishNpmPublishCodePipelineActionRoleDefaultPolicyA1E1E060\n      Roles:\n        - Ref: CodeCommitPipelineBuildPipelinePublishNpmPublishCodePipelineActionRoleCAA948F0\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Publish/NpmPublish/CodePipelineActionRole/DefaultPolicy/Resource\n  CodeCommitPipelineBuildPipelinePublishNuGetPublishCodePipelineActionRole515B871C:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              AWS:\n                Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :iam::712950704752:root\n        Version: \"2012-10-17\"\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Publish/NuGetPublish/CodePipelineActionRole/Resource\n  CodeCommitPipelineBuildPipelinePublishNuGetPublishCodePipelineActionRoleDefaultPolicy5224BD0C:\n    Type: AWS::IAM::Policy\n    Properties:\n      
PolicyDocument:\n        Statement:\n          - Action:\n              - codebuild:BatchGetBuilds\n              - codebuild:StartBuild\n              - codebuild:StopBuild\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineNuGet67CE1BA7\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineBuildPipelinePublishNuGetPublishCodePipelineActionRoleDefaultPolicy5224BD0C\n      Roles:\n        - Ref: CodeCommitPipelineBuildPipelinePublishNuGetPublishCodePipelineActionRole515B871C\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Publish/NuGetPublish/CodePipelineActionRole/DefaultPolicy/Resource\n  CodeCommitPipelineBuildPipelinePublishMavenPublishCodePipelineActionRoleB41F452E:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              AWS:\n                Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :iam::712950704752:root\n        Version: \"2012-10-17\"\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Publish/MavenPublish/CodePipelineActionRole/Resource\n  CodeCommitPipelineBuildPipelinePublishMavenPublishCodePipelineActionRoleDefaultPolicy07DE5816:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - codebuild:BatchGetBuilds\n              - codebuild:StartBuild\n              - codebuild:StopBuild\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineMavenB7154296\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineBuildPipelinePublishMavenPublishCodePipelineActionRoleDefaultPolicy07DE5816\n      
Roles:\n        - Ref: CodeCommitPipelineBuildPipelinePublishMavenPublishCodePipelineActionRoleB41F452E\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Publish/MavenPublish/CodePipelineActionRole/DefaultPolicy/Resource\n  CodeCommitPipelineBuildPipelinePublishGitHubPublishCodePipelineActionRole17D6E0C9:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              AWS:\n                Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :iam::712950704752:root\n        Version: \"2012-10-17\"\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Publish/GitHubPublish/CodePipelineActionRole/Resource\n  CodeCommitPipelineBuildPipelinePublishGitHubPublishCodePipelineActionRoleDefaultPolicyF10F860F:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - codebuild:BatchGetBuilds\n              - codebuild:StartBuild\n              - codebuild:StopBuild\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineGitHub0797840C\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineBuildPipelinePublishGitHubPublishCodePipelineActionRoleDefaultPolicyF10F860F\n      Roles:\n        - Ref: CodeCommitPipelineBuildPipelinePublishGitHubPublishCodePipelineActionRole17D6E0C9\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Publish/GitHubPublish/CodePipelineActionRole/DefaultPolicy/Resource\n  CodeCommitPipelineBuildPipelinePublishGitHubPagesPublishCodePipelineActionRoleEEE32F4A:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n 
           Effect: Allow\n            Principal:\n              AWS:\n                Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :iam::712950704752:root\n        Version: \"2012-10-17\"\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Publish/GitHubPagesPublish/CodePipelineActionRole/Resource\n  CodeCommitPipelineBuildPipelinePublishGitHubPagesPublishCodePipelineActionRoleDefaultPolicyDE4085C1:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - codebuild:BatchGetBuilds\n              - codebuild:StartBuild\n              - codebuild:StopBuild\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineGitHubPages53B77CF6\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineBuildPipelinePublishGitHubPagesPublishCodePipelineActionRoleDefaultPolicyDE4085C1\n      Roles:\n        - Ref: CodeCommitPipelineBuildPipelinePublishGitHubPagesPublishCodePipelineActionRoleEEE32F4A\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Publish/GitHubPagesPublish/CodePipelineActionRole/DefaultPolicy/Resource\n  CodeCommitPipelineBuildPipelinePublishPyPIPublishCodePipelineActionRole05AF99D5:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              AWS:\n                Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :iam::712950704752:root\n        Version: \"2012-10-17\"\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Publish/PyPIPublish/CodePipelineActionRole/Resource\n  
CodeCommitPipelineBuildPipelinePublishPyPIPublishCodePipelineActionRoleDefaultPolicyB6A54068:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - codebuild:BatchGetBuilds\n              - codebuild:StartBuild\n              - codebuild:StopBuild\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelinePyPI2C59CE7B\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineBuildPipelinePublishPyPIPublishCodePipelineActionRoleDefaultPolicyB6A54068\n      Roles:\n        - Ref: CodeCommitPipelineBuildPipelinePublishPyPIPublishCodePipelineActionRole05AF99D5\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Publish/PyPIPublish/CodePipelineActionRole/DefaultPolicy/Resource\n  CodeCommitPipelineBuildPipelinePublishGolangPublishCodePipelineActionRole365FF3C7:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              AWS:\n                Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :iam::712950704752:root\n        Version: \"2012-10-17\"\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Publish/GolangPublish/CodePipelineActionRole/Resource\n  CodeCommitPipelineBuildPipelinePublishGolangPublishCodePipelineActionRoleDefaultPolicyED342278:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - codebuild:BatchGetBuilds\n              - codebuild:StartBuild\n              - codebuild:StopBuild\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineGolangBDFA17A1\n                - Arn\n        
Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineBuildPipelinePublishGolangPublishCodePipelineActionRoleDefaultPolicyED342278\n      Roles:\n        - Ref: CodeCommitPipelineBuildPipelinePublishGolangPublishCodePipelineActionRole365FF3C7\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildPipeline/Publish/GolangPublish/CodePipelineActionRole/DefaultPolicy/Resource\n  CodeCommitPipelineBuildProjectRoleC6347B6E:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: codebuild.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/AmazonElasticContainerRegistryPublicReadOnly\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildProject/Role/Resource\n  CodeCommitPipelineBuildProjectRoleDefaultPolicy1184486E:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - logs:CreateLogGroup\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineBuildProject9F59E8AA\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineBuildProject9F59E8AA\n                    - :*\n          - Action:\n              - codebuild:CreateReportGroup\n              
- codebuild:CreateReport\n              - codebuild:UpdateReport\n              - codebuild:BatchPutTestCases\n              - codebuild:BatchPutCodeCoverages\n            Effect: Allow\n            Resource:\n              Fn::Join:\n                - \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - :codebuild:us-east-1:712950704752:report-group/\n                  - Ref: CodeCommitPipelineBuildProject9F59E8AA\n                  - -*\n          - Action:\n              - ssmmessages:CreateControlChannel\n              - ssmmessages:CreateDataChannel\n              - ssmmessages:OpenControlChannel\n              - ssmmessages:OpenDataChannel\n              - logs:DescribeLogGroups\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n              - s3:GetEncryptionConfiguration\n              - s3:PutObject\n            Effect: Allow\n            Resource: \"*\"\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n              - s3:DeleteObject*\n              - s3:PutObject\n              - s3:PutObjectLegalHold\n              - s3:PutObjectRetention\n              - s3:PutObjectTagging\n              - s3:PutObjectVersionTagging\n              - s3:Abort*\n            Effect: Allow\n            Resource:\n              - Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                        - Arn\n                    - /*\n          - Action:\n              - kms:Decrypt\n              - kms:DescribeKey\n              - kms:Encrypt\n              - kms:ReEncrypt*\n              - kms:GenerateDataKey*\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - 
CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n          - Action:\n              - kms:Decrypt\n              - kms:Encrypt\n              - kms:ReEncrypt*\n              - kms:GenerateDataKey*\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineBuildProjectRoleDefaultPolicy1184486E\n      Roles:\n        - Ref: CodeCommitPipelineBuildProjectRoleC6347B6E\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildProject/Role/DefaultPolicy/Resource\n  CodeCommitPipelineBuildProject9F59E8AA:\n    Type: AWS::CodeBuild::Project\n    Properties:\n      Artifacts:\n        Type: CODEPIPELINE\n      Cache:\n        Type: NO_CACHE\n      EncryptionKey:\n        Fn::GetAtt:\n          - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n          - Arn\n      Environment:\n        ComputeType: BUILD_GENERAL1_SMALL\n        EnvironmentVariables:\n          - Name: DELIVLIB_ENV_TEST\n            Type: PLAINTEXT\n            Value: MAGIC_1924\n        Image: public.ecr.aws/jsii/superchain:1-bullseye-slim-node18\n        ImagePullCredentialsType: SERVICE_ROLE\n        PrivilegedMode: false\n        Type: LINUX_CONTAINER\n      ServiceRole:\n        Fn::GetAtt:\n          - CodeCommitPipelineBuildProjectRoleC6347B6E\n          - Arn\n      Source:\n        Type: CODEPIPELINE\n      TimeoutInMinutes: 480\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildProject/Resource\n  CodeCommitPipelineBuildProjectOnBuildFailed2A08058D:\n    Type: AWS::Events::Rule\n    Properties:\n      EventPattern:\n        source:\n          - aws.codebuild\n        detail:\n          project-name:\n            - Ref: CodeCommitPipelineBuildProject9F59E8AA\n          build-status:\n            - 
FAILED\n        detail-type:\n          - CodeBuild Build State Change\n      State: ENABLED\n      Targets:\n        - Arn:\n            Ref: CodeCommitPipelineNotificationsTopic36C2D667\n          Id: Target0\n          Input: '\"aws-delivlib test pipeline build failed\"'\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/BuildProject/OnBuildFailed/Resource\n  CodeCommitPipelineNotificationsTopic36C2D667:\n    Type: AWS::SNS::Topic\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/NotificationsTopic/Resource\n  CodeCommitPipelineNotificationsTopicawscdkdevdelivlibtestamazoncom7F5014D8:\n    Type: AWS::SNS::Subscription\n    Properties:\n      Endpoint: aws-cdk-dev+delivlib-test@amazon.com\n      Protocol: email\n      TopicArn:\n        Ref: CodeCommitPipelineNotificationsTopic36C2D667\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/NotificationsTopic/aws-cdk-dev+delivlib-test@amazon.com/Resource\n  CodeCommitPipelineNotificationsTopicPolicyBBE90C33:\n    Type: AWS::SNS::TopicPolicy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action: sns:Publish\n            Effect: Allow\n            Principal:\n              Service: events.amazonaws.com\n            Resource:\n              Ref: CodeCommitPipelineNotificationsTopic36C2D667\n            Sid: \"0\"\n        Version: \"2012-10-17\"\n      Topics:\n        - Ref: CodeCommitPipelineNotificationsTopic36C2D667\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/NotificationsTopic/Policy/Resource\n  CodeCommitPipelinePipelineWatcherPollerServiceRole0A1D8005:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: lambda.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n          
    - Ref: AWS::Partition\n              - :iam::aws:policy/service-role/AWSLambdaBasicExecutionRole\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/PipelineWatcher/Poller/ServiceRole/Resource\n  CodeCommitPipelinePipelineWatcherPollerServiceRoleDefaultPolicyE2104AD1:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action: cloudwatch:PutMetricData\n            Condition:\n              StringEquals:\n                cloudwatch:namespace: CDK/Delivlib\n            Effect: Allow\n            Resource: \"*\"\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelinePipelineWatcherPollerServiceRoleDefaultPolicyE2104AD1\n      Roles:\n        - Ref: CodeCommitPipelinePipelineWatcherPollerServiceRole0A1D8005\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/PipelineWatcher/Poller/ServiceRole/DefaultPolicy/Resource\n  CodeCommitPipelinePipelineWatcherPoller5C65ACDE:\n    Type: AWS::Lambda::Function\n    Properties:\n      Code:\n        S3Bucket: cdk-hnb659fds-assets-712950704752-us-east-1\n        S3Key: 53aa582de8394f0c03fbf115a5f8e5fc68947ec12efb9f6c3bb0b48973398c9c.zip\n      Environment:\n        Variables:\n          METRIC_NAMESPACE: CDK/Delivlib\n          METRIC_NAME: Failures\n      Handler: watcher-handler.handler\n      Role:\n        Fn::GetAtt:\n          - CodeCommitPipelinePipelineWatcherPollerServiceRole0A1D8005\n          - Arn\n      Runtime: nodejs20.x\n    DependsOn:\n      - CodeCommitPipelinePipelineWatcherPollerServiceRoleDefaultPolicyE2104AD1\n      - CodeCommitPipelinePipelineWatcherPollerServiceRole0A1D8005\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/PipelineWatcher/Poller/Resource\n  CodeCommitPipelinePipelineWatcherTriggerA38A4AD0:\n    Type: AWS::Events::Rule\n    Properties:\n      EventPattern:\n        source:\n          - aws.codepipeline\n        resources:\n          - Fn::Join:\n              - \"\"\n        
      - - \"arn:\"\n                - Ref: AWS::Partition\n                - \":codepipeline:us-east-1:712950704752:\"\n                - Ref: CodeCommitPipelineBuildPipeline656B8CCB\n        detail-type:\n          - CodePipeline Action Execution State Change\n          - CodePipeline Pipeline Execution State Change\n        detail:\n          state:\n            - FAILED\n            - SUCCEEDED\n      State: ENABLED\n      Targets:\n        - Arn:\n            Fn::GetAtt:\n              - CodeCommitPipelinePipelineWatcherPoller5C65ACDE\n              - Arn\n          Id: Target0\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/PipelineWatcher/Trigger/Resource\n  CodeCommitPipelinePipelineWatcherTriggerAllowEventRuledelivlibtestCodeCommitPipelinePipelineWatcherPoller7862623143029B4E:\n    Type: AWS::Lambda::Permission\n    Properties:\n      Action: lambda:InvokeFunction\n      FunctionName:\n        Fn::GetAtt:\n          - CodeCommitPipelinePipelineWatcherPoller5C65ACDE\n          - Arn\n      Principal: events.amazonaws.com\n      SourceArn:\n        Fn::GetAtt:\n          - CodeCommitPipelinePipelineWatcherTriggerA38A4AD0\n          - Arn\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/PipelineWatcher/Trigger/AllowEventRuledelivlibtestCodeCommitPipelinePipelineWatcherPoller78626231\n  CodeCommitPipelinePipelineWatcherAlarm73779F48:\n    Type: AWS::CloudWatch::Alarm\n    Properties:\n      AlarmDescription: Pipeline aws-delivlib test pipeline has failed stages\n      ComparisonOperator: GreaterThanOrEqualToThreshold\n      Dimensions:\n        - Name: Pipeline\n          Value:\n            Ref: CodeCommitPipelineBuildPipeline656B8CCB\n      EvaluationPeriods: 1\n      MetricName: Failures\n      Namespace: CDK/Delivlib\n      Period: 300\n      Statistic: Maximum\n      Threshold: 1\n      TreatMissingData: ignore\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/PipelineWatcher/Alarm/Resource\n  
CodeCommitPipelineHelloLinuxRole97734933:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: codebuild.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/AmazonElasticContainerRegistryPublicReadOnly\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/HelloLinux/Resource/Role/Resource\n  CodeCommitPipelineHelloLinuxRoleDefaultPolicy234DABC6:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - logs:CreateLogGroup\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineHelloLinuxCB82AB68\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineHelloLinuxCB82AB68\n                    - :*\n          - Action:\n              - codebuild:CreateReportGroup\n              - codebuild:CreateReport\n              - codebuild:UpdateReport\n              - codebuild:BatchPutTestCases\n              - codebuild:BatchPutCodeCoverages\n            Effect: Allow\n            Resource:\n              Fn::Join:\n                - \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - :codebuild:us-east-1:712950704752:report-group/\n       
           - Ref: CodeCommitPipelineHelloLinuxCB82AB68\n                  - -*\n          - Action:\n              - ssmmessages:CreateControlChannel\n              - ssmmessages:CreateDataChannel\n              - ssmmessages:OpenControlChannel\n              - ssmmessages:OpenDataChannel\n              - logs:DescribeLogGroups\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n              - s3:GetEncryptionConfiguration\n              - s3:PutObject\n            Effect: Allow\n            Resource: \"*\"\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1/*\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n              - s3:DeleteObject*\n              - s3:PutObject\n              - s3:PutObjectLegalHold\n              - s3:PutObjectRetention\n              - s3:PutObjectTagging\n              - s3:PutObjectVersionTagging\n              - s3:Abort*\n            Effect: Allow\n            Resource:\n              - Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                        - Arn\n                    - /*\n          - Action:\n              - kms:Decrypt\n              - kms:DescribeKey\n              - kms:Encrypt\n              - 
kms:ReEncrypt*\n              - kms:GenerateDataKey*\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n          - Action:\n              - kms:Decrypt\n              - kms:Encrypt\n              - kms:ReEncrypt*\n              - kms:GenerateDataKey*\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineHelloLinuxRoleDefaultPolicy234DABC6\n      Roles:\n        - Ref: CodeCommitPipelineHelloLinuxRole97734933\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/HelloLinux/Resource/Role/DefaultPolicy/Resource\n  CodeCommitPipelineHelloLinuxCB82AB68:\n    Type: AWS::CodeBuild::Project\n    Properties:\n      Artifacts:\n        Type: NO_ARTIFACTS\n      Cache:\n        Type: NO_CACHE\n      EncryptionKey:\n        Fn::GetAtt:\n          - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n          - Arn\n      Environment:\n        ComputeType: BUILD_GENERAL1_MEDIUM\n        EnvironmentVariables:\n          - Name: SCRIPT_S3_BUCKET\n            Type: PLAINTEXT\n            Value: cdk-hnb659fds-assets-712950704752-us-east-1\n          - Name: SCRIPT_S3_KEY\n            Type: PLAINTEXT\n            Value: 3d34b07ba871989d030649c646b3096ba7c78ca531897bcdb0670774d2f9d3e4.zip\n        Image: aws/codebuild/standard:7.0\n        ImagePullCredentialsType: CODEBUILD\n        PrivilegedMode: false\n        Type: LINUX_CONTAINER\n      ServiceRole:\n        Fn::GetAtt:\n          - CodeCommitPipelineHelloLinuxRole97734933\n          - Arn\n      Source:\n        BuildSpec: |-\n          {\n            \"version\": \"0.2\",\n            \"phases\": {\n              \"install\": {\n                \"commands\": 
[\n                  \"command -v yarn > /dev/null || npm install --global yarn\"\n                ]\n              },\n              \"pre_build\": {\n                \"commands\": [\n                  \"echo \\\"Downloading scripts from s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY}\\\"\",\n                  \"aws s3 cp s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY} /tmp\",\n                  \"mkdir -p /tmp/scriptdir\",\n                  \"unzip /tmp/$(basename $SCRIPT_S3_KEY) -d /tmp/scriptdir\"\n                ]\n              },\n              \"build\": {\n                \"commands\": [\n                  \"export SCRIPT_DIR=/tmp/scriptdir\",\n                  \"echo \\\"Running test.sh\\\"\",\n                  \"/bin/bash /tmp/scriptdir/test.sh\"\n                ]\n              }\n            }\n          }\n        Type: NO_SOURCE\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/HelloLinux/Resource/Resource\n  CodeCommitPipelineHelloLinuxOnBuildFailedD96AF043:\n    Type: AWS::Events::Rule\n    Properties:\n      EventPattern:\n        source:\n          - aws.codebuild\n        detail:\n          project-name:\n            - Ref: CodeCommitPipelineHelloLinuxCB82AB68\n          build-status:\n            - FAILED\n        detail-type:\n          - CodeBuild Build State Change\n      State: ENABLED\n      Targets:\n        - Arn:\n            Ref: CodeCommitPipelineNotificationsTopic36C2D667\n          Id: Target0\n          Input: '\"Test HelloLinux failed\"'\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/HelloLinux/Resource/OnBuildFailed/Resource\n  CodeCommitPipelineHelloLinuxAlarmE81F4D20:\n    Type: AWS::CloudWatch::Alarm\n    Properties:\n      ComparisonOperator: GreaterThanOrEqualToThreshold\n      Dimensions:\n        - Name: ProjectName\n          Value:\n            Ref: CodeCommitPipelineHelloLinuxCB82AB68\n      EvaluationPeriods: 1\n      MetricName: FailedBuilds\n      Namespace: AWS/CodeBuild\n      
Period: 300\n      Statistic: Sum\n      Threshold: 1\n      TreatMissingData: ignore\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/HelloLinux/Alarm/Resource\n  CodeCommitPipelineHelloWindowsRole769C073E:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: codebuild.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/AmazonElasticContainerRegistryPublicReadOnly\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/HelloWindows/Resource/Role/Resource\n  CodeCommitPipelineHelloWindowsRoleDefaultPolicyA240EEEE:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - logs:CreateLogGroup\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineHelloWindows61CA8F73\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineHelloWindows61CA8F73\n                    - :*\n          - Action:\n              - codebuild:CreateReportGroup\n              - codebuild:CreateReport\n              - codebuild:UpdateReport\n              - codebuild:BatchPutTestCases\n              - codebuild:BatchPutCodeCoverages\n            Effect: Allow\n            Resource:\n  
            Fn::Join:\n                - \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - :codebuild:us-east-1:712950704752:report-group/\n                  - Ref: CodeCommitPipelineHelloWindows61CA8F73\n                  - -*\n          - Action:\n              - ssmmessages:CreateControlChannel\n              - ssmmessages:CreateDataChannel\n              - ssmmessages:OpenControlChannel\n              - ssmmessages:OpenDataChannel\n              - logs:DescribeLogGroups\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n              - s3:GetEncryptionConfiguration\n              - s3:PutObject\n            Effect: Allow\n            Resource: \"*\"\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1/*\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n              - s3:DeleteObject*\n              - s3:PutObject\n              - s3:PutObjectLegalHold\n              - s3:PutObjectRetention\n              - s3:PutObjectTagging\n              - s3:PutObjectVersionTagging\n              - s3:Abort*\n            Effect: Allow\n            Resource:\n              - Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - 
CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                        - Arn\n                    - /*\n          - Action:\n              - kms:Decrypt\n              - kms:DescribeKey\n              - kms:Encrypt\n              - kms:ReEncrypt*\n              - kms:GenerateDataKey*\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n          - Action:\n              - kms:Decrypt\n              - kms:Encrypt\n              - kms:ReEncrypt*\n              - kms:GenerateDataKey*\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineHelloWindowsRoleDefaultPolicyA240EEEE\n      Roles:\n        - Ref: CodeCommitPipelineHelloWindowsRole769C073E\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/HelloWindows/Resource/Role/DefaultPolicy/Resource\n  CodeCommitPipelineHelloWindows61CA8F73:\n    Type: AWS::CodeBuild::Project\n    Properties:\n      Artifacts:\n        Type: NO_ARTIFACTS\n      Cache:\n        Type: NO_CACHE\n      EncryptionKey:\n        Fn::GetAtt:\n          - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n          - Arn\n      Environment:\n        ComputeType: BUILD_GENERAL1_MEDIUM\n        EnvironmentVariables:\n          - Name: SCRIPT_S3_BUCKET\n            Type: PLAINTEXT\n            Value: cdk-hnb659fds-assets-712950704752-us-east-1\n          - Name: SCRIPT_S3_KEY\n            Type: PLAINTEXT\n            Value: 36b33307c18c06726950e481637d4439c34e56a89ae6e2f1725e2718095e0985.zip\n        Image: aws/codebuild/windows-base:2019-1.0\n        ImagePullCredentialsType: CODEBUILD\n        PrivilegedMode: false\n        Type: WINDOWS_SERVER_2019_CONTAINER\n      
ServiceRole:\n        Fn::GetAtt:\n          - CodeCommitPipelineHelloWindowsRole769C073E\n          - Arn\n      Source:\n        BuildSpec: |-\n          {\n            \"version\": \"0.2\",\n            \"phases\": {\n              \"install\": {\n                \"commands\": [\n                  \"Import-Module \\\"C:\\\\ProgramData\\\\chocolatey\\\\helpers\\\\chocolateyProfile.psm1\\\"\",\n                  \"C:\\\\ProgramData\\\\chocolatey\\\\bin\\\\choco.exe upgrade nodejs-lts -y\"\n                ]\n              },\n              \"pre_build\": {\n                \"commands\": []\n              },\n              \"build\": {\n                \"commands\": [\n                  \"Set-Variable -Name TEMPDIR -Value (New-TemporaryFile).DirectoryName\",\n                  \"aws s3 cp s3://$env:SCRIPT_S3_BUCKET/$env:SCRIPT_S3_KEY $TEMPDIR\\\\scripts.zip\",\n                  \"New-Item -ItemType Directory -Path $TEMPDIR\\\\scriptdir\",\n                  \"Expand-Archive -Path $TEMPDIR/scripts.zip -DestinationPath $TEMPDIR\\\\scriptdir\",\n                  \"$env:SCRIPT_DIR = \\\"$TEMPDIR\\\\scriptdir\\\"\",\n                  \"& $TEMPDIR\\\\scriptdir\\\\test.ps1\"\n                ]\n              }\n            }\n          }\n        Type: NO_SOURCE\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/HelloWindows/Resource/Resource\n  CodeCommitPipelineHelloWindowsOnBuildFailed25F55C59:\n    Type: AWS::Events::Rule\n    Properties:\n      EventPattern:\n        source:\n          - aws.codebuild\n        detail:\n          project-name:\n            - Ref: CodeCommitPipelineHelloWindows61CA8F73\n          build-status:\n            - FAILED\n        detail-type:\n          - CodeBuild Build State Change\n      State: ENABLED\n      Targets:\n        - Arn:\n            Ref: CodeCommitPipelineNotificationsTopic36C2D667\n          Id: Target0\n          Input: '\"Test HelloWindows failed\"'\n    Metadata:\n      aws:cdk:path: 
delivlib-test/CodeCommitPipeline/HelloWindows/Resource/OnBuildFailed/Resource\n  CodeCommitPipelineHelloWindowsAlarmB6D353FA:\n    Type: AWS::CloudWatch::Alarm\n    Properties:\n      ComparisonOperator: GreaterThanOrEqualToThreshold\n      Dimensions:\n        - Name: ProjectName\n          Value:\n            Ref: CodeCommitPipelineHelloWindows61CA8F73\n      EvaluationPeriods: 1\n      MetricName: FailedBuilds\n      Namespace: AWS/CodeBuild\n      Period: 300\n      Statistic: Sum\n      Threshold: 1\n      TreatMissingData: ignore\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/HelloWindows/Alarm/Resource\n  CodeCommitPipelineAssumeRoleRole1186B781:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: codebuild.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/AmazonElasticContainerRegistryPublicReadOnly\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/AssumeRole/Resource/Role/Resource\n  CodeCommitPipelineAssumeRoleRoleDefaultPolicy438D80DD:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - logs:CreateLogGroup\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineAssumeRole05A76F51\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n             
       - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineAssumeRole05A76F51\n                    - :*\n          - Action:\n              - codebuild:CreateReportGroup\n              - codebuild:CreateReport\n              - codebuild:UpdateReport\n              - codebuild:BatchPutTestCases\n              - codebuild:BatchPutCodeCoverages\n            Effect: Allow\n            Resource:\n              Fn::Join:\n                - \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - :codebuild:us-east-1:712950704752:report-group/\n                  - Ref: CodeCommitPipelineAssumeRole05A76F51\n                  - -*\n          - Action:\n              - ssmmessages:CreateControlChannel\n              - ssmmessages:CreateDataChannel\n              - ssmmessages:OpenControlChannel\n              - ssmmessages:OpenDataChannel\n              - logs:DescribeLogGroups\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n              - s3:GetEncryptionConfiguration\n              - s3:PutObject\n            Effect: Allow\n            Resource: \"*\"\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1/*\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - AssumeMe924099BB\n                - Arn\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n   
           - s3:List*\n              - s3:DeleteObject*\n              - s3:PutObject\n              - s3:PutObjectLegalHold\n              - s3:PutObjectRetention\n              - s3:PutObjectTagging\n              - s3:PutObjectVersionTagging\n              - s3:Abort*\n            Effect: Allow\n            Resource:\n              - Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                        - Arn\n                    - /*\n          - Action:\n              - kms:Decrypt\n              - kms:DescribeKey\n              - kms:Encrypt\n              - kms:ReEncrypt*\n              - kms:GenerateDataKey*\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n          - Action:\n              - kms:Decrypt\n              - kms:Encrypt\n              - kms:ReEncrypt*\n              - kms:GenerateDataKey*\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineAssumeRoleRoleDefaultPolicy438D80DD\n      Roles:\n        - Ref: CodeCommitPipelineAssumeRoleRole1186B781\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/AssumeRole/Resource/Role/DefaultPolicy/Resource\n  CodeCommitPipelineAssumeRole05A76F51:\n    Type: AWS::CodeBuild::Project\n    Properties:\n      Artifacts:\n        Type: NO_ARTIFACTS\n      Cache:\n        Type: NO_CACHE\n      EncryptionKey:\n        Fn::GetAtt:\n          - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n          - Arn\n   
   Environment:\n        ComputeType: BUILD_GENERAL1_MEDIUM\n        EnvironmentVariables:\n          - Name: SCRIPT_S3_BUCKET\n            Type: PLAINTEXT\n            Value: cdk-hnb659fds-assets-712950704752-us-east-1\n          - Name: SCRIPT_S3_KEY\n            Type: PLAINTEXT\n            Value: fa3b8e01a3815c9af6c66b1e4c986e8743a43f68fb763464198c94900c0c96da.zip\n          - Name: EXPECTED_ROLE_NAME\n            Type: PLAINTEXT\n            Value:\n              Ref: AssumeMe924099BB\n        Image: aws/codebuild/standard:7.0\n        ImagePullCredentialsType: CODEBUILD\n        PrivilegedMode: false\n        Type: LINUX_CONTAINER\n      ServiceRole:\n        Fn::GetAtt:\n          - CodeCommitPipelineAssumeRoleRole1186B781\n          - Arn\n      Source:\n        BuildSpec:\n          Fn::Join:\n            - \"\"\n            - - |-\n                {\n                  \"version\": \"0.2\",\n                  \"phases\": {\n                    \"install\": {\n                      \"commands\": [\n                        \"command -v yarn > /dev/null || npm install --global yarn\"\n                      ]\n                    },\n                    \"pre_build\": {\n                      \"commands\": [\n                        \"echo \\\"Downloading scripts from s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY}\\\"\",\n                        \"aws s3 cp s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY} /tmp\",\n                        \"mkdir -p /tmp/scriptdir\",\n                        \"unzip /tmp/$(basename $SCRIPT_S3_KEY) -d /tmp/scriptdir\",\n                        \"creds=$(mktemp -d)/creds.json\",\n                        \"AWS_STS_REGIONAL_ENDPOINTS=legacy aws sts assume-role --role-arn \\\"\n              - Fn::GetAtt:\n                  - AssumeMe924099BB\n                  - Arn\n              - |-\n                \\\" --role-session-name \\\"assume-role-test\\\" --external-id \\\"require-me-please\\\" > $creds\",\n                        \"export 
AWS_ACCESS_KEY_ID=\\\"$(cat ${creds} | grep \\\"AccessKeyId\\\" | cut -d'\\\"' -f 4)\\\"\",\n                        \"export AWS_SECRET_ACCESS_KEY=\\\"$(cat ${creds} | grep \\\"SecretAccessKey\\\" | cut -d'\\\"' -f 4)\\\"\",\n                        \"export AWS_SESSION_TOKEN=\\\"$(cat ${creds} | grep \\\"SessionToken\\\" | cut -d'\\\"' -f 4)\\\"\"\n                      ]\n                    },\n                    \"build\": {\n                      \"commands\": [\n                        \"export SCRIPT_DIR=/tmp/scriptdir\",\n                        \"echo \\\"Running test.sh\\\"\",\n                        \"/bin/bash /tmp/scriptdir/test.sh\"\n                      ]\n                    }\n                  }\n                }\n        Type: NO_SOURCE\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/AssumeRole/Resource/Resource\n  CodeCommitPipelineAssumeRoleOnBuildFailed494CD87B:\n    Type: AWS::Events::Rule\n    Properties:\n      EventPattern:\n        source:\n          - aws.codebuild\n        detail:\n          project-name:\n            - Ref: CodeCommitPipelineAssumeRole05A76F51\n          build-status:\n            - FAILED\n        detail-type:\n          - CodeBuild Build State Change\n      State: ENABLED\n      Targets:\n        - Arn:\n            Ref: CodeCommitPipelineNotificationsTopic36C2D667\n          Id: Target0\n          Input: '\"Test AssumeRole failed\"'\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/AssumeRole/Resource/OnBuildFailed/Resource\n  CodeCommitPipelineAssumeRoleAlarm6D09484D:\n    Type: AWS::CloudWatch::Alarm\n    Properties:\n      ComparisonOperator: GreaterThanOrEqualToThreshold\n      Dimensions:\n        - Name: ProjectName\n          Value:\n            Ref: CodeCommitPipelineAssumeRole05A76F51\n      EvaluationPeriods: 1\n      MetricName: FailedBuilds\n      Namespace: AWS/CodeBuild\n      Period: 300\n      Statistic: Sum\n      Threshold: 1\n      TreatMissingData: 
ignore\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/AssumeRole/Alarm/Resource\n  CodeCommitPipelineGenerateTwoArtifactsRole91D2CDCA:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: codebuild.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/AmazonElasticContainerRegistryPublicReadOnly\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/GenerateTwoArtifacts/Resource/Role/Resource\n  CodeCommitPipelineGenerateTwoArtifactsRoleDefaultPolicy770BE7EA:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - logs:CreateLogGroup\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineGenerateTwoArtifactsA9DAD33B\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineGenerateTwoArtifactsA9DAD33B\n                    - :*\n          - Action:\n              - codebuild:CreateReportGroup\n              - codebuild:CreateReport\n              - codebuild:UpdateReport\n              - codebuild:BatchPutTestCases\n              - codebuild:BatchPutCodeCoverages\n            Effect: Allow\n            Resource:\n              Fn::Join:\n                
- \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - :codebuild:us-east-1:712950704752:report-group/\n                  - Ref: CodeCommitPipelineGenerateTwoArtifactsA9DAD33B\n                  - -*\n          - Action:\n              - ssmmessages:CreateControlChannel\n              - ssmmessages:CreateDataChannel\n              - ssmmessages:OpenControlChannel\n              - ssmmessages:OpenDataChannel\n              - logs:DescribeLogGroups\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n              - s3:GetEncryptionConfiguration\n              - s3:PutObject\n            Effect: Allow\n            Resource: \"*\"\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1/*\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n              - s3:DeleteObject*\n              - s3:PutObject\n              - s3:PutObjectLegalHold\n              - s3:PutObjectRetention\n              - s3:PutObjectTagging\n              - s3:PutObjectVersionTagging\n              - s3:Abort*\n            Effect: Allow\n            Resource:\n              - Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                        - 
Arn\n                    - /*\n          - Action:\n              - kms:Decrypt\n              - kms:DescribeKey\n              - kms:Encrypt\n              - kms:ReEncrypt*\n              - kms:GenerateDataKey*\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n          - Action:\n              - kms:Decrypt\n              - kms:Encrypt\n              - kms:ReEncrypt*\n              - kms:GenerateDataKey*\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineGenerateTwoArtifactsRoleDefaultPolicy770BE7EA\n      Roles:\n        - Ref: CodeCommitPipelineGenerateTwoArtifactsRole91D2CDCA\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/GenerateTwoArtifacts/Resource/Role/DefaultPolicy/Resource\n  CodeCommitPipelineGenerateTwoArtifactsA9DAD33B:\n    Type: AWS::CodeBuild::Project\n    Properties:\n      Artifacts:\n        Type: NO_ARTIFACTS\n      Cache:\n        Type: NO_CACHE\n      EncryptionKey:\n        Fn::GetAtt:\n          - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n          - Arn\n      Environment:\n        ComputeType: BUILD_GENERAL1_MEDIUM\n        EnvironmentVariables:\n          - Name: SCRIPT_S3_BUCKET\n            Type: PLAINTEXT\n            Value: cdk-hnb659fds-assets-712950704752-us-east-1\n          - Name: SCRIPT_S3_KEY\n            Type: PLAINTEXT\n            Value: 3d34b07ba871989d030649c646b3096ba7c78ca531897bcdb0670774d2f9d3e4.zip\n        Image: aws/codebuild/standard:7.0\n        ImagePullCredentialsType: CODEBUILD\n        PrivilegedMode: false\n        Type: LINUX_CONTAINER\n      ServiceRole:\n        Fn::GetAtt:\n          - 
CodeCommitPipelineGenerateTwoArtifactsRole91D2CDCA\n          - Arn\n      Source:\n        BuildSpec: |-\n          {\n            \"version\": \"0.2\",\n            \"phases\": {\n              \"install\": {\n                \"commands\": [\n                  \"command -v yarn > /dev/null || npm install --global yarn\"\n                ]\n              },\n              \"pre_build\": {\n                \"commands\": [\n                  \"echo \\\"Downloading scripts from s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY}\\\"\",\n                  \"aws s3 cp s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY} /tmp\",\n                  \"mkdir -p /tmp/scriptdir\",\n                  \"unzip /tmp/$(basename $SCRIPT_S3_KEY) -d /tmp/scriptdir\"\n                ]\n              },\n              \"build\": {\n                \"commands\": [\n                  \"export SCRIPT_DIR=/tmp/scriptdir\",\n                  \"echo \\\"Running void.sh\\\"\",\n                  \"/bin/bash /tmp/scriptdir/void.sh\",\n                  \"mkdir -p output1 output2\",\n                  \"echo '{\\\"name\\\": \\\"output1\\\", \\\"version\\\": \\\"1.2.3\\\", \\\"commit\\\": \\\"abcdef\\\"}' > output1/build.json\",\n                  \"echo '{\\\"name\\\": \\\"output2\\\", \\\"version\\\": \\\"1.2.3\\\", \\\"commit\\\": \\\"abcdef\\\"}' > output2/build.json\"\n                ]\n              }\n            },\n            \"artifacts\": {\n              \"secondary-artifacts\": {\n                \"artifact2\": {\n                  \"base-directory\": \"output2\",\n                  \"files\": [\n                    \"**/*\"\n                  ]\n                },\n                \"Artifact_c8e859296b521c19119769864a1f8ff14746ebd0c1\": {\n                  \"base-directory\": \"output1\",\n                  \"files\": [\n                    \"**/*\"\n                  ]\n                }\n              }\n            }\n          }\n        Type: NO_SOURCE\n    Metadata:\n      aws:cdk:path: 
delivlib-test/CodeCommitPipeline/GenerateTwoArtifacts/Resource/Resource\n  CodeCommitPipelineGenerateTwoArtifactsAlarm4299580B:\n    Type: AWS::CloudWatch::Alarm\n    Properties:\n      ComparisonOperator: GreaterThanOrEqualToThreshold\n      Dimensions:\n        - Name: ProjectName\n          Value:\n            Ref: CodeCommitPipelineGenerateTwoArtifactsA9DAD33B\n      EvaluationPeriods: 1\n      MetricName: FailedBuilds\n      Namespace: AWS/CodeBuild\n      Period: 300\n      Statistic: Sum\n      Threshold: 1\n      TreatMissingData: ignore\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/GenerateTwoArtifacts/Alarm/Resource\n  CodeCommitPipelineCanaryHelloCanaryShellableRole65D634EB:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: codebuild.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/AmazonElasticContainerRegistryPublicReadOnly\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/CanaryHelloCanary/Shellable/Resource/Role/Resource\n  CodeCommitPipelineCanaryHelloCanaryShellableRoleDefaultPolicyD466B3CA:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - logs:CreateLogGroup\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineCanaryHelloCanaryShellableC8458471\n              - Fn::Join:\n                  - \"\"\n     
             - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineCanaryHelloCanaryShellableC8458471\n                    - :*\n          - Action:\n              - codebuild:CreateReportGroup\n              - codebuild:CreateReport\n              - codebuild:UpdateReport\n              - codebuild:BatchPutTestCases\n              - codebuild:BatchPutCodeCoverages\n            Effect: Allow\n            Resource:\n              Fn::Join:\n                - \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - :codebuild:us-east-1:712950704752:report-group/\n                  - Ref: CodeCommitPipelineCanaryHelloCanaryShellableC8458471\n                  - -*\n          - Action:\n              - ssmmessages:CreateControlChannel\n              - ssmmessages:CreateDataChannel\n              - ssmmessages:OpenControlChannel\n              - ssmmessages:OpenDataChannel\n              - logs:DescribeLogGroups\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n              - s3:GetEncryptionConfiguration\n              - s3:PutObject\n            Effect: Allow\n            Resource: \"*\"\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1/*\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineCanaryHelloCanaryShellableRoleDefaultPolicyD466B3CA\n      Roles:\n       
 - Ref: CodeCommitPipelineCanaryHelloCanaryShellableRole65D634EB\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/CanaryHelloCanary/Shellable/Resource/Role/DefaultPolicy/Resource\n  CodeCommitPipelineCanaryHelloCanaryShellableC8458471:\n    Type: AWS::CodeBuild::Project\n    Properties:\n      Artifacts:\n        Type: NO_ARTIFACTS\n      Cache:\n        Type: NO_CACHE\n      EncryptionKey: alias/aws/s3\n      Environment:\n        ComputeType: BUILD_GENERAL1_MEDIUM\n        EnvironmentVariables:\n          - Name: SCRIPT_S3_BUCKET\n            Type: PLAINTEXT\n            Value: cdk-hnb659fds-assets-712950704752-us-east-1\n          - Name: SCRIPT_S3_KEY\n            Type: PLAINTEXT\n            Value: 3d34b07ba871989d030649c646b3096ba7c78ca531897bcdb0670774d2f9d3e4.zip\n          - Name: IS_CANARY\n            Type: PLAINTEXT\n            Value: \"true\"\n        Image: aws/codebuild/standard:7.0\n        ImagePullCredentialsType: CODEBUILD\n        PrivilegedMode: false\n        Type: LINUX_CONTAINER\n      ServiceRole:\n        Fn::GetAtt:\n          - CodeCommitPipelineCanaryHelloCanaryShellableRole65D634EB\n          - Arn\n      Source:\n        BuildSpec: |-\n          {\n            \"version\": \"0.2\",\n            \"phases\": {\n              \"install\": {\n                \"commands\": [\n                  \"command -v yarn > /dev/null || npm install --global yarn\"\n                ]\n              },\n              \"pre_build\": {\n                \"commands\": [\n                  \"echo \\\"Downloading scripts from s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY}\\\"\",\n                  \"aws s3 cp s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY} /tmp\",\n                  \"mkdir -p /tmp/scriptdir\",\n                  \"unzip /tmp/$(basename $SCRIPT_S3_KEY) -d /tmp/scriptdir\"\n                ]\n              },\n              \"build\": {\n                \"commands\": [\n                  \"export SCRIPT_DIR=/tmp/scriptdir\",\n       
           \"echo \\\"Running test.sh\\\"\",\n                  \"/bin/bash /tmp/scriptdir/test.sh\"\n                ]\n              }\n            }\n          }\n        Type: NO_SOURCE\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/CanaryHelloCanary/Shellable/Resource/Resource\n  CodeCommitPipelineCanaryHelloCanaryShellableEventsRole0F756230:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: events.amazonaws.com\n        Version: \"2012-10-17\"\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/CanaryHelloCanary/Shellable/Resource/EventsRole/Resource\n  CodeCommitPipelineCanaryHelloCanaryShellableEventsRoleDefaultPolicy6CE0D6E4:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action: codebuild:StartBuild\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineCanaryHelloCanaryShellableC8458471\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineCanaryHelloCanaryShellableEventsRoleDefaultPolicy6CE0D6E4\n      Roles:\n        - Ref: CodeCommitPipelineCanaryHelloCanaryShellableEventsRole0F756230\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/CanaryHelloCanary/Shellable/Resource/EventsRole/DefaultPolicy/Resource\n  CodeCommitPipelineCanaryHelloCanaryShellableAlarm049B43C4:\n    Type: AWS::CloudWatch::Alarm\n    Properties:\n      ComparisonOperator: GreaterThanOrEqualToThreshold\n      Dimensions:\n        - Name: ProjectName\n          Value:\n            Ref: CodeCommitPipelineCanaryHelloCanaryShellableC8458471\n      EvaluationPeriods: 1\n      MetricName: FailedBuilds\n      Namespace: AWS/CodeBuild\n      Period: 300\n      Statistic: Sum\n      Threshold: 1\n      TreatMissingData: ignore\n    
Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/CanaryHelloCanary/Shellable/Alarm/Resource\n  CodeCommitPipelineCanaryHelloCanarySchedule6177762B:\n    Type: AWS::Events::Rule\n    Properties:\n      ScheduleExpression: rate(1 minute)\n      State: ENABLED\n      Targets:\n        - Arn:\n            Fn::GetAtt:\n              - CodeCommitPipelineCanaryHelloCanaryShellableC8458471\n              - Arn\n          Id: Target0\n          RoleArn:\n            Fn::GetAtt:\n              - CodeCommitPipelineCanaryHelloCanaryShellableEventsRole0F756230\n              - Arn\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/CanaryHelloCanary/Schedule/Resource\n  CodeCommitPipelineNpmRole219D5F49:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: codebuild.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/AmazonElasticContainerRegistryPublicReadOnly\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/Npm/Default/Resource/Role/Resource\n  CodeCommitPipelineNpmRoleDefaultPolicy1AFB68F0:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - logs:CreateLogGroup\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineNpm0D31AEFC\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    
- Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineNpm0D31AEFC\n                    - :*\n          - Action:\n              - codebuild:CreateReportGroup\n              - codebuild:CreateReport\n              - codebuild:UpdateReport\n              - codebuild:BatchPutTestCases\n              - codebuild:BatchPutCodeCoverages\n            Effect: Allow\n            Resource:\n              Fn::Join:\n                - \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - :codebuild:us-east-1:712950704752:report-group/\n                  - Ref: CodeCommitPipelineNpm0D31AEFC\n                  - -*\n          - Action:\n              - ssmmessages:CreateControlChannel\n              - ssmmessages:CreateDataChannel\n              - ssmmessages:OpenControlChannel\n              - ssmmessages:OpenDataChannel\n              - logs:DescribeLogGroups\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n              - s3:GetEncryptionConfiguration\n              - s3:PutObject\n            Effect: Allow\n            Resource: \"*\"\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1/*\n          - Action:\n              - secretsmanager:ListSecrets\n              - secretsmanager:DescribeSecret\n              - secretsmanager:GetSecretValue\n            Effect: Allow\n            Resource: 
arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/npm-MhaWgx\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                        - Arn\n                    - /*\n          - Action:\n              - kms:Decrypt\n              - kms:DescribeKey\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n          - Action:\n              - kms:Decrypt\n              - kms:Encrypt\n              - kms:ReEncrypt*\n              - kms:GenerateDataKey*\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineNpmRoleDefaultPolicy1AFB68F0\n      Roles:\n        - Ref: CodeCommitPipelineNpmRole219D5F49\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/Npm/Default/Resource/Role/DefaultPolicy/Resource\n  CodeCommitPipelineNpm0D31AEFC:\n    Type: AWS::CodeBuild::Project\n    Properties:\n      Artifacts:\n        Type: NO_ARTIFACTS\n      Cache:\n        Type: NO_CACHE\n      EncryptionKey:\n        Fn::GetAtt:\n          - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n          - Arn\n      Environment:\n        ComputeType: BUILD_GENERAL1_MEDIUM\n        EnvironmentVariables:\n          - Name: SCRIPT_S3_BUCKET\n            Type: PLAINTEXT\n            Value: 
cdk-hnb659fds-assets-712950704752-us-east-1\n          - Name: SCRIPT_S3_KEY\n            Type: PLAINTEXT\n            Value: b47ae622aa5e233309182a77632e391df4af339a7313ef79b47c718d0d5e4a9d.zip\n          - Name: FOR_REAL\n            Type: PLAINTEXT\n            Value: \"false\"\n          - Name: NPM_TOKEN_SECRET\n            Type: PLAINTEXT\n            Value: arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/npm-MhaWgx\n          - Name: DISTTAG\n            Type: PLAINTEXT\n            Value: \"\"\n          - Name: ACCESS\n            Type: PLAINTEXT\n            Value: restricted\n        Image: aws/codebuild/standard:7.0\n        ImagePullCredentialsType: CODEBUILD\n        PrivilegedMode: false\n        Type: LINUX_CONTAINER\n      ServiceRole:\n        Fn::GetAtt:\n          - CodeCommitPipelineNpmRole219D5F49\n          - Arn\n      Source:\n        BuildSpec: |-\n          {\n            \"version\": \"0.2\",\n            \"phases\": {\n              \"install\": {\n                \"commands\": [\n                  \"command -v yarn > /dev/null || npm install --global yarn\"\n                ]\n              },\n              \"pre_build\": {\n                \"commands\": [\n                  \"echo \\\"Downloading scripts from s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY}\\\"\",\n                  \"aws s3 cp s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY} /tmp\",\n                  \"mkdir -p /tmp/scriptdir\",\n                  \"unzip /tmp/$(basename $SCRIPT_S3_KEY) -d /tmp/scriptdir\"\n                ]\n              },\n              \"build\": {\n                \"commands\": [\n                  \"export SCRIPT_DIR=/tmp/scriptdir\",\n                  \"echo \\\"Running publish.sh\\\"\",\n                  \"/bin/bash /tmp/scriptdir/publish.sh\"\n                ]\n              }\n            }\n          }\n        Type: NO_SOURCE\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/Npm/Default/Resource/Resource\n  
CodeCommitPipelineNpmAlarm7A04F7A3:\n    Type: AWS::CloudWatch::Alarm\n    Properties:\n      ComparisonOperator: GreaterThanOrEqualToThreshold\n      Dimensions:\n        - Name: ProjectName\n          Value:\n            Ref: CodeCommitPipelineNpm0D31AEFC\n      EvaluationPeriods: 1\n      MetricName: FailedBuilds\n      Namespace: AWS/CodeBuild\n      Period: 300\n      Statistic: Sum\n      Threshold: 1\n      TreatMissingData: ignore\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/Npm/Default/Alarm/Resource\n  CodeCommitPipelineNuGetRole488DA302:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: codebuild.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/AmazonElasticContainerRegistryPublicReadOnly\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/NuGet/Default/Resource/Role/Resource\n  CodeCommitPipelineNuGetRoleDefaultPolicy9AF66D81:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - logs:CreateLogGroup\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineNuGet67CE1BA7\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: 
CodeCommitPipelineNuGet67CE1BA7\n                    - :*\n          - Action:\n              - codebuild:CreateReportGroup\n              - codebuild:CreateReport\n              - codebuild:UpdateReport\n              - codebuild:BatchPutTestCases\n              - codebuild:BatchPutCodeCoverages\n            Effect: Allow\n            Resource:\n              Fn::Join:\n                - \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - :codebuild:us-east-1:712950704752:report-group/\n                  - Ref: CodeCommitPipelineNuGet67CE1BA7\n                  - -*\n          - Action:\n              - ssmmessages:CreateControlChannel\n              - ssmmessages:CreateDataChannel\n              - ssmmessages:OpenControlChannel\n              - ssmmessages:OpenDataChannel\n              - logs:DescribeLogGroups\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n              - s3:GetEncryptionConfiguration\n              - s3:PutObject\n            Effect: Allow\n            Resource: \"*\"\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1/*\n          - Action:\n              - secretsmanager:ListSecrets\n              - secretsmanager:DescribeSecret\n              - secretsmanager:GetSecretValue\n            Effect: Allow\n            Resource: arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/nuget-jDbgrN\n          - Action:\n              - secretsmanager:ListSecrets\n           
   - secretsmanager:DescribeSecret\n              - secretsmanager:GetSecretValue\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - X509CodeSigningKeyRSAPrivateKeyResourceV2926395A0\n                - SecretArn\n          - Action: ssm:GetParameter\n            Effect: Allow\n            Resource:\n              Fn::Join:\n                - \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - :ssm:us-east-1:712950704752:parameter\n                  - Ref: X509CodeSigningKey8DE65BF8\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::GetAtt:\n                  - X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestBucketD81FB261\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestBucketD81FB261\n                        - Arn\n                    - /*\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                        - Arn\n                    - /*\n          - Action:\n              - kms:Decrypt\n              - kms:DescribeKey\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n          - Action:\n              - kms:Decrypt\n              - kms:Encrypt\n         
     - kms:ReEncrypt*\n              - kms:GenerateDataKey*\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineNuGetRoleDefaultPolicy9AF66D81\n      Roles:\n        - Ref: CodeCommitPipelineNuGetRole488DA302\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/NuGet/Default/Resource/Role/DefaultPolicy/Resource\n  CodeCommitPipelineNuGet67CE1BA7:\n    Type: AWS::CodeBuild::Project\n    Properties:\n      Artifacts:\n        Type: NO_ARTIFACTS\n      Cache:\n        Type: NO_CACHE\n      EncryptionKey:\n        Fn::GetAtt:\n          - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n          - Arn\n      Environment:\n        ComputeType: BUILD_GENERAL1_MEDIUM\n        EnvironmentVariables:\n          - Name: SCRIPT_S3_BUCKET\n            Type: PLAINTEXT\n            Value: cdk-hnb659fds-assets-712950704752-us-east-1\n          - Name: SCRIPT_S3_KEY\n            Type: PLAINTEXT\n            Value: 66a63786c570ced320dd48c3922fc8e5fd9c9393e5959b984f3c7e1cb7ac5f14.zip\n          - Name: FOR_REAL\n            Type: PLAINTEXT\n            Value: \"false\"\n          - Name: NUGET_SECRET_REGION\n            Type: PLAINTEXT\n            Value: us-east-1\n          - Name: NUGET_SECRET_ID\n            Type: PLAINTEXT\n            Value: arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/nuget-jDbgrN\n        Image: public.ecr.aws/jsii/superchain:1-bullseye-slim-node18\n        ImagePullCredentialsType: SERVICE_ROLE\n        PrivilegedMode: false\n        Type: LINUX_CONTAINER\n      ServiceRole:\n        Fn::GetAtt:\n          - CodeCommitPipelineNuGetRole488DA302\n          - Arn\n      Source:\n        BuildSpec: |-\n          {\n            \"version\": \"0.2\",\n            \"phases\": {\n              \"install\": {\n   
             \"commands\": [\n                  \"command -v yarn > /dev/null || npm install --global yarn\"\n                ]\n              },\n              \"pre_build\": {\n                \"commands\": [\n                  \"echo \\\"Downloading scripts from s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY}\\\"\",\n                  \"aws s3 cp s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY} /tmp\",\n                  \"mkdir -p /tmp/scriptdir\",\n                  \"unzip /tmp/$(basename $SCRIPT_S3_KEY) -d /tmp/scriptdir\"\n                ]\n              },\n              \"build\": {\n                \"commands\": [\n                  \"export SCRIPT_DIR=/tmp/scriptdir\",\n                  \"echo \\\"Running publish.sh\\\"\",\n                  \"/bin/bash /tmp/scriptdir/publish.sh\"\n                ]\n              }\n            }\n          }\n        Type: NO_SOURCE\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/NuGet/Default/Resource/Resource\n  CodeCommitPipelineNuGetAlarm4F3CAC42:\n    Type: AWS::CloudWatch::Alarm\n    Properties:\n      ComparisonOperator: GreaterThanOrEqualToThreshold\n      Dimensions:\n        - Name: ProjectName\n          Value:\n            Ref: CodeCommitPipelineNuGet67CE1BA7\n      EvaluationPeriods: 1\n      MetricName: FailedBuilds\n      Namespace: AWS/CodeBuild\n      Period: 300\n      Statistic: Sum\n      Threshold: 1\n      TreatMissingData: ignore\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/NuGet/Default/Alarm/Resource\n  CodeCommitPipelineMavenRoleC3A7769B:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: codebuild.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - 
:iam::aws:policy/AmazonElasticContainerRegistryPublicReadOnly\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/Maven/Default/Resource/Role/Resource\n  CodeCommitPipelineMavenRoleDefaultPolicyBCD15357:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - logs:CreateLogGroup\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineMavenB7154296\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineMavenB7154296\n                    - :*\n          - Action:\n              - codebuild:CreateReportGroup\n              - codebuild:CreateReport\n              - codebuild:UpdateReport\n              - codebuild:BatchPutTestCases\n              - codebuild:BatchPutCodeCoverages\n            Effect: Allow\n            Resource:\n              Fn::Join:\n                - \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - :codebuild:us-east-1:712950704752:report-group/\n                  - Ref: CodeCommitPipelineMavenB7154296\n                  - -*\n          - Action:\n              - ssmmessages:CreateControlChannel\n              - ssmmessages:CreateDataChannel\n              - ssmmessages:OpenControlChannel\n              - ssmmessages:OpenDataChannel\n              - logs:DescribeLogGroups\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n              - s3:GetEncryptionConfiguration\n              - 
s3:PutObject\n            Effect: Allow\n            Resource: \"*\"\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1/*\n          - Action:\n              - secretsmanager:ListSecrets\n              - secretsmanager:DescribeSecret\n              - secretsmanager:GetSecretValue\n            Effect: Allow\n            Resource: arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/maven-S4Q2y3\n          - Action:\n              - secretsmanager:ListSecrets\n              - secretsmanager:DescribeSecret\n              - secretsmanager:GetSecretValue\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeSignResourceV25D0B3375\n                - SecretArn\n          - Action: kms:Decrypt\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeSignCMKC986BB89\n                - Arn\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                        - Arn\n                    - /*\n          - Action:\n              - kms:Decrypt\n              - kms:DescribeKey\n 
           Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n          - Action:\n              - kms:Decrypt\n              - kms:Encrypt\n              - kms:ReEncrypt*\n              - kms:GenerateDataKey*\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineMavenRoleDefaultPolicyBCD15357\n      Roles:\n        - Ref: CodeCommitPipelineMavenRoleC3A7769B\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/Maven/Default/Resource/Role/DefaultPolicy/Resource\n  CodeCommitPipelineMavenB7154296:\n    Type: AWS::CodeBuild::Project\n    Properties:\n      Artifacts:\n        Type: NO_ARTIFACTS\n      Cache:\n        Type: NO_CACHE\n      EncryptionKey:\n        Fn::GetAtt:\n          - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n          - Arn\n      Environment:\n        ComputeType: BUILD_GENERAL1_MEDIUM\n        EnvironmentVariables:\n          - Name: SCRIPT_S3_BUCKET\n            Type: PLAINTEXT\n            Value: cdk-hnb659fds-assets-712950704752-us-east-1\n          - Name: SCRIPT_S3_KEY\n            Type: PLAINTEXT\n            Value: 23a043aa729a8dcc52950a3e3423edef094084d3f73bc8ed477d3736993de939.zip\n          - Name: STAGING_PROFILE_ID\n            Type: PLAINTEXT\n            Value: 68a05363083174\n          - Name: SIGNING_KEY_ARN\n            Type: PLAINTEXT\n            Value:\n              Fn::GetAtt:\n                - CodeSignResourceV25D0B3375\n                - SecretArn\n          - Name: FOR_REAL\n            Type: PLAINTEXT\n            Value: \"false\"\n          - Name: MAVEN_LOGIN_SECRET\n            Type: PLAINTEXT\n            Value: 
arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/maven-S4Q2y3\n          - Name: MAVEN_ENDPOINT\n            Type: PLAINTEXT\n            Value: https://aws.oss.sonatype.org:443/\n        Image: public.ecr.aws/jsii/superchain:1-bullseye-slim-node18\n        ImagePullCredentialsType: SERVICE_ROLE\n        PrivilegedMode: false\n        Type: LINUX_CONTAINER\n      ServiceRole:\n        Fn::GetAtt:\n          - CodeCommitPipelineMavenRoleC3A7769B\n          - Arn\n      Source:\n        BuildSpec: |-\n          {\n            \"version\": \"0.2\",\n            \"phases\": {\n              \"install\": {\n                \"commands\": [\n                  \"command -v yarn > /dev/null || npm install --global yarn\"\n                ]\n              },\n              \"pre_build\": {\n                \"commands\": [\n                  \"echo \\\"Downloading scripts from s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY}\\\"\",\n                  \"aws s3 cp s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY} /tmp\",\n                  \"mkdir -p /tmp/scriptdir\",\n                  \"unzip /tmp/$(basename $SCRIPT_S3_KEY) -d /tmp/scriptdir\"\n                ]\n              },\n              \"build\": {\n                \"commands\": [\n                  \"export SCRIPT_DIR=/tmp/scriptdir\",\n                  \"echo \\\"Running publish.sh\\\"\",\n                  \"/bin/bash /tmp/scriptdir/publish.sh\"\n                ]\n              }\n            }\n          }\n        Type: NO_SOURCE\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/Maven/Default/Resource/Resource\n  CodeCommitPipelineMavenAlarmC4A88DC3:\n    Type: AWS::CloudWatch::Alarm\n    Properties:\n      ComparisonOperator: GreaterThanOrEqualToThreshold\n      Dimensions:\n        - Name: ProjectName\n          Value:\n            Ref: CodeCommitPipelineMavenB7154296\n      EvaluationPeriods: 1\n      MetricName: FailedBuilds\n      Namespace: AWS/CodeBuild\n      Period: 300\n      
Statistic: Sum\n      Threshold: 1\n      TreatMissingData: ignore\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/Maven/Default/Alarm/Resource\n  CodeCommitPipelineGitHubRole77F2217D:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: codebuild.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/AmazonElasticContainerRegistryPublicReadOnly\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/GitHub/Default/Resource/Role/Resource\n  CodeCommitPipelineGitHubRoleDefaultPolicy3FEA7E07:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action: secretsmanager:GetSecretValue\n            Effect: Allow\n            Resource:\n              Fn::Join:\n                - \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - :secretsmanager:us-east-1:712950704752:secret:github-token-??????\n          - Action:\n              - logs:CreateLogGroup\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineGitHub0797840C\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineGitHub0797840C\n                    - :*\n          - Action:\n 
             - codebuild:CreateReportGroup\n              - codebuild:CreateReport\n              - codebuild:UpdateReport\n              - codebuild:BatchPutTestCases\n              - codebuild:BatchPutCodeCoverages\n            Effect: Allow\n            Resource:\n              Fn::Join:\n                - \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - :codebuild:us-east-1:712950704752:report-group/\n                  - Ref: CodeCommitPipelineGitHub0797840C\n                  - -*\n          - Action:\n              - ssmmessages:CreateControlChannel\n              - ssmmessages:CreateDataChannel\n              - ssmmessages:OpenControlChannel\n              - ssmmessages:OpenDataChannel\n              - logs:DescribeLogGroups\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n              - s3:GetEncryptionConfiguration\n              - s3:PutObject\n            Effect: Allow\n            Resource: \"*\"\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1/*\n          - Action:\n              - secretsmanager:GetSecretValue\n              - secretsmanager:DescribeSecret\n            Effect: Allow\n            Resource: arn:aws:secretsmanager:us-east-1:712950704752:secret:github-token-QDP6QX\n          - Action:\n              - secretsmanager:ListSecrets\n              - secretsmanager:DescribeSecret\n              - secretsmanager:GetSecretValue\n            Effect: Allow\n            
Resource:\n              Fn::GetAtt:\n                - CodeSignResourceV25D0B3375\n                - SecretArn\n          - Action: kms:Decrypt\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeSignCMKC986BB89\n                - Arn\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                        - Arn\n                    - /*\n          - Action:\n              - kms:Decrypt\n              - kms:DescribeKey\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n          - Action:\n              - kms:Decrypt\n              - kms:Encrypt\n              - kms:ReEncrypt*\n              - kms:GenerateDataKey*\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineGitHubRoleDefaultPolicy3FEA7E07\n      Roles:\n        - Ref: CodeCommitPipelineGitHubRole77F2217D\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/GitHub/Default/Resource/Role/DefaultPolicy/Resource\n  CodeCommitPipelineGitHub0797840C:\n    Type: AWS::CodeBuild::Project\n    Properties:\n      Artifacts:\n        Type: NO_ARTIFACTS\n      Cache:\n        Type: NO_CACHE\n      EncryptionKey:\n        Fn::GetAtt:\n          - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n      
    - Arn\n      Environment:\n        ComputeType: BUILD_GENERAL1_MEDIUM\n        EnvironmentVariables:\n          - Name: SCRIPT_S3_BUCKET\n            Type: PLAINTEXT\n            Value: cdk-hnb659fds-assets-712950704752-us-east-1\n          - Name: SCRIPT_S3_KEY\n            Type: PLAINTEXT\n            Value: adda208bc85327ef49bd7171b20043c17eaf01a22e74d84e48e2f7947af380e6.zip\n          - Name: BUILD_MANIFEST\n            Type: PLAINTEXT\n            Value: ./build.json\n          - Name: CHANGELOG\n            Type: PLAINTEXT\n            Value: ./CHANGELOG.md\n          - Name: RELEASE_NOTES\n            Type: PLAINTEXT\n            Value: ./RELEASE_NOTES.md\n          - Name: SIGNING_KEY_ARN\n            Type: PLAINTEXT\n            Value:\n              Fn::GetAtt:\n                - CodeSignResourceV25D0B3375\n                - SecretArn\n          - Name: GITHUB_OWNER\n            Type: PLAINTEXT\n            Value: awslabs\n          - Name: GITHUB_REPO\n            Type: PLAINTEXT\n            Value: aws-delivlib-sample\n          - Name: FOR_REAL\n            Type: PLAINTEXT\n            Value: \"false\"\n          - Name: SECONDARY_SOURCE_NAMES\n            Type: PLAINTEXT\n            Value: Artifact_c8e859296b521c19119769864a1f8ff14746ebd0c1 artifact2\n          - Name: SIGN_ADDITIONAL_ARTIFACTS\n            Type: PLAINTEXT\n            Value: \"true\"\n          - Name: GITHUB_TOKEN\n            Type: SECRETS_MANAGER\n            Value: github-token\n        Image: aws/codebuild/standard:7.0\n        ImagePullCredentialsType: CODEBUILD\n        PrivilegedMode: false\n        Type: LINUX_CONTAINER\n      ServiceRole:\n        Fn::GetAtt:\n          - CodeCommitPipelineGitHubRole77F2217D\n          - Arn\n      Source:\n        BuildSpec: |-\n          {\n            \"version\": \"0.2\",\n            \"phases\": {\n              \"install\": {\n                \"commands\": [\n                  \"command -v yarn > /dev/null || npm install --global 
yarn\"\n                ]\n              },\n              \"pre_build\": {\n                \"commands\": [\n                  \"echo \\\"Downloading scripts from s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY}\\\"\",\n                  \"aws s3 cp s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY} /tmp\",\n                  \"mkdir -p /tmp/scriptdir\",\n                  \"unzip /tmp/$(basename $SCRIPT_S3_KEY) -d /tmp/scriptdir\"\n                ]\n              },\n              \"build\": {\n                \"commands\": [\n                  \"export SCRIPT_DIR=/tmp/scriptdir\",\n                  \"echo \\\"Running publish.sh\\\"\",\n                  \"/bin/bash /tmp/scriptdir/publish.sh\"\n                ]\n              }\n            }\n          }\n        Type: NO_SOURCE\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/GitHub/Default/Resource/Resource\n  CodeCommitPipelineGitHubAlarmBD31FE64:\n    Type: AWS::CloudWatch::Alarm\n    Properties:\n      ComparisonOperator: GreaterThanOrEqualToThreshold\n      Dimensions:\n        - Name: ProjectName\n          Value:\n            Ref: CodeCommitPipelineGitHub0797840C\n      EvaluationPeriods: 1\n      MetricName: FailedBuilds\n      Namespace: AWS/CodeBuild\n      Period: 300\n      Statistic: Sum\n      Threshold: 1\n      TreatMissingData: ignore\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/GitHub/Default/Alarm/Resource\n  CodeCommitPipelineGitHubPagesRole10784D1D:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: codebuild.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/AmazonElasticContainerRegistryPublicReadOnly\n    Metadata:\n      aws:cdk:path: 
delivlib-test/CodeCommitPipeline/GitHubPages/Default/Resource/Role/Resource\n  CodeCommitPipelineGitHubPagesRoleDefaultPolicy23292E7F:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - logs:CreateLogGroup\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineGitHubPages53B77CF6\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineGitHubPages53B77CF6\n                    - :*\n          - Action:\n              - codebuild:CreateReportGroup\n              - codebuild:CreateReport\n              - codebuild:UpdateReport\n              - codebuild:BatchPutTestCases\n              - codebuild:BatchPutCodeCoverages\n            Effect: Allow\n            Resource:\n              Fn::Join:\n                - \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - :codebuild:us-east-1:712950704752:report-group/\n                  - Ref: CodeCommitPipelineGitHubPages53B77CF6\n                  - -*\n          - Action:\n              - ssmmessages:CreateControlChannel\n              - ssmmessages:CreateDataChannel\n              - ssmmessages:OpenControlChannel\n              - ssmmessages:OpenDataChannel\n              - logs:DescribeLogGroups\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n              - s3:GetEncryptionConfiguration\n              - s3:PutObject\n            Effect: Allow\n            Resource: \"*\"\n 
         - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1/*\n          - Action:\n              - secretsmanager:ListSecrets\n              - secretsmanager:DescribeSecret\n              - secretsmanager:GetSecretValue\n            Effect: Allow\n            Resource: arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/github-ssh-okGazo\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                        - Arn\n                    - /*\n          - Action:\n              - kms:Decrypt\n              - kms:DescribeKey\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n          - Action:\n              - kms:Decrypt\n              - kms:Encrypt\n              - kms:ReEncrypt*\n              - kms:GenerateDataKey*\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n        Version: 
\"2012-10-17\"\n      PolicyName: CodeCommitPipelineGitHubPagesRoleDefaultPolicy23292E7F\n      Roles:\n        - Ref: CodeCommitPipelineGitHubPagesRole10784D1D\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/GitHubPages/Default/Resource/Role/DefaultPolicy/Resource\n  CodeCommitPipelineGitHubPages53B77CF6:\n    Type: AWS::CodeBuild::Project\n    Properties:\n      Artifacts:\n        Type: NO_ARTIFACTS\n      Cache:\n        Type: NO_CACHE\n      EncryptionKey:\n        Fn::GetAtt:\n          - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n          - Arn\n      Environment:\n        ComputeType: BUILD_GENERAL1_MEDIUM\n        EnvironmentVariables:\n          - Name: SCRIPT_S3_BUCKET\n            Type: PLAINTEXT\n            Value: cdk-hnb659fds-assets-712950704752-us-east-1\n          - Name: SCRIPT_S3_KEY\n            Type: PLAINTEXT\n            Value: 3252e1539f1e33e68b94d8ee2a2a84ff6a7fdf4fbbdb7b77286f931145dfe3b3.zip\n          - Name: GITHUB_REPO\n            Type: PLAINTEXT\n            Value: git@github.com:awslabs/aws-delivlib-sample.git\n          - Name: GITHUB_PAGES_BRANCH\n            Type: PLAINTEXT\n            Value: gh-pages\n          - Name: SSH_KEY_SECRET\n            Type: PLAINTEXT\n            Value: arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/github-ssh-okGazo\n          - Name: FOR_REAL\n            Type: PLAINTEXT\n            Value: \"false\"\n          - Name: COMMIT_USERNAME\n            Type: PLAINTEXT\n            Value: foobar\n          - Name: COMMIT_EMAIL\n            Type: PLAINTEXT\n            Value: foo@bar.com\n          - Name: BUILD_MANIFEST\n            Type: PLAINTEXT\n            Value: ./build.json\n        Image: aws/codebuild/standard:7.0\n        ImagePullCredentialsType: CODEBUILD\n        PrivilegedMode: false\n        Type: LINUX_CONTAINER\n      ServiceRole:\n        Fn::GetAtt:\n          - CodeCommitPipelineGitHubPagesRole10784D1D\n          - Arn\n 
     Source:\n        BuildSpec: |-\n          {\n            \"version\": \"0.2\",\n            \"phases\": {\n              \"install\": {\n                \"commands\": [\n                  \"command -v yarn > /dev/null || npm install --global yarn\"\n                ]\n              },\n              \"pre_build\": {\n                \"commands\": [\n                  \"echo \\\"Downloading scripts from s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY}\\\"\",\n                  \"aws s3 cp s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY} /tmp\",\n                  \"mkdir -p /tmp/scriptdir\",\n                  \"unzip /tmp/$(basename $SCRIPT_S3_KEY) -d /tmp/scriptdir\"\n                ]\n              },\n              \"build\": {\n                \"commands\": [\n                  \"export SCRIPT_DIR=/tmp/scriptdir\",\n                  \"echo \\\"Running publish.sh\\\"\",\n                  \"/bin/bash /tmp/scriptdir/publish.sh\"\n                ]\n              }\n            }\n          }\n        Type: NO_SOURCE\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/GitHubPages/Default/Resource/Resource\n  CodeCommitPipelineGitHubPagesAlarmC5B4BC57:\n    Type: AWS::CloudWatch::Alarm\n    Properties:\n      ComparisonOperator: GreaterThanOrEqualToThreshold\n      Dimensions:\n        - Name: ProjectName\n          Value:\n            Ref: CodeCommitPipelineGitHubPages53B77CF6\n      EvaluationPeriods: 1\n      MetricName: FailedBuilds\n      Namespace: AWS/CodeBuild\n      Period: 300\n      Statistic: Sum\n      Threshold: 1\n      TreatMissingData: ignore\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/GitHubPages/Default/Alarm/Resource\n  CodeCommitPipelinePyPIRole30E20A9B:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: codebuild.amazonaws.com\n        Version: 
\"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/AmazonElasticContainerRegistryPublicReadOnly\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/PyPI/Default/Resource/Role/Resource\n  CodeCommitPipelinePyPIRoleDefaultPolicy5062B3BA:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - logs:CreateLogGroup\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelinePyPI2C59CE7B\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelinePyPI2C59CE7B\n                    - :*\n          - Action:\n              - codebuild:CreateReportGroup\n              - codebuild:CreateReport\n              - codebuild:UpdateReport\n              - codebuild:BatchPutTestCases\n              - codebuild:BatchPutCodeCoverages\n            Effect: Allow\n            Resource:\n              Fn::Join:\n                - \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - :codebuild:us-east-1:712950704752:report-group/\n                  - Ref: CodeCommitPipelinePyPI2C59CE7B\n                  - -*\n          - Action:\n              - ssmmessages:CreateControlChannel\n              - ssmmessages:CreateDataChannel\n              - ssmmessages:OpenControlChannel\n              - ssmmessages:OpenDataChannel\n              - 
logs:DescribeLogGroups\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n              - s3:GetEncryptionConfiguration\n              - s3:PutObject\n            Effect: Allow\n            Resource: \"*\"\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1/*\n          - Action:\n              - secretsmanager:ListSecrets\n              - secretsmanager:DescribeSecret\n              - secretsmanager:GetSecretValue\n            Effect: Allow\n            Resource: arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/pypi-tp8M57\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                        - Arn\n                    - /*\n          - Action:\n              - kms:Decrypt\n              - kms:DescribeKey\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n          - Action:\n              - kms:Decrypt\n              - kms:Encrypt\n              - kms:ReEncrypt*\n              - 
kms:GenerateDataKey*\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelinePyPIRoleDefaultPolicy5062B3BA\n      Roles:\n        - Ref: CodeCommitPipelinePyPIRole30E20A9B\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/PyPI/Default/Resource/Role/DefaultPolicy/Resource\n  CodeCommitPipelinePyPI2C59CE7B:\n    Type: AWS::CodeBuild::Project\n    Properties:\n      Artifacts:\n        Type: NO_ARTIFACTS\n      Cache:\n        Type: NO_CACHE\n      EncryptionKey:\n        Fn::GetAtt:\n          - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n          - Arn\n      Environment:\n        ComputeType: BUILD_GENERAL1_MEDIUM\n        EnvironmentVariables:\n          - Name: SCRIPT_S3_BUCKET\n            Type: PLAINTEXT\n            Value: cdk-hnb659fds-assets-712950704752-us-east-1\n          - Name: SCRIPT_S3_KEY\n            Type: PLAINTEXT\n            Value: c17f8f9d719e9e4e72c47092e9d9a130a19c607b87d5f5a327b05f38c219c1ca.zip\n          - Name: FOR_REAL\n            Type: PLAINTEXT\n            Value: \"false\"\n          - Name: PYPI_CREDENTIALS_SECRET_ID\n            Type: PLAINTEXT\n            Value: arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/pypi-tp8M57\n        Image: aws/codebuild/standard:7.0\n        ImagePullCredentialsType: CODEBUILD\n        PrivilegedMode: false\n        Type: LINUX_CONTAINER\n      ServiceRole:\n        Fn::GetAtt:\n          - CodeCommitPipelinePyPIRole30E20A9B\n          - Arn\n      Source:\n        BuildSpec: |-\n          {\n            \"version\": \"0.2\",\n            \"phases\": {\n              \"install\": {\n                \"commands\": [\n                  \"command -v yarn > /dev/null || npm install --global yarn\"\n                ]\n              },\n              
\"pre_build\": {\n                \"commands\": [\n                  \"echo \\\"Downloading scripts from s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY}\\\"\",\n                  \"aws s3 cp s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY} /tmp\",\n                  \"mkdir -p /tmp/scriptdir\",\n                  \"unzip /tmp/$(basename $SCRIPT_S3_KEY) -d /tmp/scriptdir\"\n                ]\n              },\n              \"build\": {\n                \"commands\": [\n                  \"export SCRIPT_DIR=/tmp/scriptdir\",\n                  \"echo \\\"Running publish.sh\\\"\",\n                  \"/bin/bash /tmp/scriptdir/publish.sh\"\n                ]\n              }\n            }\n          }\n        Type: NO_SOURCE\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/PyPI/Default/Resource/Resource\n  CodeCommitPipelinePyPIAlarmEA15EF14:\n    Type: AWS::CloudWatch::Alarm\n    Properties:\n      ComparisonOperator: GreaterThanOrEqualToThreshold\n      Dimensions:\n        - Name: ProjectName\n          Value:\n            Ref: CodeCommitPipelinePyPI2C59CE7B\n      EvaluationPeriods: 1\n      MetricName: FailedBuilds\n      Namespace: AWS/CodeBuild\n      Period: 300\n      Statistic: Sum\n      Threshold: 1\n      TreatMissingData: ignore\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/PyPI/Default/Alarm/Resource\n  CodeCommitPipelineGolangRole46DA8D4C:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: codebuild.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/AmazonElasticContainerRegistryPublicReadOnly\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/Golang/Default/Resource/Role/Resource\n  
CodeCommitPipelineGolangRoleDefaultPolicy189AF9A0:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - logs:CreateLogGroup\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineGolangBDFA17A1\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineGolangBDFA17A1\n                    - :*\n          - Action:\n              - codebuild:CreateReportGroup\n              - codebuild:CreateReport\n              - codebuild:UpdateReport\n              - codebuild:BatchPutTestCases\n              - codebuild:BatchPutCodeCoverages\n            Effect: Allow\n            Resource:\n              Fn::Join:\n                - \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - :codebuild:us-east-1:712950704752:report-group/\n                  - Ref: CodeCommitPipelineGolangBDFA17A1\n                  - -*\n          - Action:\n              - ssmmessages:CreateControlChannel\n              - ssmmessages:CreateDataChannel\n              - ssmmessages:OpenControlChannel\n              - ssmmessages:OpenDataChannel\n              - logs:DescribeLogGroups\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n              - s3:GetEncryptionConfiguration\n              - s3:PutObject\n            Effect: Allow\n            Resource: \"*\"\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - 
s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :s3:::cdk-hnb659fds-assets-712950704752-us-east-1/*\n          - Action:\n              - secretsmanager:ListSecrets\n              - secretsmanager:DescribeSecret\n              - secretsmanager:GetSecretValue\n            Effect: Allow\n            Resource: arn:aws:secretsmanager:us-east-1:712950704752:secret:github-token-QDP6QX\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::GetAtt:\n                  - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - CodeCommitPipelineBuildPipelineArtifactsBucketED2813B3\n                        - Arn\n                    - /*\n          - Action:\n              - kms:Decrypt\n              - kms:DescribeKey\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n          - Action:\n              - kms:Decrypt\n              - kms:Encrypt\n              - kms:ReEncrypt*\n              - kms:GenerateDataKey*\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n                - Arn\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineGolangRoleDefaultPolicy189AF9A0\n      Roles:\n        - Ref: 
CodeCommitPipelineGolangRole46DA8D4C\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/Golang/Default/Resource/Role/DefaultPolicy/Resource\n  CodeCommitPipelineGolangBDFA17A1:\n    Type: AWS::CodeBuild::Project\n    Properties:\n      Artifacts:\n        Type: NO_ARTIFACTS\n      Cache:\n        Type: NO_CACHE\n      EncryptionKey:\n        Fn::GetAtt:\n          - CodeCommitPipelineBuildPipelineArtifactsBucketEncryptionKey05A62A83\n          - Arn\n      Environment:\n        ComputeType: BUILD_GENERAL1_MEDIUM\n        EnvironmentVariables:\n          - Name: SCRIPT_S3_BUCKET\n            Type: PLAINTEXT\n            Value: cdk-hnb659fds-assets-712950704752-us-east-1\n          - Name: SCRIPT_S3_KEY\n            Type: PLAINTEXT\n            Value: d51656c063a0eef8e6e43eebc868209915793a138eb4a16217cb8d51583f6424.zip\n          - Name: DRYRUN\n            Type: PLAINTEXT\n            Value: \"true\"\n          - Name: GITHUB_TOKEN_SECRET\n            Type: PLAINTEXT\n            Value: arn:aws:secretsmanager:us-east-1:712950704752:secret:github-token-QDP6QX\n          - Name: GIT_BRANCH\n            Type: PLAINTEXT\n            Value: golang\n          - Name: GIT_USER_NAME\n            Type: PLAINTEXT\n            Value: Delivlib Tests\n          - Name: GIT_USER_EMAIL\n            Type: PLAINTEXT\n            Value: aws-cdk-dev+delivlib@amazon.com\n        Image: aws/codebuild/standard:7.0\n        ImagePullCredentialsType: CODEBUILD\n        PrivilegedMode: false\n        Type: LINUX_CONTAINER\n      ServiceRole:\n        Fn::GetAtt:\n          - CodeCommitPipelineGolangRole46DA8D4C\n          - Arn\n      Source:\n        BuildSpec: |-\n          {\n            \"version\": \"0.2\",\n            \"phases\": {\n              \"install\": {\n                \"commands\": [\n                  \"command -v yarn > /dev/null || npm install --global yarn\"\n                ]\n              },\n              \"pre_build\": {\n                
\"commands\": [\n                  \"echo \\\"Downloading scripts from s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY}\\\"\",\n                  \"aws s3 cp s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY} /tmp\",\n                  \"mkdir -p /tmp/scriptdir\",\n                  \"unzip /tmp/$(basename $SCRIPT_S3_KEY) -d /tmp/scriptdir\"\n                ]\n              },\n              \"build\": {\n                \"commands\": [\n                  \"export SCRIPT_DIR=/tmp/scriptdir\",\n                  \"echo \\\"Running publish.sh\\\"\",\n                  \"/bin/bash /tmp/scriptdir/publish.sh\"\n                ]\n              }\n            }\n          }\n        Type: NO_SOURCE\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/Golang/Default/Resource/Resource\n  CodeCommitPipelineGolangAlarmF9F61D0D:\n    Type: AWS::CloudWatch::Alarm\n    Properties:\n      ComparisonOperator: GreaterThanOrEqualToThreshold\n      Dimensions:\n        - Name: ProjectName\n          Value:\n            Ref: CodeCommitPipelineGolangBDFA17A1\n      EvaluationPeriods: 1\n      MetricName: FailedBuilds\n      Namespace: AWS/CodeBuild\n      Period: 300\n      Statistic: Sum\n      Threshold: 1\n      TreatMissingData: ignore\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/Golang/Default/Alarm/Resource\n  CodeCommitPipelineAutoBumpAutoPullRequestRoleE7E0E388:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: codebuild.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/AmazonElasticContainerRegistryPublicReadOnly\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/AutoBump/AutoPullRequest/PullRequest/Role/Resource\n  
CodeCommitPipelineAutoBumpAutoPullRequestRoleDefaultPolicy3BB1CD6F:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - logs:CreateLogGroup\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineAutoBumpAutoPullRequest033F6993\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineAutoBumpAutoPullRequest033F6993\n                    - :*\n          - Action:\n              - codebuild:CreateReportGroup\n              - codebuild:CreateReport\n              - codebuild:UpdateReport\n              - codebuild:BatchPutTestCases\n              - codebuild:BatchPutCodeCoverages\n            Effect: Allow\n            Resource:\n              Fn::Join:\n                - \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - :codebuild:us-east-1:712950704752:report-group/\n                  - Ref: CodeCommitPipelineAutoBumpAutoPullRequest033F6993\n                  - -*\n          - Action:\n              - ssmmessages:CreateControlChannel\n              - ssmmessages:CreateDataChannel\n              - ssmmessages:OpenControlChannel\n              - ssmmessages:OpenDataChannel\n              - logs:DescribeLogGroups\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n              - s3:GetEncryptionConfiguration\n              - s3:PutObject\n            Effect: Allow\n            Resource: \"*\"\n          - Action:\n           
   - secretsmanager:ListSecrets\n              - secretsmanager:DescribeSecret\n              - secretsmanager:GetSecretValue\n            Effect: Allow\n            Resource: arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/github-ssh-okGazo\n          - Action:\n              - secretsmanager:ListSecrets\n              - secretsmanager:DescribeSecret\n              - secretsmanager:GetSecretValue\n            Effect: Allow\n            Resource: arn:aws:secretsmanager:us-east-1:712950704752:secret:github-token-QDP6QX\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineAutoBumpAutoPullRequestRoleDefaultPolicy3BB1CD6F\n      Roles:\n        - Ref: CodeCommitPipelineAutoBumpAutoPullRequestRoleE7E0E388\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/AutoBump/AutoPullRequest/PullRequest/Role/DefaultPolicy/Resource\n  CodeCommitPipelineAutoBumpAutoPullRequest033F6993:\n    Type: AWS::CodeBuild::Project\n    Properties:\n      Artifacts:\n        Type: NO_ARTIFACTS\n      Cache:\n        Type: NO_CACHE\n      Description: Release awslabs/aws-delivlib-sample, branch master\n      EncryptionKey: alias/aws/s3\n      Environment:\n        ComputeType: BUILD_GENERAL1_SMALL\n        Image: public.ecr.aws/jsii/superchain:1-bullseye-slim-node18\n        ImagePullCredentialsType: SERVICE_ROLE\n        PrivilegedMode: false\n        Type: LINUX_CONTAINER\n      ServiceRole:\n        Fn::GetAtt:\n          - CodeCommitPipelineAutoBumpAutoPullRequestRoleE7E0E388\n          - Arn\n      Source:\n        BuildSpec: |-\n          {\n            \"version\": \"0.2\",\n            \"phases\": {\n              \"pre_build\": {\n                \"commands\": [\n                  \"git config --global user.email \\\"foo@bar.com\\\"\",\n                  \"git config --global user.name \\\"foobar\\\"\"\n                ]\n              },\n              \"build\": {\n                \"commands\": [\n                  \"export 
SKIP=false\",\n                  \"$SKIP || { aws secretsmanager get-secret-value --secret-id \\\"arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/github-ssh-okGazo\\\" --output=text --query=SecretString > ~/.ssh/id_rsa ; }\",\n                  \"$SKIP || { mkdir -p ~/.ssh ; }\",\n                  \"$SKIP || { chmod 0600 ~/.ssh/id_rsa ~/.ssh/config ; }\",\n                  \"$SKIP || { ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts ; }\",\n                  \"$SKIP || { ls .git && { echo \\\".git directory exists\\\";  } || { echo \\\".git directory doesnot exist - cloning...\\\" && git init . && git remote add origin git@github.com:awslabs/aws-delivlib-sample.git && git fetch && git reset --hard origin/master && git branch -M master && git clean -fqdx; } ; }\",\n                  \"$SKIP || { git describe --exact-match master && { echo 'Skip condition is met, skipping...' && export SKIP=true; } || { echo 'Skip condition is not met, continuing...' && export SKIP=false; } ; }\",\n                  \"$SKIP || { export GITHUB_TOKEN=$(aws secretsmanager get-secret-value --secret-id \\\"arn:aws:secretsmanager:us-east-1:712950704752:secret:github-token-QDP6QX\\\" --output=text --query=SecretString) ; }\",\n                  \"$SKIP || { git rev-parse --verify origin/bump/$VERSION && { git checkout bump/$VERSION && git merge master && npm i && npm run bump && export VERSION=$(git describe) && echo Finished running user commands;  } || { git checkout master && git checkout -b temp && npm i && npm run bump && export VERSION=$(git describe) && echo Finished running user commands && git branch -M bump/$VERSION; } ; }\",\n                  \"$SKIP || { git merge-base --is-ancestor bump/$VERSION origin/master && { echo \\\"Skipping: bump/$VERSION is an ancestor of origin/master\\\"; export SKIP=true; } || { echo \\\"Pushing: bump/$VERSION is ahead of origin/master\\\"; export SKIP=false; } ; }\",\n                  \"$SKIP || { git remote add origin_ssh 
git@github.com:awslabs/aws-delivlib-sample.git ; }\",\n                  \"$SKIP || { git push --atomic --follow-tags origin_ssh bump/$VERSION:bump/$VERSION ; }\",\n                  \"$SKIP || { curl --fail -X POST -o pr.json --header \\\"Authorization: token $GITHUB_TOKEN\\\" --header \\\"Content-Type: application/json\\\" -d \\\"{\\\\\\\"title\\\\\\\":\\\\\\\"chore(release): $VERSION\\\\\\\",\\\\\\\"base\\\\\\\":\\\\\\\"master\\\\\\\",\\\\\\\"head\\\\\\\":\\\\\\\"bump/$VERSION\\\\\\\"}\\\" https://api.github.com/repos/awslabs/aws-delivlib-sample/pulls && export PR_NUMBER=$(node -p 'require(\\\"./pr.json\\\").number') ; }\",\n                  \"$SKIP || { curl --fail -X PATCH --header \\\"Authorization: token $GITHUB_TOKEN\\\" --header \\\"Content-Type: application/json\\\" -d \\\"{\\\\\\\"body\\\\\\\":\\\\\\\"See [CHANGELOG](https://github.com/awslabs/aws-delivlib-sample/blob/bump/$VERSION/CHANGELOG.md)\\\\\\\"}\\\" https://api.github.com/repos/awslabs/aws-delivlib-sample/pulls/$PR_NUMBER ; }\"\n                ]\n              }\n            }\n          }\n        GitCloneDepth: 0\n        Location: https://github.com/awslabs/aws-delivlib-sample.git\n        ReportBuildStatus: false\n        Type: GITHUB\n      Triggers:\n        Webhook: false\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/AutoBump/AutoPullRequest/PullRequest/Resource\n  CodeCommitPipelineAutoBumpAutoPullRequestAutoPullRequestFailedAlarmEFC2345F:\n    Type: AWS::CloudWatch::Alarm\n    Properties:\n      ComparisonOperator: GreaterThanOrEqualToThreshold\n      Dimensions:\n        - Name: ProjectName\n          Value:\n            Ref: CodeCommitPipelineAutoBumpAutoPullRequest033F6993\n      EvaluationPeriods: 1\n      MetricName: FailedBuilds\n      Namespace: AWS/CodeBuild\n      Period: 300\n      Statistic: Sum\n      Threshold: 1\n      TreatMissingData: ignore\n    Metadata:\n      aws:cdk:path: 
delivlib-test/CodeCommitPipeline/AutoBump/AutoPullRequest/AutoPullRequestFailedAlarm/Resource\n  CodeCommitPipelineAutoBuildProjectRole733AD222:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: codebuild.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/AmazonElasticContainerRegistryPublicReadOnly\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/AutoBuild/Project/Role/Resource\n  CodeCommitPipelineAutoBuildProjectRoleDefaultPolicyFF5563AC:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - logs:CreateLogGroup\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n            Effect: Allow\n            Resource:\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineAutoBuildProject5D212EE9\n              - Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :logs:us-east-1:712950704752:log-group:/aws/codebuild/\n                    - Ref: CodeCommitPipelineAutoBuildProject5D212EE9\n                    - :*\n          - Action:\n              - codebuild:CreateReportGroup\n              - codebuild:CreateReport\n              - codebuild:UpdateReport\n              - codebuild:BatchPutTestCases\n              - codebuild:BatchPutCodeCoverages\n            Effect: Allow\n            Resource:\n              Fn::Join:\n                - \"\"\n                - - 
\"arn:\"\n                  - Ref: AWS::Partition\n                  - :codebuild:us-east-1:712950704752:report-group/\n                  - Ref: CodeCommitPipelineAutoBuildProject5D212EE9\n                  - -*\n          - Action:\n              - ssmmessages:CreateControlChannel\n              - ssmmessages:CreateDataChannel\n              - ssmmessages:OpenControlChannel\n              - ssmmessages:OpenDataChannel\n              - logs:DescribeLogGroups\n              - logs:CreateLogStream\n              - logs:PutLogEvents\n              - s3:GetEncryptionConfiguration\n              - s3:PutObject\n            Effect: Allow\n            Resource: \"*\"\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineAutoBuildProjectRoleDefaultPolicyFF5563AC\n      Roles:\n        - Ref: CodeCommitPipelineAutoBuildProjectRole733AD222\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/AutoBuild/Project/Role/DefaultPolicy/Resource\n  CodeCommitPipelineAutoBuildProject5D212EE9:\n    Type: AWS::CodeBuild::Project\n    Properties:\n      Artifacts:\n        Type: NO_ARTIFACTS\n      BadgeEnabled: true\n      Cache:\n        Type: NO_CACHE\n      Description: Automatic PR build for awslabs/aws-delivlib-sample\n      EncryptionKey: alias/aws/s3\n      Environment:\n        ComputeType: BUILD_GENERAL1_SMALL\n        Image: public.ecr.aws/jsii/superchain:1-bullseye-slim-node18\n        ImagePullCredentialsType: SERVICE_ROLE\n        PrivilegedMode: false\n        Type: LINUX_CONTAINER\n      ServiceRole:\n        Fn::GetAtt:\n          - CodeCommitPipelineAutoBuildProjectRole733AD222\n          - Arn\n      Source:\n        Location: https://github.com/awslabs/aws-delivlib-sample.git\n        ReportBuildStatus: true\n        Type: GITHUB\n      Triggers:\n        FilterGroups:\n          - - Pattern: PUSH, PULL_REQUEST_CREATED, PULL_REQUEST_UPDATED\n              Type: EVENT\n        Webhook: true\n    Metadata:\n      aws:cdk:path: 
delivlib-test/CodeCommitPipeline/AutoBuild/Project/Resource\n  CodeCommitPipelineAutoBuildGitHubCodeBuildLogsSAR75EABC5D:\n    Type: AWS::Serverless::Application\n    Properties:\n      Location:\n        ApplicationId: arn:aws:serverlessrepo:us-east-1:277187709615:applications/github-codebuild-logs\n        SemanticVersion: 1.6.0\n      Parameters:\n        CodeBuildProjectName:\n          Ref: CodeCommitPipelineAutoBuildProject5D212EE9\n        DeletePreviousComments: \"true\"\n        CommentOnSuccess: \"true\"\n        GitHubOAuthToken: \"{{resolve:secretsmanager:arn:aws:secretsmanager:us-east-1:712950704752:secret:github-token-QDP6QX:SecretString:::}}\"\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/AutoBuild/GitHubCodeBuildLogsSAR\n  CodeCommitPipelineChangeControllerCalendar94B1DEA8:\n    Type: AWS::S3::Bucket\n    Properties:\n      VersioningConfiguration:\n        Status: Enabled\n    UpdateReplacePolicy: Delete\n    DeletionPolicy: Delete\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/ChangeController/Calendar/Resource\n  CodeCommitPipelineChangeControllerCalendarNotifications1AFBE6E9:\n    Type: Custom::S3BucketNotifications\n    Properties:\n      ServiceToken:\n        Fn::GetAtt:\n          - BucketNotificationsHandler050a0587b7544547bf325f094a3db8347ECC3691\n          - Arn\n      BucketName:\n        Ref: CodeCommitPipelineChangeControllerCalendar94B1DEA8\n      NotificationConfiguration:\n        LambdaFunctionConfigurations:\n          - Events:\n              - s3:ObjectCreated:*\n            Filter:\n              Key:\n                FilterRules:\n                  - Name: prefix\n                    Value: change-control.ics\n            LambdaFunctionArn:\n              Fn::GetAtt:\n                - CodeCommitPipelineChangeControllerFunction776EAE6A\n                - Arn\n      Managed: true\n    DependsOn:\n      - 
CodeCommitPipelineChangeControllerCalendarAllowBucketNotificationsTodelivlibtestCodeCommitPipelineChangeControllerFunction83CC56EB3330DA3F\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/ChangeController/Calendar/Notifications/Resource\n  CodeCommitPipelineChangeControllerCalendarAllowBucketNotificationsTodelivlibtestCodeCommitPipelineChangeControllerFunction83CC56EB3330DA3F:\n    Type: AWS::Lambda::Permission\n    Properties:\n      Action: lambda:InvokeFunction\n      FunctionName:\n        Fn::GetAtt:\n          - CodeCommitPipelineChangeControllerFunction776EAE6A\n          - Arn\n      Principal: s3.amazonaws.com\n      SourceAccount: \"712950704752\"\n      SourceArn:\n        Fn::GetAtt:\n          - CodeCommitPipelineChangeControllerCalendar94B1DEA8\n          - Arn\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/ChangeController/Calendar/AllowBucketNotificationsTodelivlibtestCodeCommitPipelineChangeControllerFunction83CC56EB\n  CodeCommitPipelineChangeControllerFunctionServiceRoleF02841DB:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: lambda.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/service-role/AWSLambdaBasicExecutionRole\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/ChangeController/Function/ServiceRole/Resource\n  CodeCommitPipelineChangeControllerFunctionServiceRoleDefaultPolicy315F7AF5:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - codepipeline:EnableStageTransition\n              - codepipeline:DisableStageTransition\n            Effect: Allow\n            Resource:\n              
Fn::Join:\n                - \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - \":codepipeline:us-east-1:712950704752:\"\n                  - Ref: CodeCommitPipelineBuildPipeline656B8CCB\n                  - /Publish\n          - Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n            Effect: Allow\n            Resource:\n              - Fn::GetAtt:\n                  - CodeCommitPipelineChangeControllerCalendar94B1DEA8\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - CodeCommitPipelineChangeControllerCalendar94B1DEA8\n                        - Arn\n                    - /*\n        Version: \"2012-10-17\"\n      PolicyName: CodeCommitPipelineChangeControllerFunctionServiceRoleDefaultPolicy315F7AF5\n      Roles:\n        - Ref: CodeCommitPipelineChangeControllerFunctionServiceRoleF02841DB\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/ChangeController/Function/ServiceRole/DefaultPolicy/Resource\n  CodeCommitPipelineChangeControllerFunction776EAE6A:\n    Type: AWS::Lambda::Function\n    Properties:\n      Code:\n        S3Bucket: cdk-hnb659fds-assets-712950704752-us-east-1\n        S3Key: 7164ff4ec7f71118687118359d2a1a1197c400199c39eb26880190a50df3637c.zip\n      Description: Enforces a Change Control Policy into CodePipeline's Publish stage\n      Environment:\n        Variables:\n          CHANGE_CONTROL_BUCKET_NAME:\n            Ref: CodeCommitPipelineChangeControllerCalendar94B1DEA8\n          CHANGE_CONTROL_OBJECT_KEY: change-control.ics\n          PIPELINE_NAME:\n            Ref: CodeCommitPipelineBuildPipeline656B8CCB\n          STAGE_NAME: Publish\n      Handler: index.handler\n      Role:\n        Fn::GetAtt:\n          - CodeCommitPipelineChangeControllerFunctionServiceRoleF02841DB\n          - Arn\n      Runtime: nodejs20.x\n      Timeout: 300\n  
  DependsOn:\n      - CodeCommitPipelineChangeControllerFunctionServiceRoleDefaultPolicy315F7AF5\n      - CodeCommitPipelineChangeControllerFunctionServiceRoleF02841DB\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/ChangeController/Function/Resource\n  CodeCommitPipelineChangeControllerFailed03331BFB:\n    Type: AWS::CloudWatch::Alarm\n    Properties:\n      ComparisonOperator: GreaterThanOrEqualToThreshold\n      DatapointsToAlarm: 1\n      Dimensions:\n        - Name: FunctionName\n          Value:\n            Ref: CodeCommitPipelineChangeControllerFunction776EAE6A\n      EvaluationPeriods: 1\n      MetricName: Errors\n      Namespace: AWS/Lambda\n      Period: 300\n      Statistic: Sum\n      Threshold: 1\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/ChangeController/Failed/Resource\n  CodeCommitPipelineChangeControllerRuleAEEA7A52:\n    Type: AWS::Events::Rule\n    Properties:\n      Description:\n        Fn::Join:\n          - \"\"\n          - - \"Run the change controller for promotions into \"\n            - Ref: CodeCommitPipelineBuildPipeline656B8CCB\n            - \"'s Publish on a [object Object] schedule\"\n      ScheduleExpression: rate(15 minutes)\n      State: ENABLED\n      Targets:\n        - Arn:\n            Fn::GetAtt:\n              - CodeCommitPipelineChangeControllerFunction776EAE6A\n              - Arn\n          Id: Target0\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/ChangeController/Rule/Resource\n  CodeCommitPipelineChangeControllerRuleAllowEventRuledelivlibtestCodeCommitPipelineChangeControllerFunction83CC56EB9365DB12:\n    Type: AWS::Lambda::Permission\n    Properties:\n      Action: lambda:InvokeFunction\n      FunctionName:\n        Fn::GetAtt:\n          - CodeCommitPipelineChangeControllerFunction776EAE6A\n          - Arn\n      Principal: events.amazonaws.com\n      SourceArn:\n        Fn::GetAtt:\n          - CodeCommitPipelineChangeControllerRuleAEEA7A52\n    
      - Arn\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeCommitPipeline/ChangeController/Rule/AllowEventRuledelivlibtestCodeCommitPipelineChangeControllerFunction83CC56EB\n  AssumeMe924099BB:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Condition:\n              StringEquals:\n                sts:ExternalId: require-me-please\n            Effect: Allow\n            Principal:\n              AWS:\n                Fn::Join:\n                  - \"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :iam::712950704752:root\n        Version: \"2012-10-17\"\n    Metadata:\n      aws:cdk:path: delivlib-test/AssumeMe/Resource\n  X509CodeSigningKeyRSAPrivateKeyResourceV2926395A0:\n    Type: Custom::RsaPrivateKeySecret\n    Properties:\n      ServiceToken:\n        Fn::GetAtt:\n          - RSAPrivateKey517D342FA590447BB5255D06E403A40698672B0B\n          - Arn\n      ResourceVersion: H7YGMVLuZ+QukZc2cxaT6CX6eZHRf2OHSk/h85HtX74=\n      Description: The PEM-encoded private key of the x509 Code-Signing Certificate\n      KeySize: 2048\n      SecretName: delivlib-test/X509CodeSigningKey/RSAPrivateKeyV2\n    DependsOn:\n      - RSAPrivateKey517D342FA590447BB5255D06E403A406ServiceRoleDefaultPolicy27404286\n      - RSAPrivateKey517D342FA590447BB5255D06E403A406ServiceRoleFC773AAD\n    UpdateReplacePolicy: Delete\n    DeletionPolicy: Delete\n    Metadata:\n      aws:cdk:path: delivlib-test/X509CodeSigningKey/RSAPrivateKey/ResourceV2/Default\n  X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestBucketD81FB261:\n    Type: AWS::S3::Bucket\n    Properties:\n      BucketEncryption:\n        ServerSideEncryptionConfiguration:\n          - ServerSideEncryptionByDefault:\n              SSEAlgorithm: AES256\n      Tags:\n        - Key: aws-cdk:auto-delete-objects\n          Value: \"true\"\n    UpdateReplacePolicy: Delete\n    
DeletionPolicy: Delete\n    Metadata:\n      aws:cdk:path: delivlib-test/X509CodeSigningKey/RSAPrivateKey/CertificateSigningRequest/Bucket/Resource\n  X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestBucketPolicy8E2DB075:\n    Type: AWS::S3::BucketPolicy\n    Properties:\n      Bucket:\n        Ref: X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestBucketD81FB261\n      PolicyDocument:\n        Statement:\n          - Action: s3:*\n            Condition:\n              Bool:\n                aws:SecureTransport: \"false\"\n            Effect: Deny\n            Principal:\n              AWS: \"*\"\n            Resource:\n              - Fn::GetAtt:\n                  - X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestBucketD81FB261\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestBucketD81FB261\n                        - Arn\n                    - /*\n          - Action:\n              - s3:PutBucketPolicy\n              - s3:GetBucket*\n              - s3:List*\n              - s3:DeleteObject*\n            Effect: Allow\n            Principal:\n              AWS:\n                Fn::GetAtt:\n                  - CustomS3AutoDeleteObjectsCustomResourceProviderRole3B1BD092\n                  - Arn\n            Resource:\n              - Fn::GetAtt:\n                  - X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestBucketD81FB261\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestBucketD81FB261\n                        - Arn\n                    - /*\n        Version: \"2012-10-17\"\n    Metadata:\n      aws:cdk:path: delivlib-test/X509CodeSigningKey/RSAPrivateKey/CertificateSigningRequest/Bucket/Policy/Resource\n  
X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestBucketAutoDeleteObjectsCustomResource8471F189:\n    Type: Custom::S3AutoDeleteObjects\n    Properties:\n      ServiceToken:\n        Fn::GetAtt:\n          - CustomS3AutoDeleteObjectsCustomResourceProviderHandler9D90184F\n          - Arn\n      BucketName:\n        Ref: X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestBucketD81FB261\n    DependsOn:\n      - X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestBucketPolicy8E2DB075\n    UpdateReplacePolicy: Delete\n    DeletionPolicy: Delete\n    Metadata:\n      aws:cdk:path: delivlib-test/X509CodeSigningKey/RSAPrivateKey/CertificateSigningRequest/Bucket/AutoDeleteObjectsCustomResource/Default\n  X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestResourceV284A29392:\n    Type: Custom::CertificateSigningRequest\n    Properties:\n      ServiceToken:\n        Fn::GetAtt:\n          - CreateCSRF0641C152BC0481E94BA7BF43F8BBDE313A5E125\n          - Arn\n      ResourceVersion: H7YGMVLuZ+QukZc2cxaT6CX6eZHRf2OHSk/h85HtX74=\n      PrivateKeySecretId:\n        Fn::GetAtt:\n          - X509CodeSigningKeyRSAPrivateKeyResourceV2926395A0\n          - SecretArn\n      DnCommonName: delivlib-test\n      DnCountry: IL\n      DnStateOrProvince: Ztate\n      DnLocality: Zity\n      DnOrganizationName: Amazon Test\n      DnOrganizationalUnitName: AWS\n      DnEmailAddress: aws-cdk-dev+delivlib-test@amazon.com\n      ExtendedKeyUsage: critical,codeSigning\n      KeyUsage: critical,digitalSignature\n      OutputBucket:\n        Ref: X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestBucketD81FB261\n    DependsOn:\n      - CreateCSRF0641C152BC0481E94BA7BF43F8BBDE3ServiceRoleDefaultPolicyA96A3559\n      - CreateCSRF0641C152BC0481E94BA7BF43F8BBDE3ServiceRoleA7AA2800\n    UpdateReplacePolicy: Delete\n    DeletionPolicy: Delete\n    Metadata:\n      aws:cdk:path: delivlib-test/X509CodeSigningKey/RSAPrivateKey/CertificateSigningRequest/ResourceV2/Default\n  
X509CodeSigningKey8DE65BF8:\n    Type: AWS::SSM::Parameter\n    Properties:\n      Description:\n        Fn::Join:\n          - \"\"\n          - - \"A PEM-encoded Code-Signing Certificate (private key in \"\n            - Fn::GetAtt:\n                - X509CodeSigningKeyRSAPrivateKeyResourceV2926395A0\n                - SecretArn\n            - )\n      Name: /delivlib-test/X509CodeSigningKey/Certificate\n      Type: String\n      Value:\n        Fn::GetAtt:\n          - X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestResourceV284A29392\n          - SelfSignedCertificate\n    Metadata:\n      aws:cdk:path: delivlib-test/X509CodeSigningKey/Resource/Resource\n  RSAPrivateKey517D342FA590447BB5255D06E403A406ServiceRoleFC773AAD:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: lambda.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/service-role/AWSLambdaBasicExecutionRole\n    Metadata:\n      aws:cdk:path: delivlib-test/RSAPrivate-Key517D342FA590447BB5255D06E403A406/ServiceRole/Resource\n  RSAPrivateKey517D342FA590447BB5255D06E403A406ServiceRoleDefaultPolicy27404286:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - secretsmanager:CreateSecret\n              - secretsmanager:DeleteSecret\n              - secretsmanager:UpdateSecret\n            Effect: Allow\n            Resource:\n              Fn::Join:\n                - \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - :secretsmanager:us-east-1:712950704752:secret:delivlib-test/X509CodeSigningKey/RSAPrivateKeyV2-??????\n        Version: \"2012-10-17\"\n      PolicyName: 
RSAPrivateKey517D342FA590447BB5255D06E403A406ServiceRoleDefaultPolicy27404286\n      Roles:\n        - Ref: RSAPrivateKey517D342FA590447BB5255D06E403A406ServiceRoleFC773AAD\n    Metadata:\n      aws:cdk:path: delivlib-test/RSAPrivate-Key517D342FA590447BB5255D06E403A406/ServiceRole/DefaultPolicy/Resource\n  RSAPrivateKey517D342FA590447BB5255D06E403A40698672B0B:\n    Type: AWS::Lambda::Function\n    Properties:\n      Code:\n        ImageUri:\n          Fn::Sub: 712950704752.dkr.ecr.us-east-1.${AWS::URLSuffix}/cdk-hnb659fds-container-assets-712950704752-us-east-1:33e4216cc004b0e5dd84308a4c6261df2f7d5b823c83d73bdc64386f486a3cfa\n      Description: Generates an RSA Private Key and stores it in AWS Secrets Manager\n      PackageType: Image\n      Role:\n        Fn::GetAtt:\n          - RSAPrivateKey517D342FA590447BB5255D06E403A406ServiceRoleFC773AAD\n          - Arn\n      Timeout: 300\n    DependsOn:\n      - RSAPrivateKey517D342FA590447BB5255D06E403A406ServiceRoleDefaultPolicy27404286\n      - RSAPrivateKey517D342FA590447BB5255D06E403A406ServiceRoleFC773AAD\n    Metadata:\n      aws:cdk:path: delivlib-test/RSAPrivate-Key517D342FA590447BB5255D06E403A406/Resource\n  CreateCSRF0641C152BC0481E94BA7BF43F8BBDE3ServiceRoleA7AA2800:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: lambda.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/service-role/AWSLambdaBasicExecutionRole\n    Metadata:\n      aws:cdk:path: delivlib-test/CreateCSRF0641C152BC0481E94BA7BF43F8BBDE3/ServiceRole/Resource\n  CreateCSRF0641C152BC0481E94BA7BF43F8BBDE3ServiceRoleDefaultPolicyA96A3559:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - 
Action:\n              - s3:GetObject*\n              - s3:GetBucket*\n              - s3:List*\n              - s3:DeleteObject*\n              - s3:PutObject\n              - s3:PutObjectLegalHold\n              - s3:PutObjectRetention\n              - s3:PutObjectTagging\n              - s3:PutObjectVersionTagging\n              - s3:Abort*\n            Effect: Allow\n            Resource:\n              - Fn::GetAtt:\n                  - X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestBucketD81FB261\n                  - Arn\n              - Fn::Join:\n                  - \"\"\n                  - - Fn::GetAtt:\n                        - X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestBucketD81FB261\n                        - Arn\n                    - /*\n          - Action: secretsmanager:GetSecretValue\n            Effect: Allow\n            Resource:\n              Fn::GetAtt:\n                - X509CodeSigningKeyRSAPrivateKeyResourceV2926395A0\n                - SecretArn\n        Version: \"2012-10-17\"\n      PolicyName: CreateCSRF0641C152BC0481E94BA7BF43F8BBDE3ServiceRoleDefaultPolicyA96A3559\n      Roles:\n        - Ref: CreateCSRF0641C152BC0481E94BA7BF43F8BBDE3ServiceRoleA7AA2800\n    Metadata:\n      aws:cdk:path: delivlib-test/CreateCSRF0641C152BC0481E94BA7BF43F8BBDE3/ServiceRole/DefaultPolicy/Resource\n  CreateCSRF0641C152BC0481E94BA7BF43F8BBDE313A5E125:\n    Type: AWS::Lambda::Function\n    Properties:\n      Architectures:\n        - x86_64\n      Code:\n        ImageUri:\n          Fn::Sub: 712950704752.dkr.ecr.us-east-1.${AWS::URLSuffix}/cdk-hnb659fds-container-assets-712950704752-us-east-1:a99ae5a80882a88b13b68b765335e155bde969a70e18d367944a1f5d84fab9ff\n      Description: Creates a Certificate Signing Request document for an x509 certificate\n      PackageType: Image\n      Role:\n        Fn::GetAtt:\n          - CreateCSRF0641C152BC0481E94BA7BF43F8BBDE3ServiceRoleA7AA2800\n          - Arn\n      Timeout: 300\n    DependsOn:\n  
    - CreateCSRF0641C152BC0481E94BA7BF43F8BBDE3ServiceRoleDefaultPolicyA96A3559\n      - CreateCSRF0641C152BC0481E94BA7BF43F8BBDE3ServiceRoleA7AA2800\n    Metadata:\n      aws:cdk:path: delivlib-test/CreateCSRF0641C152BC0481E94BA7BF43F8BBDE3/Resource\n  CustomS3AutoDeleteObjectsCustomResourceProviderRole3B1BD092:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Version: \"2012-10-17\"\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: lambda.amazonaws.com\n      ManagedPolicyArns:\n        - Fn::Sub: arn:${AWS::Partition}:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole\n    Metadata:\n      aws:cdk:path: delivlib-test/Custom::S3AutoDeleteObjectsCustomResourceProvider/Role\n  CustomS3AutoDeleteObjectsCustomResourceProviderHandler9D90184F:\n    Type: AWS::Lambda::Function\n    Properties:\n      Code:\n        S3Bucket: cdk-hnb659fds-assets-712950704752-us-east-1\n        S3Key: faa95a81ae7d7373f3e1f242268f904eb748d8d0fdd306e8a6fe515a1905a7d6.zip\n      Timeout: 900\n      MemorySize: 128\n      Handler: index.handler\n      Role:\n        Fn::GetAtt:\n          - CustomS3AutoDeleteObjectsCustomResourceProviderRole3B1BD092\n          - Arn\n      Runtime: nodejs20.x\n      Description:\n        Fn::Join:\n          - \"\"\n          - - \"Lambda function for auto-deleting objects in \"\n            - Ref: X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestBucketD81FB261\n            - \" S3 bucket.\"\n    DependsOn:\n      - CustomS3AutoDeleteObjectsCustomResourceProviderRole3B1BD092\n    Metadata:\n      aws:cdk:path: delivlib-test/Custom::S3AutoDeleteObjectsCustomResourceProvider/Handler\n  CodeSignCMKC986BB89:\n    Type: AWS::KMS::Key\n    Properties:\n      KeyPolicy:\n        Statement:\n          - Action: kms:*\n            Effect: Allow\n            Principal:\n              AWS:\n                Fn::Join:\n                  - 
\"\"\n                  - - \"arn:\"\n                    - Ref: AWS::Partition\n                    - :iam::712950704752:root\n            Resource: \"*\"\n          - Action:\n              - kms:Decrypt\n              - kms:GenerateDataKey\n            Condition:\n              StringEquals:\n                kms:ViaService: secretsmanager.us-east-1.amazonaws.com\n            Effect: Allow\n            Principal:\n              AWS:\n                Fn::GetAtt:\n                  - SingletonLambda2422BDC2DBB047C1B7015599E0849C54ServiceRole3FA81C88\n                  - Arn\n            Resource: \"*\"\n          - Action: kms:Decrypt\n            Effect: Allow\n            Principal:\n              AWS:\n                Fn::GetAtt:\n                  - CodeCommitPipelineMavenRoleC3A7769B\n                  - Arn\n            Resource: \"*\"\n          - Action: kms:Decrypt\n            Effect: Allow\n            Principal:\n              AWS:\n                Fn::GetAtt:\n                  - CodeCommitPipelineGitHubRole77F2217D\n                  - Arn\n            Resource: \"*\"\n        Version: \"2012-10-17\"\n    UpdateReplacePolicy: Retain\n    DeletionPolicy: Retain\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeSign-CMK/Resource\n  CodeSignResourceV25D0B3375:\n    Type: AWS::CloudFormation::CustomResource\n    Properties:\n      ServiceToken:\n        Fn::GetAtt:\n          - SingletonLambda2422BDC2DBB047C1B7015599E0849C541E98BE8B\n          - Arn\n      ResourceVersion: H7YGMVLuZ+QukZc2cxaT6CX6eZHRf2OHSk/h85HtX74=\n      Identity: aws-cdk-dev\n      Email: aws-cdk-dev+delivlib@amazon.com\n      Expiry: 4y\n      KeySizeBits: 4096\n      SecretName: delivlib-test/CodeSignV2\n      KeyArn:\n        Fn::GetAtt:\n          - CodeSignCMKC986BB89\n          - Arn\n      Version: 0\n      DeleteImmediately: true\n    UpdateReplacePolicy: Delete\n    DeletionPolicy: Delete\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeSign/ResourceV2/Default\n  
CodeSignPrincipal30E4C212:\n    Type: AWS::SSM::Parameter\n    Properties:\n      Description:\n        Fn::Join:\n          - \"\"\n          - - \"The public part of the OpenPGP key in \"\n            - Fn::GetAtt:\n                - CodeSignResourceV25D0B3375\n                - SecretArn\n      Name: /delivlib-test/CodeSign.pub\n      Type: String\n      Value:\n        Fn::GetAtt:\n          - CodeSignResourceV25D0B3375\n          - PublicKey\n    Metadata:\n      aws:cdk:path: delivlib-test/CodeSign/Principal/Resource\n  SingletonLambda2422BDC2DBB047C1B7015599E0849C54ServiceRole3FA81C88:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: lambda.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/service-role/AWSLambdaBasicExecutionRole\n    Metadata:\n      aws:cdk:path: delivlib-test/SingletonLambda2422BDC2DBB047C1B7015599E0849C54/ServiceRole/Resource\n  SingletonLambda2422BDC2DBB047C1B7015599E0849C54ServiceRoleDefaultPolicyAD608560:\n    Type: AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action:\n              - secretsmanager:CreateSecret\n              - secretsmanager:GetSecretValue\n              - secretsmanager:UpdateSecret\n              - secretsmanager:DeleteSecret\n            Effect: Allow\n            Resource:\n              Fn::Join:\n                - \"\"\n                - - \"arn:\"\n                  - Ref: AWS::Partition\n                  - :secretsmanager:us-east-1:712950704752:secret:delivlib-test/CodeSignV2-??????\n          - Action: ssm:DeleteParameter\n            Effect: Allow\n            Resource: \"*\"\n        Version: \"2012-10-17\"\n      PolicyName: 
SingletonLambda2422BDC2DBB047C1B7015599E0849C54ServiceRoleDefaultPolicyAD608560\n      Roles:\n        - Ref: SingletonLambda2422BDC2DBB047C1B7015599E0849C54ServiceRole3FA81C88\n    Metadata:\n      aws:cdk:path: delivlib-test/SingletonLambda2422BDC2DBB047C1B7015599E0849C54/ServiceRole/DefaultPolicy/Resource\n  SingletonLambda2422BDC2DBB047C1B7015599E0849C541E98BE8B:\n    Type: AWS::Lambda::Function\n    Properties:\n      Code:\n        ImageUri:\n          Fn::Sub: 712950704752.dkr.ecr.us-east-1.${AWS::URLSuffix}/cdk-hnb659fds-container-assets-712950704752-us-east-1:108ee4cd321c857649aa8ee9ebd7fcf58be11944ca8dc179ca51ff5bf28671fe\n      Description: Generates an OpenPGP Key and stores the private key in Secrets Manager and the public key in an SSM Parameter\n      PackageType: Image\n      Role:\n        Fn::GetAtt:\n          - SingletonLambda2422BDC2DBB047C1B7015599E0849C54ServiceRole3FA81C88\n          - Arn\n      Timeout: 300\n    DependsOn:\n      - SingletonLambda2422BDC2DBB047C1B7015599E0849C54ServiceRoleDefaultPolicyAD608560\n      - SingletonLambda2422BDC2DBB047C1B7015599E0849C54ServiceRole3FA81C88\n    Metadata:\n      aws:cdk:path: delivlib-test/SingletonLambda2422BDC2DBB047C1B7015599E0849C54/Resource\n  BucketNotificationsHandler050a0587b7544547bf325f094a3db834RoleB6FB88EC:\n    Type: AWS::IAM::Role\n    Properties:\n      AssumeRolePolicyDocument:\n        Statement:\n          - Action: sts:AssumeRole\n            Effect: Allow\n            Principal:\n              Service: lambda.amazonaws.com\n        Version: \"2012-10-17\"\n      ManagedPolicyArns:\n        - Fn::Join:\n            - \"\"\n            - - \"arn:\"\n              - Ref: AWS::Partition\n              - :iam::aws:policy/service-role/AWSLambdaBasicExecutionRole\n    Metadata:\n      aws:cdk:path: delivlib-test/BucketNotificationsHandler050a0587b7544547bf325f094a3db834/Role/Resource\n  BucketNotificationsHandler050a0587b7544547bf325f094a3db834RoleDefaultPolicy2CF63D36:\n    Type: 
AWS::IAM::Policy\n    Properties:\n      PolicyDocument:\n        Statement:\n          - Action: s3:PutBucketNotification\n            Effect: Allow\n            Resource: \"*\"\n        Version: \"2012-10-17\"\n      PolicyName: BucketNotificationsHandler050a0587b7544547bf325f094a3db834RoleDefaultPolicy2CF63D36\n      Roles:\n        - Ref: BucketNotificationsHandler050a0587b7544547bf325f094a3db834RoleB6FB88EC\n    Metadata:\n      aws:cdk:path: delivlib-test/BucketNotificationsHandler050a0587b7544547bf325f094a3db834/Role/DefaultPolicy/Resource\n  BucketNotificationsHandler050a0587b7544547bf325f094a3db8347ECC3691:\n    Type: AWS::Lambda::Function\n    Properties:\n      Description: AWS CloudFormation handler for \"Custom::S3BucketNotifications\" resources (@aws-cdk/aws-s3)\n      Code:\n        ZipFile: |-\n          import boto3  # type: ignore\n          import json\n          import logging\n          import urllib.request\n\n          s3 = boto3.client(\"s3\")\n\n          EVENTBRIDGE_CONFIGURATION = 'EventBridgeConfiguration'\n          CONFIGURATION_TYPES = [\"TopicConfigurations\", \"QueueConfigurations\", \"LambdaFunctionConfigurations\"]\n\n          def handler(event: dict, context):\n            response_status = \"SUCCESS\"\n            error_message = \"\"\n            try:\n              props = event[\"ResourceProperties\"]\n              notification_configuration = props[\"NotificationConfiguration\"]\n              managed = props.get('Managed', 'true').lower() == 'true'\n              stack_id = event['StackId']\n              old = event.get(\"OldResourceProperties\", {}).get(\"NotificationConfiguration\", {})\n              if managed:\n                config = handle_managed(event[\"RequestType\"], notification_configuration)\n              else:\n                config = handle_unmanaged(props[\"BucketName\"], stack_id, event[\"RequestType\"], notification_configuration, old)\n              
s3.put_bucket_notification_configuration(Bucket=props[\"BucketName\"], NotificationConfiguration=config)\n            except Exception as e:\n              logging.exception(\"Failed to put bucket notification configuration\")\n              response_status = \"FAILED\"\n              error_message = f\"Error: {str(e)}. \"\n            finally:\n              submit_response(event, context, response_status, error_message)\n\n          def handle_managed(request_type, notification_configuration):\n            if request_type == 'Delete':\n              return {}\n            return notification_configuration\n\n          def handle_unmanaged(bucket, stack_id, request_type, notification_configuration, old):\n            def get_id(n):\n              n['Id'] = ''\n              strToHash=json.dumps(n, sort_keys=True).replace('\"Name\": \"prefix\"', '\"Name\": \"Prefix\"').replace('\"Name\": \"suffix\"', '\"Name\": \"Suffix\"')\n              return f\"{stack_id}-{hash(strToHash)}\"\n            def with_id(n):\n              n['Id'] = get_id(n)\n              return n\n\n            external_notifications = {}\n            existing_notifications = s3.get_bucket_notification_configuration(Bucket=bucket)\n            for t in CONFIGURATION_TYPES:\n              if request_type == 'Update':\n                  old_incoming_ids = [get_id(n) for n in old.get(t, [])]\n                  external_notifications[t] = [n for n in existing_notifications.get(t, []) if not get_id(n) in old_incoming_ids]      \n              elif request_type == 'Delete':\n                  external_notifications[t] = [n for n in existing_notifications.get(t, []) if not n['Id'].startswith(f\"{stack_id}-\")]\n              elif request_type == 'Create':\n                  external_notifications[t] = [n for n in existing_notifications.get(t, [])]\n            if EVENTBRIDGE_CONFIGURATION in existing_notifications:\n              external_notifications[EVENTBRIDGE_CONFIGURATION] = 
existing_notifications[EVENTBRIDGE_CONFIGURATION]\n\n            if request_type == 'Delete':\n              return external_notifications\n\n            notifications = {}\n            for t in CONFIGURATION_TYPES:\n              external = external_notifications.get(t, [])\n              incoming = [with_id(n) for n in notification_configuration.get(t, [])]\n              notifications[t] = external + incoming\n\n            if EVENTBRIDGE_CONFIGURATION in notification_configuration:\n              notifications[EVENTBRIDGE_CONFIGURATION] = notification_configuration[EVENTBRIDGE_CONFIGURATION]\n            elif EVENTBRIDGE_CONFIGURATION in external_notifications:\n              notifications[EVENTBRIDGE_CONFIGURATION] = external_notifications[EVENTBRIDGE_CONFIGURATION]\n\n            return notifications\n\n          def submit_response(event: dict, context, response_status: str, error_message: str):\n            response_body = json.dumps(\n              {\n                \"Status\": response_status,\n                \"Reason\": f\"{error_message}See the details in CloudWatch Log Stream: {context.log_stream_name}\",\n                \"PhysicalResourceId\": event.get(\"PhysicalResourceId\") or event[\"LogicalResourceId\"],\n                \"StackId\": event[\"StackId\"],\n                \"RequestId\": event[\"RequestId\"],\n                \"LogicalResourceId\": event[\"LogicalResourceId\"],\n                \"NoEcho\": False,\n              }\n            ).encode(\"utf-8\")\n            headers = {\"content-type\": \"\", \"content-length\": str(len(response_body))}\n            try:\n              req = urllib.request.Request(url=event[\"ResponseURL\"], headers=headers, data=response_body, method=\"PUT\")\n              with urllib.request.urlopen(req) as response:\n                print(response.read().decode(\"utf-8\"))\n              print(\"Status code: \" + response.reason)\n            except Exception as e:\n                print(\"send(..) 
failed executing request.urlopen(..): \" + str(e))\n      Handler: index.handler\n      Role:\n        Fn::GetAtt:\n          - BucketNotificationsHandler050a0587b7544547bf325f094a3db834RoleB6FB88EC\n          - Arn\n      Runtime: python3.11\n      Timeout: 300\n    DependsOn:\n      - BucketNotificationsHandler050a0587b7544547bf325f094a3db834RoleDefaultPolicy2CF63D36\n      - BucketNotificationsHandler050a0587b7544547bf325f094a3db834RoleB6FB88EC\n    Metadata:\n      aws:cdk:path: delivlib-test/BucketNotificationsHandler050a0587b7544547bf325f094a3db834/Resource\nOutputs:\n  CodeCommitPipelineChangeControllerChangeControlBucketKeyCA921D21:\n    Value: change-control.ics\n  CodeCommitPipelineChangeControllerChangeControlBucket707A9E21:\n    Value:\n      Ref: CodeCommitPipelineChangeControllerCalendar94B1DEA8\n  X509CodeSigningKeyCSR5137C5A3:\n    Description: A PEM-encoded Certificate Signing Request for a Code-Signing Certificate\n    Value:\n      Fn::GetAtt:\n        - X509CodeSigningKeyRSAPrivateKeyCertificateSigningRequestResourceV284A29392\n        - CSR\nParameters:\n  BootstrapVersion:\n    Type: AWS::SSM::Parameter::Value<String>\n    Default: /cdk-bootstrap/hnb659fds/version\n    Description: Version of the CDK Bootstrap resources in this environment, automatically retrieved from SSM Parameter Store. [cdk:skip]\n\n"
  },
  {
    "path": "lib/__tests__/integ.delivlib.ts",
    "content": "import * as cdk from 'aws-cdk-lib';\nimport { TestStack } from './test-stack';\n\n\nconst stackName = process.env.TEST_STACK_NAME;\nif (!stackName) {\n  throw new Error('TEST_STACK_NAME must be defined');\n}\n\nconst app = new cdk.App();\nnew TestStack(app, stackName, {\n  env: { region: 'us-east-1', account: '712950704752' },\n});\napp.synth();\n"
  },
  {
    "path": "lib/__tests__/open-pgp-key-pair.test.ts",
    "content": "import {\n  App, Stack,\n  aws_kms as kms,\n} from 'aws-cdk-lib';\nimport { Template, Match } from 'aws-cdk-lib/assertions';\nimport { OpenPGPKeyPair } from '../../lib/open-pgp-key-pair';\n\n\ntest('correctly creates', () => {\n  // GIVEN\n  const stack = new Stack(new App(), 'TestStack');\n  const encryptionKey = new kms.Key(stack, 'CMK');\n  // WHEN\n  new OpenPGPKeyPair(stack, 'Secret', {\n    email: 'nobody@nowhere.com',\n    encryptionKey,\n    expiry: '1d',\n    identity: 'Test',\n    keySizeBits: 1_024,\n    pubKeyParameterName: 'TestParameter',\n    secretName: 'SecretName',\n    version: 0,\n  });\n  const template = Template.fromStack(stack);\n\n  // THEN\n  template.hasResourceProperties('AWS::CloudFormation::CustomResource', Match.objectLike({\n    Identity: 'Test',\n    Email: 'nobody@nowhere.com',\n    Expiry: '1d',\n    KeySizeBits: 1024,\n    SecretName: 'SecretName',\n    KeyArn: stack.resolve(encryptionKey.keyArn),\n    Version: 0,\n    DeleteImmediately: false,\n  }));\n});\n\ntest('correctly forwards parameter name', () => {\n  // GIVEN\n  const stack = new Stack(new App(), 'TestStack');\n  const parameterName = 'TestParameterName';\n\n  // WHEN\n  new OpenPGPKeyPair(stack, 'Secret', {\n    pubKeyParameterName: parameterName,\n    email: 'nobody@nowhere.com',\n    encryptionKey: new kms.Key(stack, 'CMK'),\n    expiry: '1d',\n    identity: 'Test',\n    keySizeBits: 1_024,\n    secretName: 'SecretName',\n    version: 0,\n  });\n  const template = Template.fromStack(stack);\n\n  // THEN\n  template.hasResourceProperties('AWS::SSM::Parameter', {\n    Type: 'String',\n    Value: { 'Fn::GetAtt': ['SecretResourceV2A1B4436D', 'PublicKey'] },\n    Name: parameterName,\n  });\n});\n\ntest('Handler has appropriate permissions', () => {\n  // GIVEN\n  const stack = new Stack(new App(), 'TestStack');\n\n  // WHEN\n  new OpenPGPKeyPair(stack, 'Secret', {\n    pubKeyParameterName: '/Foo',\n    email: 'nobody@nowhere.com',\n    encryptionKey: 
new kms.Key(stack, 'CMK'),\n    expiry: '1d',\n    identity: 'Test',\n    keySizeBits: 1_024,\n    secretName: 'Bar',\n    version: 0,\n  });\n  const template = Template.fromStack(stack);\n\n  // THEN\n  template.hasResourceProperties('AWS::IAM::Policy', {\n    PolicyDocument: {\n      Version: '2012-10-17',\n      Statement: [{\n        Effect: 'Allow',\n        Action: [\n          'secretsmanager:CreateSecret',\n          'secretsmanager:GetSecretValue',\n          'secretsmanager:UpdateSecret',\n          'secretsmanager:DeleteSecret',\n        ],\n        Resource: {\n          'Fn::Join': ['',\n            ['arn:', { Ref: 'AWS::Partition' }, ':secretsmanager:', { Ref: 'AWS::Region' }, ':', { Ref: 'AWS::AccountId' }, ':secret:Bar-??????']],\n        },\n      }, {\n        Effect: 'Allow',\n        Action: 'ssm:DeleteParameter',\n        Resource: '*',\n      }],\n    },\n    PolicyName: 'SingletonLambda2422BDC2DBB047C1B7015599E0849C54ServiceRoleDefaultPolicyAD608560',\n    Roles: [{ Ref: 'SingletonLambda2422BDC2DBB047C1B7015599E0849C54ServiceRole3FA81C88' }],\n  });\n\n  template.hasResourceProperties('AWS::KMS::Key', {\n    KeyPolicy: {\n      Statement: Match.arrayWith([Match.objectLike({\n        // The key administration enabler statement -- exact content is irrelevant\n        Resource: '*',\n      }), Match.objectLike({\n        Effect: 'Allow',\n        Action: ['kms:Decrypt', 'kms:GenerateDataKey'],\n        Resource: '*',\n        Condition: {\n          StringEquals: { 'kms:ViaService': { 'Fn::Join': ['', ['secretsmanager.', { Ref: 'AWS::Region' }, '.amazonaws.com']] } },\n        },\n        Principal: { AWS: { 'Fn::GetAtt': ['SingletonLambda2422BDC2DBB047C1B7015599E0849C54ServiceRole3FA81C88', 'Arn'] } },\n      })]),\n    },\n  });\n});\n"
  },
  {
    "path": "lib/__tests__/package-integrity/handler/__fixtures__/.gitignore",
    "content": "!.projenrc.js\n\n"
  },
  {
    "path": "lib/__tests__/package-integrity/handler/__fixtures__/non-projen-project/package.json",
    "content": "{\n  \"name\": \"non-projen-project\",\n  \"private\": true,\n  \"comment\": \"Should fail because no .projen directory exists\"\n}"
  },
  {
    "path": "lib/__tests__/package-integrity/handler/__fixtures__/non-projen-project/yarn.lock",
    "content": "This file is only validated for existence, not content.\nSo it doesn't matter what we put here."
  },
  {
    "path": "lib/__tests__/package-integrity/handler/__fixtures__/non-yarn-project/.projenrc.js",
    "content": "const path = require('path');\nconst { cdk, javascript } = require('projen');\n\n// see https://github.com/projen/projen/issues/1356\nconst projenVersion = require(path.join(require.resolve('projen'), '..', '..', 'package.json')).version;\n\nconst project = new cdk.JsiiProject({\n  defaultReleaseBranch: 'main',\n  name: 'non-yarn-project',\n  author: 'dummy',\n  authorAddress: 'dummy@example.com',\n  repositoryUrl: 'dummy',\n  projenVersion,\n  packageManager: javascript.NodePackageManager.NPM,\n  jsiiVersion: '^5'\n});\n\nproject.package.addField('private', true);\n\nproject.synth();\n\n"
  },
  {
    "path": "lib/__tests__/package-integrity/handler/__fixtures__/projen-jsii-project/.projenrc.js",
    "content": "const path = require('path');\nconst { cdk } = require('projen');\n\n// see https://github.com/projen/projen/issues/1356\nconst projenVersion = require(path.join(require.resolve('projen'), '..', '..', 'package.json')).version;\n\nconst project = new cdk.JsiiProject({\n  defaultReleaseBranch: 'main',\n  name: 'projen-jsii-project',\n  publishToPypi: {\n    distName: 'projen-jsii-project',\n    module: 'projen_jsii_project',\n  },\n  author: 'dummy',\n  authorAddress: 'dummy@example.com',\n  repositoryUrl: 'dummy',\n  projenVersion,\n  jsiiVersion: '^5',\n  tsconfigDev: {\n    compilerOptions: {\n      types: ['jest', 'node'],\n    },\n  },\n});\n\nproject.package.addField('private', true);\n\nproject.synth();\n\n"
  },
  {
    "path": "lib/__tests__/package-integrity/handler/__fixtures__/projen-non-jsii-project/.projenrc.js",
    "content": "const path = require('path');\nconst { typescript } = require('projen');\n\n// see https://github.com/projen/projen/issues/1356\nconst projenVersion = require(path.join(require.resolve('projen'), '..', '..', 'package.json')).version;\n\nconst project = new typescript.TypeScriptProject({\n  defaultReleaseBranch: 'main',\n  name: 'projen-non-jsii-project',\n  author: 'dummy',\n  authorAddress: 'dummy@example.com',\n  repositoryUrl: 'dummy',\n  projenVersion,\n  tsconfigDev: {\n    compilerOptions: {\n      types: ['jest', 'node'],\n    },\n  },\n});\n\nproject.package.addField('private', true);\n\nproject.synth();\n\n"
  },
  {
    "path": "lib/__tests__/package-integrity/handler/integrity.test.ts",
    "content": "import * as child from 'child_process';\nimport * as os from 'os';\nimport * as path from 'path';\nimport AdmZip from 'adm-zip';\nimport * as fs from 'fs-extra';\nimport * as tar from 'tar';\nimport { NpmArtifactIntegrity, PublishedPackage, PyPIArtifactIntegrity, RepositoryIntegrity } from '../../../package-integrity/handler/integrity';\nimport { Repository } from '../../../package-integrity/handler/repository';\n\n// these test might take some time since they run jsii-pacmak...\njest.setTimeout(5 * 60 * 1000);\n\nfunction fixturePath(name: string) {\n  return path.join(__dirname, '__fixtures__', name);\n}\n\ntype Download = (pkg: PublishedPackage, target: string) => void;\n\ninterface IntegrityInputs {\n  npmDownload: Download;\n  pypiDownload: Download;\n  repo: Repository;\n}\n\nfunction generateProject(fixture: string): string {\n\n  const tempdir = fs.mkdtempSync(path.join(os.tmpdir(), path.sep));\n  const repoDir = path.join(tempdir, fixture);\n  fs.mkdirSync(repoDir);\n  fs.copySync(fixturePath(fixture), repoDir);\n\n  const shell = (command: string) => child.execSync(command, { cwd: repoDir, stdio: ['ignore', 'inherit', 'inherit'] });\n\n  // we need CI=false since projen defaults to CI=true in a GitHub\n  // environment, and project generation normally happens outside of CI.\n  const projen = () => shell(`CI=false ${path.join(require.resolve('projen'), '..', '..', 'bin', 'projen')}`);\n\n  const isProjen = fs.existsSync(path.join(repoDir, '.projenrc.js'));\n\n  if (isProjen) {\n    // project is created with only .projenrc.js and sometimes it doesn't\n    // yarn install correctly, if this happens try to yarn install again\n    try {\n      projen();\n    } catch (e) {\n      shell('yarn install --check-files');\n      projen();\n    }\n  }\n\n  return repoDir;\n}\n\nfunction createIntegrity(inputs: IntegrityInputs) {\n\n  jest.spyOn<any, any>(NpmArtifactIntegrity.prototype, 'download').mockImplementation(inputs.npmDownload as any);\n  
jest.spyOn<any, any>(PyPIArtifactIntegrity.prototype, 'download').mockImplementation(inputs.pypiDownload as any);\n\n  // we don't need a pack task since we prepack in the test\n  return new RepositoryIntegrity({ repository: inputs.repo, packCommand: 'echo success' });\n\n}\n\n/**\n * Helper class to cache packed repositories since it takes a long.\n */\nclass Repositories {\n\n  private _jsii: Repository | undefined;\n  private _ts: Repository | undefined;\n\n  public async jsii(): Promise<Repository> {\n    if (!this._jsii) {\n      const repoDir = generateProject('projen-jsii-project');\n      this._jsii = await Repository.fromDir({ repoDir });\n      this._jsii.pack('npx projen build');\n    }\n    return this._jsii!;\n  }\n\n  public async ts(): Promise<Repository> {\n    if (!this._ts) {\n      const repoDir = generateProject('projen-non-jsii-project');\n      this._ts = await Repository.fromDir({ repoDir });\n      this._ts.pack('npx projen build');\n    }\n    return this._ts!;\n  }\n\n  public clean() {\n    if (this._jsii) {\n      fs.removeSync(this._jsii.repoDir);\n    }\n    if (this._ts) {\n      fs.removeSync(this._ts.repoDir);\n    }\n  }\n\n}\n\nconst repositories = new Repositories();\n\nbeforeEach(() => {\n  jest.restoreAllMocks();\n});\n\nafterAll(() => {\n  repositories.clean();\n});\n\ntest('happy jsii', async () => {\n\n  const repo = await repositories.jsii();\n  const repoDir = repo.repoDir;\n\n  const npmDownload = async (pkg: PublishedPackage, targetFile: string) => {\n    const dist = path.join(repoDir, 'dist');\n    const name = `${pkg.name}@${pkg.version}.jsii.tgz`;\n    fs.copySync(path.join(dist, 'js', name), targetFile);\n  };\n\n  const pypiDownload = async (pkg: PublishedPackage, targetFile: string) => {\n    const dist = path.join(repoDir, 'dist');\n    const name = `${pkg.name}-${pkg.version}-py3-none-any.whl`;\n    fs.copySync(path.join(dist, 'python', name), targetFile);\n  };\n\n  const integrity = createIntegrity({ repo: 
repo, npmDownload, pypiDownload });\n  await integrity.validate();\n\n});\n\ntest('unhappy npm jsii', async () => {\n\n  const repo = await repositories.jsii();\n  const repoDir = repo.repoDir;\n\n  const npmDownload = (_: PublishedPackage, targetFile: string) => {\n    tar.create({ file: targetFile, gzip: true, sync: true },\n      [path.join(repoDir, 'package.json')],\n    );\n  };\n\n  const pypiDownload = (pkg: PublishedPackage, targetFile: string) => {\n    const dist = path.join(repoDir, 'dist');\n    const name = `${pkg.name}-${pkg.version}-py3-none-any.whl`;\n    fs.copySync(path.join(dist, 'python', name), targetFile);\n  };\n\n  const integrity = createIntegrity({ repo: repo, npmDownload, pypiDownload });\n  return expect(integrity.validate()).rejects.toThrow('NpmArtifactIntegrity validation failed');\n\n});\n\ntest('unhappy pypi jsii', async () => {\n\n  const repo = await repositories.jsii();\n  const repoDir = repo.repoDir;\n\n  const npmDownload = (pkg: PublishedPackage, targetFile: string) => {\n    const dist = path.join(repoDir, 'dist');\n    const name = `${pkg.name}@${pkg.version}.jsii.tgz`;\n    fs.copySync(path.join(dist, 'js', name), targetFile);\n  };\n\n  const pypiDownload = (_: PublishedPackage, targetFile: string) => {\n    const whl = new AdmZip();\n    whl.addLocalFile(path.join(repoDir, 'package.json'));\n    whl.writeZip(targetFile);\n  };\n\n  const integrity = createIntegrity({ repo: repo, npmDownload, pypiDownload });\n  return expect(integrity.validate()).rejects.toThrowError('PyPIArtifactIntegrity validation failed');\n\n});\n\ntest('happy ts', async () => {\n\n  const repo = await repositories.ts();\n  const repoDir = repo.repoDir;\n\n  const npmDownload = (pkg: PublishedPackage, targetFile: string) => {\n    const dist = path.join(repoDir, 'dist');\n    const name = `${pkg.name}-${pkg.version}.tgz`;\n    fs.copySync(path.join(dist, 'js', name), targetFile);\n  };\n\n  const pypiDownload = (pkg: PublishedPackage, targetFile: 
string) => {\n    const dist = path.join(repoDir, 'dist');\n    const name = `${pkg.name}-${pkg.version}-py3-none-any.whl`;\n    fs.copySync(path.join(dist, 'python', name), targetFile);\n  };\n\n  const integrity = createIntegrity({ repo: repo, npmDownload, pypiDownload });\n  await integrity.validate();\n\n});\n\ntest('only projen projects are supported', async () => {\n  const repoDir = generateProject('non-projen-project');\n  return expect(Repository.fromDir({ repoDir })).rejects.toThrowError('Only projen managed repositories are supported at this time');\n});\n\ntest('only yarn projects are supported', async () => {\n  const repoDir = generateProject('non-yarn-project');\n  return expect(Repository.fromDir({ repoDir })).rejects.toThrowError('Only yarn managed repositories are supported at this time');\n});\n"
  },
  {
    "path": "lib/__tests__/package-integrity/integrity.test.ts",
    "content": "import {\n  App, Duration, Stack,\n  aws_codebuild as codebuild,\n  aws_secretsmanager as sm,\n} from 'aws-cdk-lib';\nimport { Template, Match } from 'aws-cdk-lib/assertions';\nimport { LinuxPlatform, PackageIntegrityValidation } from '../..';\n\ntest('creates a codebuild project that triggers daily and runs the integrity handler', () => {\n  const stack = new Stack(new App(), 'TestStack');\n  const token = sm.Secret.fromSecretCompleteArn(stack, 'GitHubSecret', 'arn:aws:secretsmanager:us-east-1:123456789123:secret:github-token-000000');\n\n  new PackageIntegrityValidation(stack, 'Integrity', {\n    buildPlatform: new LinuxPlatform(codebuild.LinuxBuildImage.fromDockerRegistry('jsii/superchain:1-bullseye-slim-node14')),\n    githubTokenSecret: token,\n    repository: 'cdklabs/some-repo',\n  });\n\n  const template = Template.fromStack(stack);\n  template.hasResourceProperties('AWS::Events::Rule', {\n    ScheduleExpression: 'rate(1 day)',\n    State: 'ENABLED',\n    Targets: [\n      {\n        Arn: {\n          'Fn::GetAtt': [\n            'IntegrityD83C2C0B',\n            'Arn',\n          ],\n        },\n        Id: 'Target0',\n        RoleArn: {\n          'Fn::GetAtt': [\n            'IntegrityEventsRole1990400F',\n            'Arn',\n          ],\n        },\n      },\n    ],\n  });\n\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Environment: {\n      EnvironmentVariables: Match.arrayWith([\n        {\n          Name: 'GITHUB_REPOSITORY',\n          Type: 'PLAINTEXT',\n          Value: 'cdklabs/some-repo',\n        },\n        {\n          Name: 'TAG_PREFIX',\n          Type: 'PLAINTEXT',\n          Value: '',\n        },\n        {\n          Name: 'GITHUB_TOKEN_ARN',\n          Type: 'PLAINTEXT',\n          Value: 'arn:aws:secretsmanager:us-east-1:123456789123:secret:github-token-000000',\n        },\n      ]),\n    },\n  });\n});\n\ntest('can pass environment variables to the integrity handler code build project', () => 
{\n  const stack = new Stack(new App(), 'TestStack');\n  const token = sm.Secret.fromSecretCompleteArn(stack, 'GitHubSecret', 'arn:aws:secretsmanager:us-east-1:123456789123:secret:github-token-000000');\n\n  new PackageIntegrityValidation(stack, 'Integrity', {\n    buildPlatform: new LinuxPlatform(codebuild.LinuxBuildImage.fromDockerRegistry('jsii/superchain:1-bullseye-slim-node14')),\n    githubTokenSecret: token,\n    repository: 'cdklabs/some-repo',\n    environment: {\n      FOO: 'bar',\n    },\n    environmentSecrets: {\n      SECRET: 'arn:aws:secretsmanager:us-east-1:123456789123:secret:super-secret-token-000000',\n    },\n  });\n\n  const template = Template.fromStack(stack);\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Environment: {\n      EnvironmentVariables: Match.arrayWith([\n        {\n          Name: 'FOO',\n          Type: 'PLAINTEXT',\n          Value: 'bar',\n        },\n        {\n          Name: 'SECRET',\n          Type: 'SECRETS_MANAGER',\n          Value: 'super-secret-token',\n        },\n      ]),\n    },\n  });\n});\n\ntest('can configure alarm properties', () => {\n  const stack = new Stack(new App(), 'TestStack');\n  const token = sm.Secret.fromSecretCompleteArn(stack, 'GitHubSecret', 'arn:aws:secretsmanager:us-east-1:123456789123:secret:github-token-000000');\n\n  new PackageIntegrityValidation(stack, 'Integrity', {\n    buildPlatform: new LinuxPlatform(codebuild.LinuxBuildImage.fromDockerRegistry('jsii/superchain:1-bullseye-slim-node14')),\n    githubTokenSecret: token,\n    repository: 'cdklabs/some-repo',\n    environment: {\n      FOO: 'bar',\n    },\n    environmentSecrets: {\n      SECRET: 'arn:aws:secretsmanager:us-east-1:123456789123:secret:super-secret-token-000000',\n    },\n    rate: Duration.seconds(60),\n    consecutiveFailuresToAlarm: 5,\n  });\n\n  const template = Template.fromStack(stack);\n  template.hasResourceProperties('AWS::CloudWatch::Alarm', {\n    EvaluationPeriods: 5,\n    Period: 60,\n  
});\n});\n"
  },
  {
    "path": "lib/__tests__/pipeline-notifications/chime.test.ts",
    "content": "import {\n  App, Stack,\n  aws_codecommit as codecommit,\n} from 'aws-cdk-lib';\nimport { Capture, Template, Match } from 'aws-cdk-lib/assertions';\nimport { Pipeline, CodeCommitRepo, ChimeNotification } from '../../../lib';\n\ndescribe('chime notifications', () => {\n  test('failure notification via chime', () => {\n    // GIVEN\n    const stack = new Stack(new App(), 'TestStack');\n    const pipe = new Pipeline(stack, 'Pipeline', {\n      repo: new CodeCommitRepo(new codecommit.Repository(stack, 'Repo1', { repositoryName: 'test' })),\n    });\n\n    // WHEN\n    pipe.notifyOnFailure(new ChimeNotification({\n      webhookUrls: ['url-1'],\n    }));\n\n    const template = Template.fromStack(stack);\n    const inputTemplateCapture = new Capture();\n\n    // THEN\n    template.hasResourceProperties('AWS::Events::Rule', {\n      EventPattern: {\n        source: ['aws.codepipeline'],\n        resources: [\n          stack.resolve(pipe.pipeline.pipelineArn),\n        ],\n      },\n      Targets: Match.arrayWith([\n        Match.objectLike({\n          InputTransformer: {\n            InputPathsMap: {\n              detail: '$.detail',\n            },\n            InputTemplate: inputTemplateCapture,\n          },\n        }),\n      ]),\n    });\n\n    expect(inputTemplateCapture.asString()).toContain('url-1');\n  });\n\n  test('multiple chime notifications', () => {\n    // GIVEN\n    const stack = new Stack(new App(), 'TestStack');\n    const pipe = new Pipeline(stack, 'Pipeline', {\n      repo: new CodeCommitRepo(new codecommit.Repository(stack, 'Repo1', { repositoryName: 'test' })),\n    });\n\n\n    // WHEN\n    pipe.notifyOnFailure(new ChimeNotification({\n      webhookUrls: ['url-1'],\n    }));\n\n    pipe.notifyOnFailure(new ChimeNotification({\n      webhookUrls: ['url-2'],\n    }));\n    const template = Template.fromStack(stack);\n    const inputTemplateCapture = new Capture();\n\n    // THEN\n    
template.hasResourceProperties('AWS::Events::Rule', {\n      EventPattern: {\n        'source': [\n          'aws.codepipeline',\n        ],\n        'detail-type': [\n          'CodePipeline Pipeline Execution State Change',\n        ],\n      },\n      Targets: Match.arrayWith([\n        Match.objectLike({\n          InputTransformer: {\n            InputPathsMap: {\n              detail: '$.detail',\n            },\n            InputTemplate: inputTemplateCapture,\n          },\n        }),\n      ]),\n    });\n    expect(inputTemplateCapture.asString()).toContain('url-1');\n    inputTemplateCapture.next();\n    expect(inputTemplateCapture.asString()).toContain('url-2');\n  });\n});\n"
  },
  {
    "path": "lib/__tests__/pipeline-notifications/slack.test.ts",
    "content": "import {\n  App, Stack,\n  aws_codecommit as codecommit,\n  aws_chatbot as chatbot,\n} from 'aws-cdk-lib';\nimport { Template } from 'aws-cdk-lib/assertions';\nimport { Pipeline, CodeCommitRepo, SlackNotification } from '../../../lib';\n\ndescribe('slack notifications', () => {\n  test('failure notification via slack', () => {\n    // GIVEN\n    const stack = new Stack(new App(), 'TestStack');\n    const slackChannel = new chatbot.SlackChannelConfiguration(stack, 'notify', {\n      slackChannelConfigurationName: 'test-slack-config',\n      slackChannelId: 'test-channel-id',\n      slackWorkspaceId: 'test-workspace-id',\n    });\n    const pipe = new Pipeline(stack, 'Pipeline', {\n      repo: new CodeCommitRepo(new codecommit.Repository(stack, 'Repo1', { repositoryName: 'test' })),\n    });\n\n    // WHEN\n    pipe.notifyOnFailure(new SlackNotification({ channels: [slackChannel] }));\n    const template = Template.fromStack(stack);\n\n    // THEN\n    template.hasResourceProperties('AWS::CodeStarNotifications::NotificationRule', {\n      DetailType: 'BASIC',\n      EventTypeIds: ['codepipeline-pipeline-action-execution-failed'],\n      Name: {\n        'Fn::Join': [\n          '',\n          [\n            {\n              Ref: 'PipelineBuildPipeline04C6628A',\n            },\n            '-06cc6a8b3242c01f0cffbd9626c6a84d',\n          ],\n        ],\n      },\n      Resource: stack.resolve(pipe.pipeline.pipelineArn),\n      Targets: [\n        {\n          TargetAddress: stack.resolve(slackChannel.slackChannelConfigurationArn),\n          TargetType: 'AWSChatbotSlack',\n        },\n      ],\n    });\n  });\n\n  test('multiple notifications', () => {\n    // GIVEN\n    const stack = new Stack(new App(), 'TestStack');\n    const slackChannel1 = new chatbot.SlackChannelConfiguration(stack, 'slack1', {\n      slackChannelConfigurationName: 'test-slack-config-1',\n      slackChannelId: 'test-channel-id-1',\n      slackWorkspaceId: 
'test-workspace-id-1',\n    });\n    const slackChannel2 = new chatbot.SlackChannelConfiguration(stack, 'slack2', {\n      slackChannelConfigurationName: 'test-slack-config-2',\n      slackChannelId: 'test-channel-id-2',\n      slackWorkspaceId: 'test-workspace-id-2',\n    });\n    const pipe = new Pipeline(stack, 'Pipeline', {\n      repo: new CodeCommitRepo(new codecommit.Repository(stack, 'Repo1', { repositoryName: 'test' })),\n    });\n\n    // WHEN\n    pipe.notifyOnFailure(new SlackNotification({ channels: [slackChannel1] }));\n    pipe.notifyOnFailure(new SlackNotification({ channels: [slackChannel2] }));\n    const template = Template.fromStack(stack);\n\n    // THEN\n    template.hasResourceProperties('AWS::CodeStarNotifications::NotificationRule', {\n      Targets: [\n        {\n          TargetAddress: stack.resolve(slackChannel1.slackChannelConfigurationArn),\n          TargetType: 'AWSChatbotSlack',\n        },\n      ],\n    });\n    template.hasResourceProperties('AWS::CodeStarNotifications::NotificationRule', {\n      Targets: [\n        {\n          TargetAddress: stack.resolve(slackChannel2.slackChannelConfigurationArn),\n          TargetType: 'AWSChatbotSlack',\n        },\n      ],\n    });\n  });\n});\n"
  },
  {
    "path": "lib/__tests__/pipeline.test.ts",
    "content": "import * as path from 'path';\nimport {\n  App, Duration, Stack,\n  aws_codebuild as codebuild,\n  aws_codecommit as codecommit,\n  aws_codepipeline as cpipeline,\n  aws_codepipeline_actions as cpipeline_actions,\n} from 'aws-cdk-lib';\nimport { Capture, Template, Match } from 'aws-cdk-lib/assertions';\nimport { Role } from 'aws-cdk-lib/aws-iam';\nimport { Function } from 'aws-cdk-lib/aws-lambda';\nimport { Bucket } from 'aws-cdk-lib/aws-s3';\nimport { Construct } from 'constructs';\nimport * as delivlib from '../../lib';\nimport { determineRunOrder } from '../../lib/util';\n\ntest('pipelineName can be used to set a physical name for the pipeline', async () => {\n  const stack = new Stack(new App(), 'TestStack');\n\n  new delivlib.Pipeline(stack, 'Pipeline', {\n    repo: createTestRepo(stack),\n    pipelineName: 'HelloPipeline',\n  });\n\n  const template = Template.fromStack(stack);\n\n  template.hasResourceProperties('AWS::CodePipeline::Pipeline', {\n    Name: 'HelloPipeline',\n  });\n});\n\ntest('concurrency: unlimited by default', async () => {\n  const stack = new Stack(new App(), 'TestStack');\n\n  const stages = createTestPipelineForConcurrencyTests(stack);\n\n  // default is RunOrder = 1 for all actions which means they all run in parallel\n  for (const stage of stages) {\n    const actions = stage.Actions;\n    for (const action of actions) {\n      expect(action.RunOrder).toBe(1);\n    }\n  }\n});\n\ntest('concurrency = 1: means that actions will run sequentially', async () => {\n  const stack = new Stack(new App(), 'TestStack');\n  const stages = createTestPipelineForConcurrencyTests(stack, { concurrency: 1 } as any);\n\n  for (const stage of stages) {\n    const actions = stage.Actions;\n    let expected = 1;\n    for (const action of actions) {\n      expect(action.RunOrder).toBe(expected);\n      expected++;\n    }\n  }\n});\n\ntest('determineRunOrder: creates groups of up to \"concurrency\" actions', async () => {\n  testCase({ 
actionCount: 1, concurrency: 1 });\n  testCase({ actionCount: 10, concurrency: 1 });\n  testCase({ actionCount: 56, concurrency: 4 });\n  testCase({ actionCount: 3, concurrency: 2 });\n\n  function testCase({ actionCount, concurrency }: { actionCount: number; concurrency: number }) {\n    const actionsPerRunOrder: { [runOrder: number]: number } = {};\n    for (let i = 0; i < actionCount; ++i) {\n      const runOrder = determineRunOrder(i, concurrency)!;\n      if (!actionsPerRunOrder[runOrder]) {\n        actionsPerRunOrder[runOrder] = 0;\n      }\n      actionsPerRunOrder[runOrder]++;\n    }\n\n    // assert that there are no more than *concurrency* actions in each runOrder\n    let total = 0;\n    for (const [, count] of Object.entries(actionsPerRunOrder)) {\n      expect(count).toBeLessThanOrEqual(concurrency);\n      total += count;\n    }\n\n    expect(total).toBe(actionCount); // sanity\n  }\n});\n\ntest('can add arbitrary shellables with different artifacts', () => {\n  const stack = new Stack(new App(), 'TestStack');\n\n  const pipeline = new delivlib.Pipeline(stack, 'Pipeline', {\n    repo: createTestRepo(stack),\n    pipelineName: 'HelloPipeline',\n  });\n\n  const action = pipeline.addShellable('Test', 'SecondStep', {\n    scriptDirectory: __dirname,\n    entrypoint: 'run-test.sh',\n  }).action;\n\n  pipeline.addPublish(new Pub(stack, 'Pub'), { inputArtifact: action.actionProperties.outputs![0] });\n  const template = Template.fromStack(stack);\n\n  template.hasResourceProperties('AWS::CodePipeline::Pipeline', {\n    Stages: Match.arrayWith([\n      {\n        Actions: [\n          Match.objectLike({\n            ActionTypeId: { Category: 'Source', Owner: 'AWS', Provider: 'CodeCommit', Version: '1' },\n            Name: 'Pull',\n            OutputArtifacts: [\n              {\n                Name: 'Source',\n              },\n            ],\n          }),\n        ],\n        Name: 'Source',\n      },\n      {\n        Actions: [\n          
Match.objectLike({\n            Name: 'Build',\n            ActionTypeId: { Category: 'Build', Owner: 'AWS', Provider: 'CodeBuild', Version: '1' },\n            InputArtifacts: [{ Name: 'Source' }],\n            OutputArtifacts: [{ Name: 'Artifact_Build_Build' }],\n            RunOrder: 1,\n          }),\n        ],\n        Name: 'Build',\n      },\n      {\n        Actions: [\n          Match.objectLike({\n            ActionTypeId: { Category: 'Build', Owner: 'AWS', Provider: 'CodeBuild', Version: '1' },\n            InputArtifacts: [{ Name: 'Artifact_Build_Build' }],\n            Name: 'ActionSecondStep',\n            OutputArtifacts: [{ Name: 'Artifact_c81eddcbe9657bd312a728fb13df77bc09f9a519b4' }],\n            RunOrder: 1,\n          }),\n        ],\n        Name: 'Test',\n      },\n      {\n        Actions: [\n          Match.objectLike({\n            ActionTypeId: { Category: 'Build', Owner: 'AWS', Provider: 'CodeBuild', Version: '1' },\n            InputArtifacts: [{ Name: 'Artifact_c81eddcbe9657bd312a728fb13df77bc09f9a519b4' }],\n            Name: 'PubPublish',\n            RunOrder: 1,\n          }),\n        ],\n        Name: 'Publish',\n      },\n    ]),\n  });\n});\n\ntest('autoBuild() can be used to add automatic builds to the pipeline', () => {\n  // GIVEN\n  const stack = new Stack(new App(), 'TestStack');\n\n  // WHEN\n  new delivlib.Pipeline(stack, 'Pipeline', {\n    repo: createTestRepo(stack),\n    pipelineName: 'HelloPipeline',\n    autoBuild: true,\n  });\n  const template = Template.fromStack(stack);\n\n  template.resourceCountIs('AWS::Serverless::Application', 0);\n});\n\ntest('autoBuild() can be configured to publish logs publically', () => {\n  // GIVEN\n  const stack = new Stack(new App(), 'TestStack');\n\n  // WHEN\n  new delivlib.Pipeline(stack, 'Pipeline', {\n    repo: createTestRepo(stack),\n    pipelineName: 'HelloPipeline',\n    autoBuild: true,\n    autoBuildOptions: {\n      publicLogs: true,\n    },\n  });\n  const template = 
Template.fromStack(stack);\n\n  template.hasResourceProperties('AWS::Serverless::Application', {\n    Location: {\n      ApplicationId: 'arn:aws:serverlessrepo:us-east-1:277187709615:applications/github-codebuild-logs',\n      SemanticVersion: '1.6.0',\n    },\n    Parameters: {\n      CodeBuildProjectName: {\n        Ref: 'PipelineAutoBuildProjectB97B4446',\n      },\n      DeletePreviousComments: 'true',\n      CommentOnSuccess: 'true',\n    },\n  });\n});\n\ntest('autoBuild() can be configured with a different buildspec', () => {\n  // GIVEN\n  const stack = new Stack(new App(), 'TestStack');\n\n  // WHEN\n  new delivlib.Pipeline(stack, 'Pipeline', {\n    repo: createTestRepo(stack),\n    pipelineName: 'HelloPipeline',\n    autoBuild: true,\n    autoBuildOptions: {\n      buildSpec: codebuild.BuildSpec.fromSourceFilename('different-buildspec.yaml'),\n    },\n  });\n\n  const template = Template.fromStack(stack);\n  // THEN\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Source: {\n      BuildSpec: 'different-buildspec.yaml',\n      Location: {\n        'Fn::GetAtt': [\n          'Repo02AC86CF',\n          'CloneUrlHttp',\n        ],\n      },\n      Type: 'CODECOMMIT',\n    },\n  });\n});\n\ntest('CodeBuild Project name matches buildProjectName property', () => {\n  // GIVEN\n  const stack = new Stack(new App(), 'TestStack');\n\n  // WHEN\n  new delivlib.Pipeline(stack, 'Pipeline', {\n    repo: createTestRepo(stack),\n    pipelineName: 'HelloPipeline',\n    buildProjectName: 'HelloBuild',\n  });\n\n  const template = Template.fromStack(stack);\n  // THEN\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Name: 'HelloBuild',\n  });\n});\n\ntest('CodeBuild Project name is extended from pipelineName property', () => {\n  // GIVEN\n  const stack = new Stack(new App(), 'TestStack');\n\n  // WHEN\n  new delivlib.Pipeline(stack, 'Pipeline', {\n    repo: createTestRepo(stack),\n    pipelineName: 'HelloPipeline',\n  });\n\n  // THEN\n 
 const template = Template.fromStack(stack);\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Name: 'HelloPipeline-Build',\n  });\n});\n\ntest('CodeBuild Project name is left undefined when neither buildProjectName nor pipelineName are specified', () => {\n  // GIVEN\n  const stack = new Stack(new App(), 'TestStack');\n\n  // WHEN\n  new delivlib.Pipeline(stack, 'Pipeline', {\n    repo: createTestRepo(stack),\n  });\n  const template = Template.fromStack(stack);\n\n  // THEN\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Name: Match.absent(),\n  });\n});\n\ntest('metricFailures', () => {\n  const stack = new Stack(new App(), 'TestStack');\n  const pipeline = new delivlib.Pipeline(stack, 'Pipeline', {\n    repo: createTestRepo(stack),\n    pipelineName: 'HelloPipeline',\n  });\n\n  expect(stack.resolve(pipeline.metricFailures({}))).toEqual({\n    dimensions: { Pipeline: { Ref: 'PipelineBuildPipeline04C6628A' } },\n    namespace: 'CDK/Delivlib',\n    metricName: 'Failures',\n    period: Duration.minutes(5),\n    statistic: 'Sum',\n  });\n});\n\ntest('metricActionFailures', () => {\n  const stack = new Stack(new App(), 'TestStack');\n  const pipeline = new delivlib.Pipeline(stack, 'Pipeline', {\n    repo: createTestRepo(stack),\n    pipelineName: 'HelloPipeline',\n  });\n  const project = new codebuild.Project(stack, 'publish', {\n    buildSpec: codebuild.BuildSpec.fromObject({ version: '0.2' }),\n  });\n  const scriptDirectory = __dirname;\n  const entrypoint = 'run-test.sh';\n\n  pipeline.addShellable('PreBuild', 'FirstStep', { scriptDirectory, entrypoint });\n  pipeline.addShellable('PreBuild', 'SecondStep', { scriptDirectory, entrypoint });\n  pipeline.addPublish(new TestPublishable(stack, 'Publish1', { project }));\n  pipeline.addPublish(new TestPublishable(stack, 'Publish2', { project }));\n  pipeline.addTest('Test1', { scriptDirectory, entrypoint, platform: delivlib.ShellPlatform.LinuxUbuntu });\n  
pipeline.addTest('Test2', { scriptDirectory, entrypoint, platform: delivlib.ShellPlatform.LinuxUbuntu });\n\n  const expectedMetricNames = [\n    'Pull',\n    'Build',\n    'ActionFirstStep',\n    'ActionSecondStep',\n    'Publish1Publish',\n    'Publish2Publish',\n    'TestTest1',\n    'TestTest2',\n  ];\n  const expectedMetrics = expectedMetricNames.map(name => {\n    return {\n      dimensions: { Pipeline: { Ref: 'PipelineBuildPipeline04C6628A' }, Action: name },\n      namespace: 'CDK/Delivlib',\n      metricName: 'Failures',\n      period: Duration.minutes(5),\n      statistic: 'Sum',\n    };\n  });\n\n  expect(stack.resolve(pipeline.metricActionFailures({}))).toEqual(expectedMetrics);\n});\n\ntest('signing output artifact is used as input artifact for all stages after signing stage', () => {\n  // GIVEN\n  const app = new App();\n  const stack = new Stack(app, 'TestStack');\n  const signingLambda = Function.fromFunctionName(stack, 'SigningLambda', 'signing-lambda');\n  const signingBucket = Bucket.fromBucketName(stack, 'SigningBucket', 'signing-bucket');\n  const accessRole = Role.fromRoleName(stack, 'AccessRole', 'access-role');\n  const pipeline = new delivlib.Pipeline(stack, 'Pipeline', {\n    repo: createTestRepo(stack),\n    pipelineName: 'TestPipeline',\n  });\n\n  // WHEN\n  pipeline.signNuGetWithSigner({\n    signingLambda,\n    signingBucket,\n    accessRole,\n  });\n\n  pipeline.publishToNuGet({\n    nugetApiKeySecret: {\n      secretArn: 'arn:aws:secretsmanager:us-east-1:123456789012:secret:nuget-secret',\n    },\n  });\n\n  pipeline.addTest('test1', {\n    scriptDirectory: path.join(__dirname, 'delivlib-tests/assume-role'),\n    entrypoint: 'test.sh',\n  });\n\n  // THEN\n  Template.fromStack(stack).hasResourceProperties('AWS::CodePipeline::Pipeline', {\n    Stages: [\n      {\n        Actions: [\n          Match.objectLike({\n            InputArtifacts: Match.absent(),\n            OutputArtifacts: [\n              { Name: 'Source' },\n           
 ],\n          }),\n        ],\n        Name: 'Source',\n      },\n      {\n        Actions: [\n          Match.objectLike({\n            InputArtifacts: [\n              { Name: 'Source' },\n            ],\n            OutputArtifacts: [\n              { Name: 'Artifact_Build_Build' },\n            ],\n          }),\n        ],\n        Name: 'Build',\n      },\n      {\n        Actions: [\n          Match.objectLike({\n            InputArtifacts: [\n              { Name: 'Artifact_Build_Build' },\n            ],\n            OutputArtifacts: [\n              { Name: 'Artifact_Sign_NuGetSigningSign' },\n            ],\n          }),\n        ],\n        Name: 'Sign',\n      },\n      {\n        Actions: [\n          Match.objectLike({\n            InputArtifacts: [\n              { Name: 'Artifact_Sign_NuGetSigningSign' },\n            ],\n            OutputArtifacts: Match.absent(),\n          }),\n        ],\n        Name: 'Publish',\n      },\n      {\n        Actions: [\n          Match.objectLike({\n            InputArtifacts: [\n              { Name: 'Artifact_Sign_NuGetSigningSign' },\n            ],\n            OutputArtifacts: [\n              { Name: 'Artifact_c8383dfefa10c0c326ab2bfac48dcf263ea515d7e1' },\n            ],\n          }),\n        ],\n        Name: 'Test',\n      },\n    ],\n  });\n});\n\nfunction createTestPipelineForConcurrencyTests(stack: Stack, props?: delivlib.PipelineProps) {\n  const pipeline = new delivlib.Pipeline(stack, 'Pipeline', {\n    repo: createTestRepo(stack),\n    ...props,\n  });\n\n  const project = new codebuild.Project(stack, 'publish', {\n    buildSpec: codebuild.BuildSpec.fromObject({ version: '0.2' }),\n  });\n\n  const scriptDirectory = path.join(__dirname, 'delivlib-tests', 'linux');\n  const entrypoint = 'test.sh';\n  pipeline.addTest('test1', { scriptDirectory, entrypoint, platform: delivlib.ShellPlatform.LinuxUbuntu });\n  pipeline.addTest('test2', { scriptDirectory, entrypoint, platform: 
delivlib.ShellPlatform.LinuxUbuntu });\n  pipeline.addTest('test3', { scriptDirectory, entrypoint, platform: delivlib.ShellPlatform.LinuxUbuntu });\n  pipeline.addTest('test4', { scriptDirectory, entrypoint, platform: delivlib.ShellPlatform.LinuxUbuntu });\n  pipeline.addTest('test5', { scriptDirectory, entrypoint, platform: delivlib.ShellPlatform.LinuxUbuntu });\n  pipeline.addPublish(new TestPublishable(stack, 'pub1', { project }));\n  pipeline.addPublish(new TestPublishable(stack, 'pub2', { project }));\n  pipeline.addPublish(new TestPublishable(stack, 'pub3', { project }));\n  pipeline.addPublish(new TestPublishable(stack, 'pub4', { project }));\n  pipeline.addPublish(new TestPublishable(stack, 'pub5', { project }));\n  pipeline.addPublish(new TestPublishable(stack, 'pub6', { project }));\n\n  const template = Template.fromStack(stack);\n  const capture = new Capture();\n  template.hasResourceProperties('AWS::CodePipeline::Pipeline', {\n    Stages: capture,\n  });\n  return capture.asArray();\n}\n\nfunction createTestRepo(stack: Stack) {\n  return new delivlib.CodeCommitRepo(new codecommit.Repository(stack, 'Repo', { repositoryName: 'test' }));\n}\n\nclass TestPublishable extends Construct implements delivlib.IPublisher {\n  public readonly project: codebuild.IProject;\n\n  constructor(scope: Construct, id: string, props: { project: codebuild.IProject }) {\n    super(scope, id);\n\n    this.project = props.project;\n  }\n\n  public addToPipeline(stage: cpipeline.IStage, id: string, options: delivlib.AddToPipelineOptions): void {\n    stage.addAction(new cpipeline_actions.CodeBuildAction({\n      actionName: id,\n      input: options.inputArtifact || new cpipeline.Artifact(),\n      project: this.project,\n      runOrder: options.runOrder,\n    }));\n  }\n}\n\nclass Pub extends Construct implements delivlib.IPublisher {\n  public readonly project: codebuild.IProject;\n\n  constructor(scope: Construct, id: string) {\n    super(scope, id);\n\n    this.project = 
new codebuild.PipelineProject(this, 'Project');\n  }\n\n  public addToPipeline(stage: cpipeline.IStage, id: string, options: delivlib.AddToPipelineOptions): void {\n    stage.addAction(new cpipeline_actions.CodeBuildAction({\n      actionName: id,\n      input: options.inputArtifact || new cpipeline.Artifact(),\n      project: this.project,\n      runOrder: options.runOrder,\n    }));\n  }\n}\n\n"
  },
  {
    "path": "lib/__tests__/pr.test.ts",
    "content": "// tslint:disable: max-line-length\nimport * as cdk from 'aws-cdk-lib';\nimport { Template, Match } from 'aws-cdk-lib/assertions';\nimport { AutoPullRequest, WritableGitHubRepo } from '../../lib';\n\nconst MOCK_REPO = new WritableGitHubRepo({\n  sshKeySecret: { secretArn: 'ssh-key-secret-arn' },\n  commitUsername: 'user',\n  commitEmail: 'email@email',\n  repository: 'owner/repo',\n  tokenSecretArn: 'token-secret-arn',\n});\n\n\nlet app: cdk.App;\nlet stack: cdk.Stack;\n\nbeforeEach(() => {\n  app = new cdk.App();\n  stack = new cdk.Stack(app, 'TestStack');\n});\n\ntest('skip PR if still open', () => {\n  // WHEN\n  new AutoPullRequest(stack, 'AutoPull', {\n    repo: MOCK_REPO,\n    head: { name: 'new-feature' },\n    skipIfOpenPrsWithLabels: ['asdf'],\n  });\n  const template = Template.fromStack(stack);\n\n  // THEN\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Source: {\n      BuildSpec: Match.serializedJson({\n        version: '0.2',\n        phases: Match.objectLike({\n          build: {\n            commands: Match.arrayWith([\n              '$SKIP || { export GITHUB_TOKEN=$(aws secretsmanager get-secret-value --secret-id \"token-secret-arn\" --output=text --query=SecretString) ; }',\n              '$SKIP || { curl --fail -o search.json --header \"Authorization: token $GITHUB_TOKEN\" --header \"Content-Type: application/json\" \\'https://api.github.com/search/issues?q=repo%3Aowner%2Frepo%20is%3Apr%20is%3Aopen%20label%3Aasdf\\' ; }',\n              '$SKIP || { node -e \\'process.exitCode = require(\"./search.json\").total_count\\' || { echo \"Found open PRs with label asdf, skipping PR.\"; export SKIP=true; } ; }',\n            ]),\n          },\n        }),\n      }),\n    },\n  });\n});\n"
  },
  {
    "path": "lib/__tests__/publishing.test.ts",
    "content": "import {\n  App, Stack,\n  aws_codebuild as codebuild,\n  aws_codecommit as codecommit,\n  aws_kms as kms,\n  assertions,\n} from 'aws-cdk-lib';\nimport * as delivlib from '../../lib';\n\nconst { Template, Match } = assertions;\n\n\ndescribe('with standard pipeline', () => {\n  let stack: Stack;\n  let pipeline: delivlib.Pipeline;\n  beforeEach(() => {\n    stack = new Stack(new App(), 'TestStack');\n\n    pipeline = new delivlib.Pipeline(stack, 'Pipeline', {\n      repo: new delivlib.CodeCommitRepo(new codecommit.Repository(stack, 'Repo', { repositoryName: 'test' })),\n      pipelineName: 'HelloPipeline',\n    });\n  });\n\n  test('can configure build image for NuGet publishing', () => {\n    pipeline.publishToNuGet({\n      nugetApiKeySecret: { secretArn: 'arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/nuget-fHzSUD' },\n      buildImage: codebuild.LinuxBuildImage.fromDockerRegistry('xyz'),\n    });\n    const template = Template.fromStack(stack);\n\n    template.hasResourceProperties('AWS::CodeBuild::Project', {\n      Environment: {\n        Image: 'xyz',\n      },\n    });\n  });\n\n  test('can configure build image for Maven publishing', () => {\n    const signingKey = new delivlib.OpenPGPKeyPair(stack, 'CodeSign', {\n      email: 'aws-cdk-dev+delivlib@amazon.com',\n      encryptionKey: new kms.Key(stack, 'CodeSign-CMK'),\n      expiry: '4y',\n      identity: 'aws-cdk-dev',\n      keySizeBits: 4_096,\n      pubKeyParameterName: `/${stack.node.path}/CodeSign.pub`,\n      secretName: stack.node.path + '/CodeSign',\n      version: 0,\n      removalPolicy: delivlib.OpenPGPKeyPairRemovalPolicy.DESTROY_IMMEDIATELY,\n    });\n\n    pipeline.publishToMaven({\n      mavenLoginSecret: { secretArn: 'arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/maven-7ROCWi' },\n      mavenEndpoint: 'https://aws.oss.sonatype.org:443/',\n      signingKey,\n      stagingProfileId: '68a05363083174',\n      buildImage: 
codebuild.LinuxBuildImage.fromDockerRegistry('xyz'),\n    });\n    const template = Template.fromStack(stack);\n\n    template.hasResourceProperties('AWS::CodeBuild::Project', {\n      Environment: {\n        Image: 'xyz',\n      },\n    });\n  });\n\n  test('can control stage name', () => {\n    pipeline.publishToNuGet({\n      nugetApiKeySecret: { secretArn: 'arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/nuget-fHzSUD' },\n      buildImage: codebuild.LinuxBuildImage.fromDockerRegistry('xyz'),\n      stageName: 'MyPublishStage',\n    });\n\n    const template = Template.fromStack(stack);\n    template.hasResourceProperties('AWS::CodePipeline::Pipeline', {\n      Stages: Match.arrayWith([{\n        Name: 'MyPublishStage',\n        Actions: [Match.objectLike({\n          Name: 'NuGetPublish',\n        })],\n      }]),\n    });\n  });\n\n  test.each(['npm', 'nuget', 'maven', 'pypi'] as const)('publishing SSM timestamps adds IAM permissions: %p', (type) => {\n    switch (type) {\n      case 'npm':\n        pipeline.publishToNpm({\n          npmTokenSecret: { secretArn: 'arn:secret' },\n          ssmPrefix: '/published/jsii-sample/npm',\n        });\n        break;\n\n      case 'nuget':\n        pipeline.publishToNuGet({\n          nugetApiKeySecret: { secretArn: 'arn:secret' },\n          ssmPrefix: '/published/jsii-sample/nuget',\n        });\n        break;\n\n      case 'maven':\n        const signingKey = new delivlib.OpenPGPKeyPair(stack, 'CodeSign', {\n          email: 'aws-cdk-dev+delivlib@amazon.com',\n          encryptionKey: new kms.Key(stack, 'CodeSign-CMK'),\n          expiry: '4y',\n          identity: 'aws-cdk-dev',\n          keySizeBits: 4_096,\n          pubKeyParameterName: `/${stack.node.path}/CodeSign.pub`,\n          secretName: stack.node.path + '/CodeSign',\n          version: 0,\n          removalPolicy: delivlib.OpenPGPKeyPairRemovalPolicy.DESTROY_IMMEDIATELY,\n        });\n\n        pipeline.publishToMaven({\n          
mavenLoginSecret: { secretArn: 'arn:secret' },\n          mavenEndpoint: 'https://aws.oss.sonatype.org:443/',\n          stagingProfileId: '68a05363083174',\n          ssmPrefix: '/published/jsii-sample/maven',\n          signingKey,\n        });\n        break;\n\n      case 'pypi':\n        pipeline.publishToPyPI({\n          loginSecret: { secretArn: 'arn:secret' },\n          ssmPrefix: '/published/jsii-sample/pypi',\n        });\n        break;\n    }\n\n    const template = Template.fromStack(stack);\n    template.hasResourceProperties('AWS::IAM::Policy', {\n      PolicyDocument: {\n        Statement: Match.arrayWith([{\n          Effect: 'Allow',\n          Action: ['ssm:PutParameter', 'ssm:GetParameter'],\n          Resource: {\n            'Fn::Join': ['', [\n              'arn:',\n              { Ref: 'AWS::Partition' },\n              ':ssm:',\n              { Ref: 'AWS::Region' },\n              ':',\n              { Ref: 'AWS::AccountId' },\n              `:parameter/published/jsii-sample/${type}/*`,\n            ]],\n          },\n        }]),\n      },\n    });\n  });\n});\n"
  },
  {
    "path": "lib/__tests__/registry-sync/docker-asset/Dockerfile",
    "content": "# empty"
  },
  {
    "path": "lib/__tests__/registry-sync/ecr-mirror.test.ts",
    "content": "import * as path from 'path';\nimport {\n  Aspects, Duration, Stack,\n  aws_codebuild as codebuild,\n  aws_events as events,\n  aws_secretsmanager as secrets,\n} from 'aws-cdk-lib';\nimport { Template, Match } from 'aws-cdk-lib/assertions';\nimport { EcrMirror, EcrMirrorAspect, MirrorSource } from '../../../lib/registry-sync';\n\ndescribe('EcrMirror', () => {\n  test('default', () => {\n    const stack = new Stack();\n    new EcrMirror(stack, 'EcrRegistrySync', {\n      sources: [MirrorSource.fromDockerHub('docker-image')],\n      dockerHubCredentials: {\n        usernameKey: 'username-key',\n        passwordKey: 'password-key',\n        secret: secrets.Secret.fromSecretPartialArn(stack, 'DockerhubSecret', 'arn:aws:secretsmanager:us-west-2:111122223333:secret:123aass'),\n      },\n      schedule: events.Schedule.cron({}),\n    });\n    const template = Template.fromStack(stack);\n\n    template.hasResourceProperties('AWS::CodeBuild::Project', {\n      Environment: {\n        EnvironmentVariables: [\n          {\n            Name: 'DOCKERHUB_USERNAME',\n            Type: 'SECRETS_MANAGER',\n            Value: '123aass:username-key:AWSCURRENT',\n          },\n          {\n            Name: 'DOCKERHUB_PASSWORD',\n            Type: 'SECRETS_MANAGER',\n            Value: '123aass:password-key:AWSCURRENT',\n          },\n        ],\n        Image: 'public.ecr.aws/jsii/superchain:1-bookworm-slim-node22',\n      },\n      Source: {\n        BuildSpec: {\n          'Fn::Join': [\n            '',\n            [\n              '{\\n  \"version\": \"0.2\",\\n  \"phases\": {\\n    \"build\": {\\n      \"commands\": [\\n        \"nohup /usr/bin/dockerd --host=unix:///var/run/docker.sock --host=tcp://127.0.0.1:2375 --storage-driver=overlay2&\",\\n        \"timeout 15 sh -c \\\\\"until docker info; do echo .; sleep 1; done\\\\\"\",\\n        \"docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}\",\\n        \"aws ecr get-login-password | docker login 
--username AWS --password-stdin ',\n              {\n                Ref: 'AWS::AccountId',\n              },\n              '.dkr.ecr.',\n              {\n                Ref: 'AWS::Region',\n              },\n              '.amazonaws.com\",\\n        \"aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws\",\\n        \"docker pull library/docker-image:latest\",\\n        \"docker tag library/docker-image:latest ',\n              {\n                Ref: 'AWS::AccountId',\n              },\n              '.dkr.ecr.',\n              {\n                Ref: 'AWS::Region',\n              },\n              '.amazonaws.com/library/docker-image:latest\",\\n        \"docker push ',\n              {\n                Ref: 'AWS::AccountId',\n              },\n              '.dkr.ecr.',\n              {\n                Ref: 'AWS::Region',\n              },\n              '.amazonaws.com/library/docker-image:latest\",\\n        \"docker image prune --all --force\"\\n      ]\\n    }\\n  }\\n}',\n            ],\n          ],\n        },\n      },\n    });\n  });\n\n  test('autoStart', () => {\n    const stack = new Stack();\n    new EcrMirror(stack, 'EcrRegistrySync', {\n      sources: [MirrorSource.fromDockerHub('docker-image')],\n      dockerHubCredentials: {\n        usernameKey: 'username-key',\n        passwordKey: 'password-key',\n        secret: secrets.Secret.fromSecretPartialArn(stack, 'DockerhubSecret', 'arn:aws:secretsmanager:us-west-2:111122223333:secret:123aass'),\n      },\n      autoStart: true,\n    });\n\n    const template = Template.fromStack(stack);\n\n    template.resourceCountIs('Custom::AWS', 1);\n    template.resourceCountIs('AWS::Events::Rule', 0);\n  });\n\n  test('schedule', () => {\n    const stack = new Stack();\n    new EcrMirror(stack, 'EcrRegistrySync', {\n      sources: [MirrorSource.fromDockerHub('docker-image')],\n      dockerHubCredentials: {\n        usernameKey: 
'username-key',\n        passwordKey: 'password-key',\n        secret: secrets.Secret.fromSecretPartialArn(stack, 'DockerhubSecret', 'arn:aws:secretsmanager:us-west-2:111122223333:secret:123aass'),\n      },\n      schedule: events.Schedule.rate(Duration.hours(1)),\n    });\n    const template = Template.fromStack(stack);\n\n    template.hasResourceProperties('AWS::Events::Rule', {\n      ScheduleExpression: 'rate(1 hour)',\n    });\n\n    template.resourceCountIs('Custom::AWS', 0);\n    template.resourceCountIs('AWS::Lambda::Function', 0);\n  });\n\n  test('errors on duplicate repository', () => {\n    const stack = new Stack();\n    expect(() => new EcrMirror(stack, 'EcrRegistrySync', {\n      sources: [\n        MirrorSource.fromDockerHub('my/docker-image'),\n        MirrorSource.fromDir(path.join(__dirname, 'docker-asset'), 'my/docker-image'),\n      ],\n      dockerHubCredentials: {\n        usernameKey: 'username-key',\n        passwordKey: 'password-key',\n        secret: secrets.Secret.fromSecretPartialArn(stack, 'DockerhubSecret', 'arn:aws:secretsmanager:us-west-2:111122223333:secret:123aass'),\n      },\n      schedule: events.Schedule.rate(Duration.hours(1)),\n    })).toThrow(/Mirror source.*already exists/);\n  });\n\n  describe('ecrRepository()', () => {\n    test('default', () => {\n      const stack = new Stack();\n      const image = MirrorSource.fromDockerHub('my/docker-image');\n      const registry = new EcrMirror(stack, 'EcrRegistrySync', {\n        sources: [image],\n        dockerHubCredentials: {\n          usernameKey: 'username-key',\n          passwordKey: 'password-key',\n          secret: secrets.Secret.fromSecretPartialArn(stack, 'DockerhubSecret', 'arn:aws:secretsmanager:us-west-2:111122223333:secret:123aass'),\n        },\n        schedule: events.Schedule.cron({}),\n      });\n\n      const repo = registry.ecrRepository('my/docker-image');\n      expect(repo).toBeDefined();\n      
expect(stack.resolve(repo!.repositoryArn)).toEqual({\n        'Fn::GetAtt': ['EcrRegistrySyncRepomydockerimageCE3ABCA6', 'Arn'],\n      });\n    });\n\n    test('returning a mirrored repository does not depend on the tag', () => {\n      const stack = new Stack();\n      const image = MirrorSource.fromDockerHub('my/docker-image', 'mytag');\n      const registry = new EcrMirror(stack, 'EcrRegistrySync', {\n        sources: [image],\n        dockerHubCredentials: {\n          usernameKey: 'username-key',\n          passwordKey: 'password-key',\n          secret: secrets.Secret.fromSecretPartialArn(stack, 'DockerhubSecret', 'arn:aws:secretsmanager:us-west-2:111122223333:secret:123aass'),\n        },\n        schedule: events.Schedule.cron({}),\n      });\n\n      expect(registry.ecrRepository('my/docker-image')).toBeDefined();\n    });\n  });\n\n  test('schedule and/or autoStart', () => {\n    const stack = new Stack();\n    const image = MirrorSource.fromDockerHub('my/docker-image');\n    expect(() => new EcrMirror(stack, 'EcrRegistrySync', {\n      sources: [image],\n      dockerHubCredentials: {\n        usernameKey: 'username-key',\n        passwordKey: 'password-key',\n        secret: secrets.Secret.fromSecretPartialArn(stack, 'DockerhubSecret', 'arn:aws:secretsmanager:us-west-2:111122223333:secret:123aass'),\n      },\n    })).toThrow(/schedule or autoStart/);\n  });\n});\n\ndescribe('EcrMirrorAspect', () => {\n  test('applies to relevant codebuild projects', () => {\n    // GIVEN\n    const stack = new Stack();\n    const mirror = new EcrMirror(stack, 'Mirror', {\n      sources: [MirrorSource.fromDockerHub('my/docker-image')],\n      dockerHubCredentials: {\n        usernameKey: 'username-key',\n        passwordKey: 'password-key',\n        secret: secrets.Secret.fromSecretPartialArn(stack, 'DockerhubSecret', 'arn:aws:secretsmanager:us-west-2:111122223333:secret:123aass'),\n      },\n      schedule: events.Schedule.cron({}),\n    });\n    new 
codebuild.Project(stack, 'MyDockerImageProject', {\n      buildSpec: codebuild.BuildSpec.fromObject({}),\n      environment: {\n        buildImage: codebuild.LinuxBuildImage.fromDockerRegistry('my/docker-image'),\n      },\n    });\n\n    // WHEN\n    Aspects.of(stack).add(new EcrMirrorAspect(mirror));\n\n    const template = Template.fromStack(stack);\n\n    // THEN\n    template.hasResourceProperties('AWS::CodeBuild::Project', {\n      Environment: {\n        Image: {\n          'Fn::Join': [\n            '',\n            [\n              {\n                'Fn::Select': [\n                  4,\n                  {\n                    'Fn::Split': [\n                      ':',\n                      { 'Fn::GetAtt': ['MirrorRepomydockerimageE8DCCA4F', 'Arn'] },\n                    ],\n                  },\n                ],\n              },\n              '.dkr.ecr.',\n              {\n                'Fn::Select': [\n                  3,\n                  {\n                    'Fn::Split': [\n                      ':',\n                      { 'Fn::GetAtt': ['MirrorRepomydockerimageE8DCCA4F', 'Arn'] },\n                    ],\n                  },\n                ],\n              },\n              '.',\n              { Ref: 'AWS::URLSuffix' },\n              '/',\n              { Ref: 'MirrorRepomydockerimageE8DCCA4F' },\n              ':latest',\n            ],\n          ],\n        },\n      },\n    });\n  });\n\n  test('does not affect unrelated codebuild projects', () => {\n    // GIVEN\n    const stack = new Stack();\n    const mirror = new EcrMirror(stack, 'Mirror', {\n      sources: [MirrorSource.fromDockerHub('my/docker-image')],\n      dockerHubCredentials: {\n        usernameKey: 'username-key',\n        passwordKey: 'password-key',\n        secret: secrets.Secret.fromSecretPartialArn(stack, 'DockerhubSecret', 'arn:aws:secretsmanager:us-west-2:111122223333:secret:123aass'),\n      },\n      schedule: events.Schedule.cron({}),\n    });\n    new 
codebuild.Project(stack, 'UnrelatedProject', {\n      buildSpec: codebuild.BuildSpec.fromObject({}),\n      environment: {\n        buildImage: codebuild.LinuxBuildImage.fromDockerRegistry('unrelated/image'),\n      },\n    });\n    const template = Template.fromStack(stack);\n\n    // WHEN\n    Aspects.of(stack).add(new EcrMirrorAspect(mirror));\n\n    // THEN\n    template.hasResourceProperties('AWS::CodeBuild::Project', {\n      Environment: {\n        Image: 'unrelated/image',\n      },\n    });\n  });\n\n  test('can mirror multiple tags from same repository', () => {\n    // GIVEN\n    const stack = new Stack(undefined, 'Stack', {\n      env: { account: 'account', region: 'region' },\n    });\n    new EcrMirror(stack, 'Mirror', {\n      sources: [\n        MirrorSource.fromDockerHub('my/docker-image'),\n        MirrorSource.fromDockerHub('my/docker-image', 'some_tag'),\n      ],\n      dockerHubCredentials: {\n        usernameKey: 'username-key',\n        passwordKey: 'password-key',\n        secret: secrets.Secret.fromSecretPartialArn(stack, 'DockerhubSecret', 'arn:aws:secretsmanager:us-west-2:111122223333:secret:123aass'),\n      },\n      schedule: events.Schedule.cron({}),\n    });\n    const template = Template.fromStack(stack);\n\n    // THEN: one repo that mirrors both tags\n    template.hasResourceProperties('AWS::ECR::Repository', {\n      RepositoryName: 'my/docker-image',\n    });\n    template.resourceCountIs('AWS::ECR::Repository', 1);\n\n    // Have both pushes in the project buildspec\n    template.hasResourceProperties('AWS::CodeBuild::Project', {\n      Source: {\n        BuildSpec: Match.serializedJson(Match.objectLike({\n          phases: {\n            build: {\n              commands: Match.arrayWith([\n                'docker push account.dkr.ecr.region.amazonaws.com/my/docker-image:latest',\n                'docker push account.dkr.ecr.region.amazonaws.com/my/docker-image:some_tag',\n              ]),\n            },\n          },\n      
  })),\n      },\n    });\n  });\n});\n"
  },
  {
    "path": "lib/__tests__/registry-sync/mirror-source.test.ts",
    "content": "import * as path from 'path';\nimport {\n  Stack, App,\n  aws_codebuild as codebuild,\n} from 'aws-cdk-lib';\nimport { Template, Match } from 'aws-cdk-lib/assertions';\nimport { MirrorSource } from '../../../lib/registry-sync';\n\ndescribe(MirrorSource, () => {\n  describe(MirrorSource.fromDockerHub, () => {\n    test('default', () => {\n      // GIVEN\n      const stack = new Stack();\n      const ecrRegistry = 'myregistry';\n      const source = MirrorSource.fromDockerHub('jsii/superchain');\n\n      // WHEN\n      const result = source.bind({\n        scope: stack,\n        ecrRegistry,\n      });\n\n      // THEN\n      expect(result.repositoryName).toEqual('jsii/superchain');\n      expect(result.tag).toEqual('latest');\n      expect(result.commands).toEqual([\n        'docker pull jsii/superchain:latest',\n        'docker tag jsii/superchain:latest myregistry/jsii/superchain:latest',\n      ]);\n    });\n\n    test('explicit tag', () => {\n      // GIVEN\n      const stack = new Stack();\n      const ecrRegistry = 'myregistry';\n      const source = MirrorSource.fromDockerHub('jsii/superchain', 'mytag');\n\n      // WHEN\n      const result = source.bind({\n        scope: stack,\n        ecrRegistry,\n      });\n\n      // THEN\n      expect(result.repositoryName).toEqual('jsii/superchain');\n      expect(result.tag).toEqual('mytag');\n      expect(result.commands).toEqual([\n        'docker pull jsii/superchain:mytag',\n        'docker tag jsii/superchain:mytag myregistry/jsii/superchain:mytag',\n      ]);\n    });\n\n    test('official image', () => {\n      // GIVEN\n      const stack = new Stack();\n      const ecrRegistry = 'myregistry';\n      const source = MirrorSource.fromDockerHub('superchain');\n\n      // WHEN\n      const result = source.bind({\n        scope: stack,\n        ecrRegistry,\n      });\n\n      // THEN\n      expect(result.repositoryName).toEqual('library/superchain');\n      expect(result.commands).toEqual([\n       
 'docker pull library/superchain:latest',\n        'docker tag library/superchain:latest myregistry/library/superchain:latest',\n      ]);\n    });\n\n    test('fails if image includes tag', () => {\n      expect(() => MirrorSource.fromDockerHub('superchain:latest')).toThrow(/image must not include tag/);\n    });\n  });\n\n  describe(MirrorSource.fromPublicImage, () => {\n    test('default', () => {\n      // GIVEN\n      const stack = new Stack();\n      const ecrRegistry = 'myregistry';\n      const source = MirrorSource.fromPublicImage('jsii/superchain');\n\n      // WHEN\n      const result = source.bind({\n        scope: stack,\n        ecrRegistry,\n      });\n\n      // THEN\n      expect(result.repositoryName).toEqual('jsii/superchain');\n      expect(result.tag).toEqual('latest');\n      expect(result.commands).toEqual([\n        'docker pull jsii/superchain:latest',\n        'docker tag jsii/superchain:latest myregistry/jsii/superchain:latest',\n      ]);\n    });\n\n    test('ECR Public image with custom output repository name', () => {\n      // GIVEN\n      const stack = new Stack();\n      const ecrRegistry = 'myregistry';\n      const source = MirrorSource.fromPublicImage('public.ecr.aws/jsii/superchain', '1-bullseye-slim', 'jsii/superchain');\n\n      // WHEN\n      const result = source.bind({\n        scope: stack,\n        ecrRegistry,\n      });\n\n      // THEN\n      expect(result.repositoryName).toEqual('jsii/superchain');\n      expect(result.tag).toEqual('1-bullseye-slim');\n      expect(result.commands).toEqual([\n        'docker pull public.ecr.aws/jsii/superchain:1-bullseye-slim',\n        'docker tag public.ecr.aws/jsii/superchain:1-bullseye-slim myregistry/jsii/superchain:1-bullseye-slim',\n      ]);\n    });\n\n    test('explicit tag', () => {\n      // GIVEN\n      const stack = new Stack();\n      const ecrRegistry = 'myregistry';\n      const source = MirrorSource.fromPublicImage('jsii/superchain', 'mytag');\n\n      // WHEN\n     
 const result = source.bind({\n        scope: stack,\n        ecrRegistry,\n      });\n\n      // THEN\n      expect(result.repositoryName).toEqual('jsii/superchain');\n      expect(result.tag).toEqual('mytag');\n      expect(result.commands).toEqual([\n        'docker pull jsii/superchain:mytag',\n        'docker tag jsii/superchain:mytag myregistry/jsii/superchain:mytag',\n      ]);\n    });\n\n    test('official image', () => {\n      // GIVEN\n      const stack = new Stack();\n      const ecrRegistry = 'myregistry';\n      const source = MirrorSource.fromPublicImage('superchain');\n\n      // WHEN\n      const result = source.bind({\n        scope: stack,\n        ecrRegistry,\n      });\n\n      // THEN\n      expect(result.repositoryName).toEqual('library/superchain');\n      expect(result.commands).toEqual([\n        'docker pull library/superchain:latest',\n        'docker tag library/superchain:latest myregistry/library/superchain:latest',\n      ]);\n    });\n\n    test('fails if image includes tag', () => {\n      expect(() => MirrorSource.fromPublicImage('superchain:latest')).toThrow(/image must not include tag/);\n    });\n  });\n\n  describe(MirrorSource.fromDir, () => {\n    test('default', () => {\n      // GIVEN\n      const app = new App();\n      const stack = new Stack(app, 'Default', {\n        env: {\n          account: '111111111111',\n          region: 'us-east-1',\n        },\n      });\n      const ecrRegistry = 'myregistry';\n      const source = MirrorSource.fromDir(path.join(__dirname, 'docker-asset'), 'myrepository');\n\n      // WHEN\n      const result = source.bind({\n        scope: stack,\n        ecrRegistry,\n      });\n\n      // THEN\n      expect(result.repositoryName).toEqual('myrepository');\n      expect(result.tag).toEqual('latest');\n      expect(result.commands).toEqual([\n        'rm -rf myrepository.zip myrepository',\n        expect.stringMatching(/aws s3 cp s3:.* myrepository.zip/),\n        'unzip myrepository.zip 
-d myrepository',\n        'docker build --pull -t myregistry/myrepository:latest myrepository',\n      ]);\n    });\n\n    test('explicit tag', () => {\n      // GIVEN\n      const app = new App();\n      const stack = new Stack(app, 'Default', {\n        env: {\n          account: '111111111111',\n          region: 'us-east-1',\n        },\n      });\n      const ecrRegistry = 'myregistry';\n      const source = MirrorSource.fromDir(path.join(__dirname, 'docker-asset'), 'myrepository', { tag: 'mytag' });\n\n      // WHEN\n      const result = source.bind({\n        scope: stack,\n        ecrRegistry,\n      });\n\n      // THEN\n      expect(result.repositoryName).toEqual('myrepository');\n      expect(result.tag).toEqual('mytag');\n      expect(result.commands).toEqual([\n        'rm -rf myrepository.zip myrepository',\n        expect.stringMatching(/aws s3 cp s3:.* myrepository.zip/),\n        'unzip myrepository.zip -d myrepository',\n        'docker build --pull -t myregistry/myrepository:mytag myrepository',\n      ]);\n    });\n\n    test('syncJob is given permission to s3 asset', () => {\n      // GIVEN\n      const stack = new Stack();\n      const ecrRegistry = 'myregistry';\n      const source = MirrorSource.fromDir(path.join(__dirname, 'docker-asset'), 'myrepository');\n      const syncJob = new codebuild.Project(stack, 'SyncJob', {\n        buildSpec: codebuild.BuildSpec.fromObject({}),\n      });\n\n      // WHEN\n      source.bind({\n        scope: stack,\n        ecrRegistry,\n        syncJob,\n      });\n\n      const template = Template.fromStack(stack);\n\n      // THEN\n      template.hasResourceProperties('AWS::IAM::Policy', {\n        PolicyDocument: {\n          Statement: Match.arrayWith([{\n            Action: [\n              's3:GetObject*',\n              's3:GetBucket*',\n              's3:List*',\n            ],\n            Effect: 'Allow',\n            Resource: [\n              {\n                'Fn::Join': [\n                  
'',\n                  [\n                    'arn:', { Ref: 'AWS::Partition' }, ':s3:::',\n                    {\n                      'Fn::Sub': 'cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}',\n                    },\n                  ],\n                ],\n              },\n              {\n                'Fn::Join': [\n                  '',\n                  [\n                    'arn:', { Ref: 'AWS::Partition' }, ':s3:::',\n                    {\n                      'Fn::Sub': 'cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}',\n                    },\n                    '/*',\n                  ],\n                ],\n              },\n            ],\n          }]),\n        },\n      });\n    });\n\n    test('build args', () => {\n      // GIVEN\n      const app = new App();\n      const stack = new Stack(app, 'Default', {\n        env: {\n          account: '111111111111',\n          region: 'us-east-1',\n        },\n      });\n      const ecrRegistry = 'myregistry';\n      const source = MirrorSource.fromDir(path.join(__dirname, 'docker-asset'), 'myrepository', {\n        buildArgs: {\n          arg1: 'val1',\n          arg2: 'val2',\n        },\n      });\n      const syncJob = new codebuild.Project(stack, 'SyncJob', {\n        buildSpec: codebuild.BuildSpec.fromObject({}),\n      });\n\n      // WHEN\n      const result = source.bind({\n        scope: stack,\n        ecrRegistry,\n        syncJob,\n      });\n\n\n      // THEN\n      expect(result.commands).toEqual([\n        'rm -rf myrepository.zip myrepository',\n        expect.stringMatching(/aws s3 cp s3:.* myrepository.zip/),\n        'unzip myrepository.zip -d myrepository',\n        'docker build --pull -t myregistry/myrepository:latest --build-arg arg1=val1 --build-arg arg2=val2 myrepository',\n      ]);\n    });\n\n    test('can bind the same directory twice if they have different build args', () => {\n      // GIVEN\n      const stack = new Stack();\n      const 
ecrRegistry = 'myregistry';\n      const source1 = MirrorSource.fromDir(path.join(__dirname, 'docker-asset'), 'myrepository');\n      const source2 = MirrorSource.fromDir(path.join(__dirname, 'docker-asset'), 'myrepository', {\n        buildArgs: {\n          arg1: 'val1',\n          arg2: 'val2',\n        },\n      });\n\n      // WHEN\n      source1.bind({ scope: stack, ecrRegistry });\n      source2.bind({ scope: stack, ecrRegistry });\n\n      // THEN -- didn't throw\n    });\n  });\n});\n"
  },
  {
    "path": "lib/__tests__/run-test.sh",
    "content": "#!/bin/bash\nset -euo pipefail\nscriptdir=$(cd $(dirname $0) && pwd)\n\ncdk_app=\"npx ts-node lib/__tests__/integ.delivlib.ts\"\n\nif [ \"${1:-}\" == \"diff\" ]; then\n  echo \"I have disabled snapshot tests here and I'm not apologizing for it [- huijbers@]\"\n  exit 0\nfi\n\nexport TEST_STACK_NAME=\"delivlib-test\"\n\nif [ \"${1:-}\" == \"update\" ]; then\n  npx cdk --no-version-reporting -a \"${cdk_app}\" deploy ${2:-} ${3:-} ${4:-}\n  echo \"Stack deployed, now, go to the console and wait for the pipeline to fully stabilize\"\nfi\n"
  },
  {
    "path": "lib/__tests__/shellable.test.ts",
    "content": "import * as path from 'path';\nimport * as cdk from 'aws-cdk-lib';\nimport { Template, Match } from 'aws-cdk-lib/assertions';\nimport * as codebuild from 'aws-cdk-lib/aws-codebuild';\nimport { Shellable, ShellPlatform, WindowsPlatform } from '../../lib';\n\n\n// tslint:disable:max-line-length\n\ntest('can assume a refreshable role', () => {\n  const stack = new cdk.Stack(new cdk.App(), 'TestStack');\n\n  new Shellable(stack, 'MyShellable', {\n    scriptDirectory: path.join(__dirname, 'delivlib-tests/linux'),\n    entrypoint: 'test.sh',\n    assumeRole: {\n      profileName: 'profile',\n      roleArn: 'arn',\n      sessionName: 'session',\n      refresh: true,\n    },\n  });\n\n  const template = Template.fromStack(stack);\n\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Source: {\n      BuildSpec: Match.serializedJson({\n        version: '0.2',\n        phases: Match.objectLike({\n          pre_build: {\n            commands: Match.arrayWith([\n              'echo \"Downloading scripts from s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY}\"',\n              'aws s3 cp s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY} /tmp',\n              'mkdir -p /tmp/scriptdir',\n              'unzip /tmp/$(basename $SCRIPT_S3_KEY) -d /tmp/scriptdir',\n              'mkdir -p ~/.aws',\n              'touch ~/.aws/credentials',\n              'config=~/.aws/config',\n              'echo [profile profile]>> ${config}',\n              'echo credential_source = EcsContainer >> ${config}',\n              'echo role_session_name = session >> ${config}',\n              'echo role_arn = arn >> $config',\n              'export AWS_PROFILE=profile',\n              'export AWS_SDK_LOAD_CONFIG=1',\n            ]),\n          },\n        }),\n      }),\n    },\n  });\n});\n\ntest('minimal configuration', () => {\n  const stack = new cdk.Stack(new cdk.App(), 'TestStack');\n\n  new Shellable(stack, 'MyShellable', {\n    scriptDirectory: path.join(__dirname, 
'delivlib-tests/linux'),\n    entrypoint: 'test.sh',\n  });\n\n  const template = Template.fromStack(stack);\n\n  template.resourceCountIs('AWS::CodeBuild::Project', 1);\n});\n\ntest('assume role', () => {\n  const stack = new cdk.Stack(new cdk.App(), 'TestStack');\n\n  new Shellable(stack, 'MyShellable', {\n    scriptDirectory: path.join(__dirname, 'delivlib-tests/linux'),\n    entrypoint: 'test.sh',\n    assumeRole: {\n      roleArn: 'arn:aws:role:to:assume',\n      sessionName: 'my-session-name',\n    },\n  });\n\n  const template = Template.fromStack(stack);\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Source: {\n      BuildSpec: Match.serializedJson({\n        version: '0.2',\n        phases: Match.objectLike({\n          pre_build: {\n            commands: Match.arrayWith([\n              'AWS_STS_REGIONAL_ENDPOINTS=legacy aws sts assume-role --role-arn \\\"arn:aws:role:to:assume\\\" --role-session-name \\\"my-session-name\\\"  > $creds',\n            ]),\n          },\n        }),\n      }),\n    },\n  });\n});\n\ntest('assume role with external-id', () => {\n  const stack = new cdk.Stack(new cdk.App(), 'TestStack');\n\n  new Shellable(stack, 'MyShellable', {\n    scriptDirectory: path.join(__dirname, 'delivlib-tests/linux'),\n    entrypoint: 'test.sh',\n    assumeRole: {\n      roleArn: 'arn:aws:role:to:assume',\n      sessionName: 'my-session-name',\n      externalId: 'my-externa-id',\n    },\n  });\n\n  const template = Template.fromStack(stack);\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Source: {\n      BuildSpec: Match.serializedJson({\n        version: '0.2',\n        phases: Match.objectLike({\n          pre_build: {\n            commands: Match.arrayWith([\n              'AWS_STS_REGIONAL_ENDPOINTS=legacy aws sts assume-role --role-arn \\\"arn:aws:role:to:assume\\\" --role-session-name \\\"my-session-name\\\" --external-id \\\"my-externa-id\\\" > $creds',\n            ]),\n          },\n        }),\n   
   }),\n    },\n  });\n});\n\ntest('assume role with regional endpoints', () => {\n  const stack = new cdk.Stack(new cdk.App(), 'TestStack');\n\n  new Shellable(stack, 'MyShellable', {\n    scriptDirectory: path.join(__dirname, 'delivlib-tests/linux'),\n    entrypoint: 'test.sh',\n    assumeRole: {\n      roleArn: 'arn:aws:role:to:assume',\n      sessionName: 'my-session-name',\n    },\n    useRegionalStsEndpoints: true,\n  });\n\n  const template = Template.fromStack(stack);\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Source: {\n      BuildSpec: Match.serializedJson({\n        version: '0.2',\n        phases: Match.objectLike({\n          pre_build: {\n            commands: Match.arrayWith([\n              'AWS_STS_REGIONAL_ENDPOINTS=regional aws sts assume-role --role-arn \\\"arn:aws:role:to:assume\\\" --role-session-name \\\"my-session-name\\\"  > $creds',\n            ]),\n          },\n        }),\n      }),\n    },\n  });\n\n});\n\ntest('assume role with global endpoints', () => {\n  const stack = new cdk.Stack(new cdk.App(), 'TestStack');\n\n  new Shellable(stack, 'MyShellable', {\n    scriptDirectory: path.join(__dirname, 'delivlib-tests/linux'),\n    entrypoint: 'test.sh',\n    assumeRole: {\n      roleArn: 'arn:aws:role:to:assume',\n      sessionName: 'my-session-name',\n    },\n    useRegionalStsEndpoints: false,\n  });\n\n  const template = Template.fromStack(stack);\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Source: {\n      BuildSpec: Match.serializedJson({\n        version: '0.2',\n        phases: Match.objectLike({\n          pre_build: {\n            commands: Match.arrayWith([\n              'AWS_STS_REGIONAL_ENDPOINTS=legacy aws sts assume-role --role-arn \\\"arn:aws:role:to:assume\\\" --role-session-name \\\"my-session-name\\\"  > $creds',\n            ]),\n          },\n        }),\n      }),\n    },\n  });\n\n});\n\ntest('assume role not supported on windows', () => {\n  const stack = new 
cdk.Stack(new cdk.App(), 'TestStack');\n\n  expect(() => new Shellable(stack, 'MyShellable', {\n    scriptDirectory: path.join(__dirname, 'delivlib-tests/linux'),\n    platform: ShellPlatform.Windows,\n    entrypoint: 'test.sh',\n    assumeRole: {\n      roleArn: 'arn:aws:role:to:assume',\n      sessionName: 'my-session-name',\n    },\n  })).toThrow('assumeRole is not supported on Windows');\n});\n\ntest('alarm options - defaults', () => {\n  const stack = new cdk.Stack(new cdk.App(), 'TestStack');\n\n  new Shellable(stack, 'MyShellable', {\n    scriptDirectory: path.join(__dirname, 'delivlib-tests/linux'),\n    entrypoint: 'test.sh',\n  });\n\n  const template = Template.fromStack(stack);\n  template.hasResourceProperties('AWS::CloudWatch::Alarm', {\n    EvaluationPeriods: 1,\n    Threshold: 1,\n    Period: 300,\n  });\n});\n\ntest('alarm options - custom', () => {\n  const stack = new cdk.Stack(new cdk.App(), 'TestStack');\n\n  new Shellable(stack, 'MyShellable', {\n    scriptDirectory: path.join(__dirname, 'delivlib-tests/linux'),\n    entrypoint: 'test.sh',\n    alarmEvaluationPeriods: 2,\n    alarmThreshold: 5,\n    alarmPeriod: cdk.Duration.minutes(60),\n  });\n\n  const template = Template.fromStack(stack);\n  template.hasResourceProperties('AWS::CloudWatch::Alarm', {\n    EvaluationPeriods: 2,\n    Threshold: 5,\n    Period: 3600,\n  });\n});\n\ntest('privileged mode', () => {\n  const stack = new cdk.Stack(new cdk.App(), 'TestStack');\n\n  new Shellable(stack, 'AllowDocker', {\n    scriptDirectory: path.join(__dirname, 'delivlib-tests/linux'),\n    entrypoint: 'test.sh',\n    privileged: true,\n  });\n\n  const template = Template.fromStack(stack);\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Environment: {\n      PrivilegedMode: true,\n    },\n  });\n});\n\ntest('environment variables', () => {\n  const stack = new cdk.Stack(new cdk.App(), 'TestStack');\n\n  new Shellable(stack, 'EnvironmentVariables', {\n    scriptDirectory: 
path.join(__dirname, 'delivlib-tests/linux'),\n    entrypoint: 'test.sh',\n    environment: {\n      ENV_VAR: 'env-var-value',\n      UNDEFINED_VAR: undefined,\n      EMPTY_STRING: '',\n    },\n    environmentSecrets: {\n      ENV_VAR_SECRET: 'arn:test:secretsmanager:region:000000000000:secret:env-var-secret-name-abc123',\n    },\n    environmentParameters: {\n      ENV_VAR_PARAMETER: 'env-var-parameter-name',\n    },\n  });\n  const template = Template.fromStack(stack);\n\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Environment: {\n      EnvironmentVariables: [\n        {\n          Name: 'SCRIPT_S3_BUCKET',\n          Type: 'PLAINTEXT',\n          Value: {\n            'Fn::Sub': 'cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}',\n          },\n        },\n        {\n          Name: 'SCRIPT_S3_KEY',\n          Type: 'PLAINTEXT',\n          Value: '3d34b07ba871989d030649c646b3096ba7c78ca531897bcdb0670774d2f9d3e4.zip',\n        },\n        {\n          Name: 'ENV_VAR',\n          Type: 'PLAINTEXT',\n          Value: 'env-var-value',\n        },\n        {\n          Name: 'EMPTY_STRING',\n          Type: 'PLAINTEXT',\n          Value: '',\n        },\n        {\n          Name: 'ENV_VAR_SECRET',\n          Type: 'SECRETS_MANAGER',\n          Value: 'env-var-secret-name',\n        },\n        {\n          Name: 'ENV_VAR_PARAMETER',\n          Type: 'PARAMETER_STORE',\n          Value: 'env-var-parameter-name',\n        },\n      ],\n    },\n  });\n\n  template.hasResourceProperties('AWS::IAM::Policy', {\n    PolicyDocument: {\n      Statement: [\n        {\n          Action: 'ssm:GetParameters',\n          Effect: 'Allow',\n          Resource: {\n            'Fn::Join': [\n              '',\n              [\n                'arn:',\n                {\n                  Ref: 'AWS::Partition',\n                },\n                ':ssm:',\n                {\n                  Ref: 'AWS::Region',\n                },\n                ':',\n  
              {\n                  Ref: 'AWS::AccountId',\n                },\n                ':parameter/env-var-parameter-name',\n              ],\n            ],\n          },\n        },\n        {\n          Action: 'secretsmanager:GetSecretValue',\n          Effect: 'Allow',\n          Resource: {\n            'Fn::Join': [\n              '',\n              [\n                'arn:',\n                {\n                  Ref: 'AWS::Partition',\n                },\n                ':secretsmanager:',\n                {\n                  Ref: 'AWS::Region',\n                },\n                ':',\n                {\n                  Ref: 'AWS::AccountId',\n                },\n                ':secret:env-var-secret-name-??????',\n              ],\n            ],\n          },\n        },\n        {\n          Action: [\n            'logs:CreateLogGroup',\n            'logs:CreateLogStream',\n            'logs:PutLogEvents',\n          ],\n          Effect: 'Allow',\n          Resource: [\n            {\n              'Fn::Join': [\n                '',\n                [\n                  'arn:',\n                  {\n                    Ref: 'AWS::Partition',\n                  },\n                  ':logs:',\n                  {\n                    Ref: 'AWS::Region',\n                  },\n                  ':',\n                  {\n                    Ref: 'AWS::AccountId',\n                  },\n                  ':log-group:/aws/codebuild/',\n                  {\n                    Ref: 'EnvironmentVariablesD266B682',\n                  },\n                ],\n              ],\n            },\n            {\n              'Fn::Join': [\n                '',\n                [\n                  'arn:',\n                  {\n                    Ref: 'AWS::Partition',\n                  },\n                  ':logs:',\n                  {\n                    Ref: 'AWS::Region',\n                  },\n                  ':',\n                  {\n     
               Ref: 'AWS::AccountId',\n                  },\n                  ':log-group:/aws/codebuild/',\n                  {\n                    Ref: 'EnvironmentVariablesD266B682',\n                  },\n                  ':*',\n                ],\n              ],\n            },\n          ],\n        },\n        {\n          Action: [\n            'codebuild:CreateReportGroup',\n            'codebuild:CreateReport',\n            'codebuild:UpdateReport',\n            'codebuild:BatchPutTestCases',\n            'codebuild:BatchPutCodeCoverages',\n          ],\n          Effect: 'Allow',\n          Resource: {\n            'Fn::Join': [\n              '',\n              [\n                'arn:',\n                {\n                  Ref: 'AWS::Partition',\n                },\n                ':codebuild:',\n                {\n                  Ref: 'AWS::Region',\n                },\n                ':',\n                {\n                  Ref: 'AWS::AccountId',\n                },\n                ':report-group/',\n                {\n                  Ref: 'EnvironmentVariablesD266B682',\n                },\n                '-*',\n              ],\n            ],\n          },\n        },\n        {\n          Action: [\n            'ssmmessages:CreateControlChannel',\n            'ssmmessages:CreateDataChannel',\n            'ssmmessages:OpenControlChannel',\n            'ssmmessages:OpenDataChannel',\n            'logs:DescribeLogGroups',\n            'logs:CreateLogStream',\n            'logs:PutLogEvents',\n            's3:GetEncryptionConfiguration',\n            's3:PutObject',\n          ],\n          Effect: 'Allow',\n          Resource: '*',\n        },\n        {\n          Action: [\n            's3:GetObject*',\n            's3:GetBucket*',\n            's3:List*',\n          ],\n          Effect: 'Allow',\n          Resource: [\n            {\n              'Fn::Join': [\n                '',\n                [\n                  'arn:',\n  
                {\n                    Ref: 'AWS::Partition',\n                  },\n                  ':s3:::',\n                  {\n                    'Fn::Sub': 'cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}',\n                  },\n                ],\n              ],\n            },\n            {\n              'Fn::Join': [\n                '',\n                [\n                  'arn:',\n                  {\n                    Ref: 'AWS::Partition',\n                  },\n                  ':s3:::',\n                  {\n                    'Fn::Sub': 'cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}',\n                  },\n                  '/*',\n                ],\n              ],\n            },\n          ],\n        },\n        {\n          Action: [\n            'secretsmanager:GetSecretValue',\n            'secretsmanager:DescribeSecret',\n          ],\n          Effect: 'Allow',\n          Resource: 'arn:test:secretsmanager:region:000000000000:secret:env-var-secret-name-abc123',\n        },\n        {\n          Action: [\n            'ssm:DescribeParameters',\n            'ssm:GetParameters',\n            'ssm:GetParameter',\n            'ssm:GetParameterHistory',\n          ],\n          Effect: 'Allow',\n          Resource: {\n            'Fn::Join': [\n              '',\n              [\n                'arn:',\n                {\n                  Ref: 'AWS::Partition',\n                },\n                ':ssm:',\n                {\n                  Ref: 'AWS::Region',\n                },\n                ':',\n                {\n                  Ref: 'AWS::AccountId',\n                },\n                ':parameter/env-var-parameter-name',\n              ],\n            ],\n          },\n        },\n      ],\n      Version: '2012-10-17',\n    },\n    PolicyName: 'EnvironmentVariablesRoleDefaultPolicy1BCDD5D0',\n    Roles: [\n      {\n        Ref: 'EnvironmentVariablesRole93B5CD9F',\n      },\n    ],\n  
});\n});\n\ntest('can exclude files from scriptDirectory', () => {\n  const app = new cdk.App();\n  const stack = new cdk.Stack(app, 'TestStack');\n\n  new Shellable(stack, 'EnvironmentVariables', {\n    scriptDirectory: path.join(__dirname, 'delivlib-tests/linux'),\n    // This should result in only `test.sh` being included\n    excludeFilePatterns: ['*.sh', '**/README', '!test.sh'],\n    entrypoint: 'test.sh',\n  });\n\n  const template = Template.fromStack(stack);\n  template.hasResourceProperties('AWS::CodeBuild::Project', {\n    Environment: {\n      EnvironmentVariables: [\n        {\n          Name: 'SCRIPT_S3_BUCKET',\n          Type: 'PLAINTEXT',\n          Value: {\n            'Fn::Sub': 'cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}',\n          },\n        },\n        {\n          Name: 'SCRIPT_S3_KEY',\n          Type: 'PLAINTEXT',\n          // This is the hash of a directory with only `test.sh` included\n          Value: 'f2ad7bd80137ae8bf3e86164ae8943f7ffbe8b99470f91eb3f24c3b83873a089.zip',\n        },\n      ],\n    },\n  });\n});\n\n\ntest('WindowsPlatform installs node via chocolatey by default', () => {\n  const platform = new WindowsPlatform(codebuild.WindowsBuildImage.WIN_SERVER_CORE_2019_BASE);\n\n  expect(platform.installCommands()).toEqual([\n    'Import-Module \"C:\\\\ProgramData\\\\chocolatey\\\\helpers\\\\chocolateyProfile.psm1\"',\n    'C:\\\\ProgramData\\\\chocolatey\\\\bin\\\\choco.exe upgrade nodejs-lts -y',\n  ]);\n});\n\ntest('WindowsPlatform can disable chocolatey node upgrade', () => {\n  const platform = new WindowsPlatform(codebuild.WindowsBuildImage.WIN_SERVER_CORE_2019_BASE, {\n    upgradeNodeWithChocolatey: false,\n  });\n\n  expect(platform.installCommands()).toBeUndefined();\n});\n"
  },
  {
    "path": "lib/__tests__/signing.test.ts",
    "content": "import { App, Stack } from 'aws-cdk-lib';\nimport { Match, Template } from 'aws-cdk-lib/assertions';\nimport { Repository } from 'aws-cdk-lib/aws-codecommit';\nimport { Role, ServicePrincipal } from 'aws-cdk-lib/aws-iam';\nimport { Function } from 'aws-cdk-lib/aws-lambda';\nimport { Bucket } from 'aws-cdk-lib/aws-s3';\nimport { Pipeline } from '../pipeline';\nimport { CodeCommitRepo } from '../repo';\n\ndescribe('with standard pipeline', () => {\n  let stack: Stack;\n  let pipeline: Pipeline;\n  beforeEach(() => {\n    const app = new App();\n    stack = new Stack(app, 'TestStack');\n\n    pipeline = new Pipeline(stack, 'TestPipeline', {\n      repo: new CodeCommitRepo(new Repository(stack, 'Repo', { repositoryName: 'test' })),\n    });\n  });\n\n  test('can configure project and sign stage for NuGet signing', () => {\n    // GIVEN\n    const signingBucket = Bucket.fromBucketName(stack, 'SigningBucket', 'signing-bucket');\n    const signingLambda = Function.fromFunctionName(stack, 'SigningLambda', 'signing-lambda');\n    const accessRole = Role.fromRoleName(stack, 'AccessRole', 'access-role');\n\n    // WHEN\n    pipeline.signNuGetWithSigner({\n      signingBucket,\n      signingLambda,\n      accessRole,\n    });\n\n    // THEN\n    // verify the sign codebuild project is configured correctly\n    Template.fromStack(stack).hasResourceProperties('AWS::CodeBuild::Project', {\n      Artifacts: {\n        Type: 'NO_ARTIFACTS',\n      },\n      Environment: {\n        ComputeType: 'BUILD_GENERAL1_MEDIUM',\n        EnvironmentVariables: [\n          {\n            Name: 'SCRIPT_S3_BUCKET',\n            Type: 'PLAINTEXT',\n            Value: {\n              'Fn::Sub': 'cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}',\n            },\n          },\n          {\n            Name: 'SCRIPT_S3_KEY',\n            Type: 'PLAINTEXT',\n            Value: 'a04bdf56b18c26031d8d67e4d1f9acbd9f2f0126d20ae0bb88be1491f63b18bf.zip',\n          },\n          {\n    
        Name: 'SIGNING_BUCKET_NAME',\n            Type: 'PLAINTEXT',\n            Value: 'signing-bucket',\n          },\n          {\n            Name: 'SIGNING_LAMBDA_ARN',\n            Type: 'PLAINTEXT',\n            Value: {\n              'Fn::Join': [\n                '',\n                [\n                  'arn:',\n                  {\n                    Ref: 'AWS::Partition',\n                  },\n                  ':lambda:',\n                  {\n                    Ref: 'AWS::Region',\n                  },\n                  ':',\n                  {\n                    Ref: 'AWS::AccountId',\n                  },\n                  ':function:signing-lambda',\n                ],\n              ],\n            },\n          },\n          {\n            Name: 'ACCESS_ROLE_ARN',\n            Type: 'PLAINTEXT',\n            Value: {\n              'Fn::Join': [\n                '',\n                [\n                  'arn:',\n                  {\n                    Ref: 'AWS::Partition',\n                  },\n                  ':iam::',\n                  {\n                    Ref: 'AWS::AccountId',\n                  },\n                  ':role/access-role',\n                ],\n              ],\n            },\n          },\n        ],\n        Image: 'public.ecr.aws/jsii/superchain:1-bookworm-slim-node22',\n        ImagePullCredentialsType: 'SERVICE_ROLE',\n        PrivilegedMode: false,\n        Type: 'LINUX_CONTAINER',\n      },\n      ServiceRole: {\n        'Fn::GetAtt': [\n          'TestPipelineNuGetSigningRole00994E45',\n          'Arn',\n        ],\n      },\n      Source: {\n        BuildSpec: '{\\n  \\\"version\\\": \\\"0.2\\\",\\n  \\\"phases\\\": {\\n    \\\"install\\\": {\\n      \\\"commands\\\": [\\n        \\\"command -v yarn > /dev/null || npm install --global yarn\\\"\\n      ]\\n    },\\n    \\\"pre_build\\\": {\\n      \\\"commands\\\": [\\n        \\\"echo \\\\\\\"Downloading scripts from 
s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY}\\\\\\\"\\\",\\n        \\\"aws s3 cp s3://${SCRIPT_S3_BUCKET}/${SCRIPT_S3_KEY} /tmp\\\",\\n        \\\"mkdir -p /tmp/scriptdir\\\",\\n        \\\"unzip /tmp/$(basename $SCRIPT_S3_KEY) -d /tmp/scriptdir\\\"\\n      ]\\n    },\\n    \\\"build\\\": {\\n      \\\"commands\\\": [\\n        \\\"export SCRIPT_DIR=/tmp/scriptdir\\\",\\n        \\\"echo \\\\\\\"Running sign.sh\\\\\\\"\\\",\\n        \\\"/bin/bash /tmp/scriptdir/sign.sh\\\"\\n      ]\\n    }\\n  },\\n  \\\"artifacts\\\": {\\n    \\\"files\\\": [\\n      \\\"**/*\\\"\\n    ],\\n    \\\"base-directory\\\": \\\".\\\"\\n  }\\n}',\n        Type: 'NO_SOURCE',\n      },\n      Cache: {\n        Type: 'NO_CACHE',\n      },\n      EncryptionKey: {\n        'Fn::GetAtt': [\n          'TestPipelineBuildPipelineArtifactsBucketEncryptionKeyCD151124',\n          'Arn',\n        ],\n      },\n    });\n\n    // verify the sign stage is added to pipeline\n    Template.fromStack(stack).hasResourceProperties('AWS::CodePipeline::Pipeline', {\n      Stages: [\n        {\n          Actions: [\n            {\n              ActionTypeId: {\n                Category: 'Source',\n                Owner: 'AWS',\n                Provider: 'CodeCommit',\n                Version: '1',\n              },\n              Configuration: {\n                RepositoryName: {\n                  'Fn::GetAtt': [\n                    'Repo02AC86CF',\n                    'Name',\n                  ],\n                },\n                BranchName: 'master',\n                PollForSourceChanges: false,\n              },\n              Name: 'Pull',\n              OutputArtifacts: [\n                {\n                  Name: 'Source',\n                },\n              ],\n              RoleArn: {\n                'Fn::GetAtt': [\n                  'TestPipelineBuildPipelineSourcePullCodePipelineActionRoleE3FDD1B5',\n                  'Arn',\n                ],\n              },\n              RunOrder: 1,\n  
          },\n          ],\n          Name: 'Source',\n        },\n        {\n          Actions: [\n            {\n              ActionTypeId: {\n                Category: 'Build',\n                Owner: 'AWS',\n                Provider: 'CodeBuild',\n                Version: '1',\n              },\n              Configuration: {\n                ProjectName: {\n                  Ref: 'TestPipelineBuildProject799CEA07',\n                },\n              },\n              InputArtifacts: [\n                {\n                  Name: 'Source',\n                },\n              ],\n              RoleArn: {\n                'Fn::GetAtt': [\n                  'TestPipelineBuildPipelineBuildCodePipelineActionRole7BE59F77',\n                  'Arn',\n                ],\n              },\n              RunOrder: 1,\n            },\n          ],\n          Name: 'Build',\n        },\n        {\n          Actions: [\n            {\n              ActionTypeId: {\n                Category: 'Build',\n                Owner: 'AWS',\n                Provider: 'CodeBuild',\n                Version: '1',\n              },\n              Configuration: {\n                ProjectName: {\n                  Ref: 'TestPipelineNuGetSigningCE9AB81F',\n                },\n              },\n              InputArtifacts: [\n                {\n                  Name: 'Artifact_Build_Build',\n                },\n              ],\n              Name: 'NuGetSigningSign',\n              OutputArtifacts: [\n                {\n                  Name: 'Artifact_Sign_NuGetSigningSign',\n                },\n              ],\n              RoleArn: {\n                'Fn::GetAtt': [\n                  'TestPipelineBuildPipelineSignNuGetSigningSignCodePipelineActionRoleDD2CA5AF',\n                  'Arn',\n                ],\n              },\n              RunOrder: 1,\n            },\n          ],\n          Name: 'Sign',\n        },\n      ],\n    });\n  });\n\n  test('can specify signerProfileName 
and signerProfileOwner for environment variables', () => {\n    // GIVEN\n    const signingBucket = Bucket.fromBucketName(stack, 'SigningBucket', 'signing-bucket');\n    const signingLambda = Function.fromFunctionName(stack, 'SigningLambda', 'signing-lambda');\n    const accessRole = Role.fromRoleName(stack, 'AccessRole', 'access-role');\n\n    // WHEN\n    pipeline.signNuGetWithSigner({\n      signingBucket,\n      signingLambda,\n      accessRole,\n      signerProfileName: 'test-profile-name',\n      signerProfileOwner: 'test-profile-owner',\n    });\n\n    // THEN\n    Template.fromStack(stack).hasResourceProperties('AWS::CodeBuild::Project', Match.objectLike({\n      Environment: Match.objectLike({\n        EnvironmentVariables: [\n          {\n            Name: 'SCRIPT_S3_BUCKET',\n            Type: 'PLAINTEXT',\n            Value: {\n              'Fn::Sub': 'cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}',\n            },\n          },\n          {\n            Name: 'SCRIPT_S3_KEY',\n            Type: 'PLAINTEXT',\n            Value: 'a04bdf56b18c26031d8d67e4d1f9acbd9f2f0126d20ae0bb88be1491f63b18bf.zip',\n          },\n          {\n            Name: 'SIGNING_BUCKET_NAME',\n            Type: 'PLAINTEXT',\n            Value: 'signing-bucket',\n          },\n          {\n            Name: 'SIGNING_LAMBDA_ARN',\n            Type: 'PLAINTEXT',\n            Value: {\n              'Fn::Join': [\n                '',\n                [\n                  'arn:',\n                  {\n                    Ref: 'AWS::Partition',\n                  },\n                  ':lambda:',\n                  {\n                    Ref: 'AWS::Region',\n                  },\n                  ':',\n                  {\n                    Ref: 'AWS::AccountId',\n                  },\n                  ':function:signing-lambda',\n                ],\n              ],\n            },\n          },\n          {\n            Name: 'ACCESS_ROLE_ARN',\n            Type: 
'PLAINTEXT',\n            Value: {\n              'Fn::Join': [\n                '',\n                [\n                  'arn:',\n                  {\n                    Ref: 'AWS::Partition',\n                  },\n                  ':iam::',\n                  {\n                    Ref: 'AWS::AccountId',\n                  },\n                  ':role/access-role',\n                ],\n              ],\n            },\n          },\n          {\n            Name: 'SIGNER_PROFILE_NAME',\n            Type: 'PLAINTEXT',\n            Value: 'test-profile-name',\n          },\n          {\n            Name: 'SIGNER_PROFILE_OWNER',\n            Type: 'PLAINTEXT',\n            Value: 'test-profile-owner',\n          },\n        ],\n      }),\n    }));\n  });\n\n  test('can provide a service role used for signing codebuild operations', () => {\n    // GIVEN\n    const signingBucket = Bucket.fromBucketName(stack, 'SigningBucket', 'signing-bucket');\n    const signingLambda = Function.fromFunctionName(stack, 'SigningLambda', 'signing-lambda');\n    const accessRole = Role.fromRoleName(stack, 'AccessRole', 'access-role');\n    const serviceRole = new Role(stack, 'ServiceRole', {\n      roleName: 'signing-codebuild-role',\n      assumedBy: new ServicePrincipal('codebuild.amazonaws.com'),\n    });\n\n    // WHEN\n    pipeline.signNuGetWithSigner({\n      signingBucket,\n      signingLambda,\n      accessRole,\n      serviceRole,\n    });\n\n    // THEN\n    Template.fromStack(stack).hasResourceProperties('AWS::IAM::Role', {\n      AssumeRolePolicyDocument: {\n        Statement: [\n          {\n            Action: 'sts:AssumeRole',\n            Effect: 'Allow',\n            Principal: {\n              Service: 'codebuild.amazonaws.com',\n            },\n          },\n        ],\n        Version: '2012-10-17',\n      },\n      ManagedPolicyArns: [\n        {\n          'Fn::Join': [\n            '',\n            [\n              'arn:',\n              {\n                
Ref: 'AWS::Partition',\n              },\n              ':iam::aws:policy/AmazonElasticContainerRegistryPublicReadOnly',\n            ],\n          ],\n        },\n      ],\n      RoleName: 'signing-codebuild-role',\n    });\n  });\n});\n"
  },
  {
    "path": "lib/__tests__/test-stack.ts",
    "content": "import * as path from 'path';\nimport {\n  App, Stack, StackProps,\n  aws_events as events,\n  aws_iam as iam,\n  aws_kms as kms,\n} from 'aws-cdk-lib';\nimport { LinuxBuildImage } from 'aws-cdk-lib/aws-codebuild';\nimport * as delivlib from '../../lib';\n\n\nconst testDir = path.join(__dirname, 'delivlib-tests');\n\nexport class TestStack extends Stack {\n  constructor(parent: App, id: string, props: StackProps = { }) {\n    super(parent, id, props);\n\n    //\n    // SOURCE\n    //\n\n    const githubRepo = new delivlib.WritableGitHubRepo({\n      repository: process.env.REPO_NAME ?? 'awslabs/aws-delivlib-sample',\n      tokenSecretArn: process.env.TOKEN_SECRET_ARN ?? 'arn:aws:secretsmanager:us-east-1:712950704752:secret:github-token-QDP6QX',\n      sshKeySecret: { secretArn: process.env.SSH_KEY_SECRET ?? 'arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/github-ssh-okGazo' },\n      commitEmail: 'foo@bar.com',\n      commitUsername: 'foobar',\n    });\n\n    //\n    // BUILD\n    //\n\n    const pipeline = new delivlib.Pipeline(this, 'CodeCommitPipeline', {\n      title: 'aws-delivlib test pipeline',\n      repo: githubRepo,\n      notificationEmail: 'aws-cdk-dev+delivlib-test@amazon.com',\n      environment: {\n        DELIVLIB_ENV_TEST: 'MAGIC_1924',\n      },\n      dryRun: true,\n      buildImage: LinuxBuildImage.fromDockerRegistry('public.ecr.aws/jsii/superchain:1-bullseye-slim-node18'),\n    });\n\n    //\n    // TEST\n    //\n\n    // add a test that runs on an ubuntu linux\n    pipeline.addTest('HelloLinux', {\n      platform: delivlib.ShellPlatform.LinuxUbuntu,\n      entrypoint: 'test.sh',\n      scriptDirectory: path.join(testDir, 'linux'),\n    });\n\n    // This test takes a lot of time (~10 minutes), which is annoying during testing\n    const WINDOWS = false;\n    if (WINDOWS) {\n      // add a test that runs on Windows\n      pipeline.addTest('HelloWindows', {\n        platform: delivlib.ShellPlatform.Windows,\n        
entrypoint: 'test.ps1',\n        scriptDirectory: path.join(testDir, 'windows'),\n      });\n    }\n\n    const externalId = 'require-me-please';\n\n    const role = new iam.Role(this, 'AssumeMe', {\n      assumedBy: new iam.AccountPrincipal(Stack.of(this).account),\n      externalIds: [externalId],\n    });\n\n    pipeline.addTest('AssumeRole', {\n      entrypoint: 'test.sh',\n      scriptDirectory: path.join(testDir, 'assume-role'),\n      assumeRole: {\n        roleArn: role.roleArn,\n        sessionName: 'assume-role-test',\n        externalId,\n      },\n      environment: {\n        EXPECTED_ROLE_NAME: role.roleName,\n      },\n    });\n\n    const action = pipeline.addShellable('Test', 'GenerateTwoArtifacts', {\n      entrypoint: 'void.sh',\n      scriptDirectory: path.join(testDir, 'linux'),\n      buildSpec: delivlib.BuildSpec.simple({\n        build: [\n          'mkdir -p output1 output2',\n          'echo \\'{\"name\": \"output1\", \"version\": \"1.2.3\", \"commit\": \"abcdef\"}\\' > output1/build.json',\n          'echo \\'{\"name\": \"output2\", \"version\": \"1.2.3\", \"commit\": \"abcdef\"}\\' > output2/build.json',\n        ],\n        artifactDirectory: 'output1',\n        additionalArtifactDirectories: {\n          artifact2: 'output2',\n        },\n      }),\n    }).action;\n    const shellableArtifacts = action.actionProperties.outputs;\n\n    //\n    // CANARY\n    //\n\n    pipeline.addCanary('HelloCanary', {\n      schedule: events.Schedule.expression('rate(1 minute)'),\n      scriptDirectory: path.join(testDir, 'linux'),\n      entrypoint: 'test.sh',\n    });\n\n    //\n    // PUBLISH\n    //\n\n    const dryRun = false;\n\n    pipeline.publishToNpm({\n      npmTokenSecret: { secretArn: 'arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/npm-MhaWgx' },\n      access: delivlib.NpmAccess.RESTRICTED,\n      ssmPrefix: '/published/jsii-sample/npm',\n      dryRun,\n    });\n\n    // this creates a self-signed certificate\n    const 
codeSign = new delivlib.CodeSigningCertificate(this, 'X509CodeSigningKey', {\n      distinguishedName: {\n        commonName: 'delivlib-test',\n        country: 'IL',\n        emailAddress: 'aws-cdk-dev+delivlib-test@amazon.com',\n        locality: 'Zity',\n        organizationName: 'Amazon Test',\n        organizationalUnitName: 'AWS',\n        stateOrProvince: 'Ztate',\n      },\n      retainPrivateKey: false,\n    });\n\n    pipeline.publishToNuGet({\n      nugetApiKeySecret: { secretArn: 'arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/nuget-jDbgrN' },\n      codeSign,\n      ssmPrefix: '/published/jsii-sample/nuget',\n      dryRun,\n    });\n\n    const signingKey = new delivlib.OpenPGPKeyPair(this, 'CodeSign', {\n      email: 'aws-cdk-dev+delivlib@amazon.com',\n      encryptionKey: new kms.Key(this, 'CodeSign-CMK'),\n      expiry: '4y',\n      identity: 'aws-cdk-dev',\n      keySizeBits: 4_096,\n      pubKeyParameterName: `/${this.node.path}/CodeSign.pub`,\n      secretName: this.node.path + '/CodeSign',\n      version: 0,\n      removalPolicy: delivlib.OpenPGPKeyPairRemovalPolicy.DESTROY_IMMEDIATELY,\n    });\n\n    pipeline.publishToMaven({\n      mavenLoginSecret: { secretArn: 'arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/maven-S4Q2y3' },\n      mavenEndpoint: 'https://aws.oss.sonatype.org:443/',\n      signingKey,\n      stagingProfileId: '68a05363083174',\n      ssmPrefix: '/published/jsii-sample/maven',\n      dryRun,\n    });\n\n    pipeline.publishToGitHub({\n      githubRepo,\n      signingKey,\n      additionalInputArtifacts: shellableArtifacts,\n      ssmPrefix: '/published/jsii-sample/github',\n      dryRun,\n    });\n\n    pipeline.publishToGitHubPages({\n      githubRepo,\n      dryRun,\n    });\n\n    pipeline.publishToPyPI({\n      loginSecret: { secretArn: 'arn:aws:secretsmanager:us-east-1:712950704752:secret:delivlib/pypi-tp8M57' },\n      ssmPrefix: '/published/jsii-sample/pypi',\n      dryRun,\n    });\n\n    
// publish go bindings to awslabs/aws-delivlib-sample under the \"golang\"\n    // branch (repository is derived from \"go.moduleName\" in package.json)\n    pipeline.publishToGolang({\n      githubTokenSecret: { secretArn: githubRepo.tokenSecretArn },\n      gitBranch: 'golang',\n      gitUserEmail: 'aws-cdk-dev+delivlib@amazon.com',\n      gitUserName: 'Delivlib Tests',\n      ssmPrefix: '/published/jsii-sample/golang',\n      dryRun,\n    });\n\n    //\n    // BUMP\n\n    pipeline.autoBump({\n      bumpCommand: 'npm i && npm run bump',\n    });\n\n    //\n    // CHANGE CONTROL\n    //\n\n    pipeline.addChangeControl();\n  }\n}\n"
  },
  {
    "path": "lib/__tests__/watcher-handler.test.ts",
    "content": "import { LambdaActionStateChangeEvent, LambdaExecutionStateChangeEvent, cloudwatch, handler } from '../../lib/pipeline-watcher/handler/watcher-handler';\n\ncloudwatch.putMetricData = jest.fn();\n\ndescribe('watcher-handler', () => {\n  beforeEach(() => {\n    process.env.METRIC_NAME = 'metricName';\n    process.env.METRIC_NAMESPACE = 'metricNamespace';\n  });\n\n  test('throws an error if PutMetricData fails', async () => {\n    expect.assertions(1);\n    cloudwatch.putMetricData = jest.fn(_request => {\n      return new Promise((_, reject) => reject(new Error('fail')));\n    }) as any;\n    try {\n      await handler(actionExecutionEvent());\n    } catch (err: any) {\n      expect(err.message).toEqual('fail');\n    }\n  });\n\n  test('throws an error if METRIC_NAME is undefined', async () => {\n    delete process.env.METRIC_NAME;\n    expect.assertions(1);\n    try {\n      await handler(actionExecutionEvent());\n    } catch (err: any) {\n      expect(err.message).toMatch(/environment variables must be set/);\n    }\n  });\n\n  test('throws an error if METRIC_NAMESPACE is undefined', async () => {\n    delete process.env.METRIC_NAMESPACE;\n    expect.assertions(1);\n    try {\n      await handler(actionExecutionEvent());\n    } catch (err: any) {\n      expect(err.message).toMatch(/environment variables must be set/);\n    }\n  });\n\n  describe('Action Execution State Change', () => {\n    test('throws an error if state is not SUCCEEDED or FAILED', async () => {\n      expect.assertions(1);\n      try {\n        await handler(actionExecutionEvent('STARTED'));\n      } catch (err: any) {\n        expect(err.message).toMatch(/Unsupported/);\n      }\n    });\n\n    test('reports FAILED state metrics', async () => {\n      expect.assertions(1);\n      cloudwatch.putMetricData = jest.fn(request => {\n        expect(request).toEqual({\n          Namespace: 'metricNamespace',\n          MetricData: [\n            {\n              MetricName: 
'metricName',\n              Value: 1,\n              Dimensions: [\n                { Name: 'Pipeline', Value: 'some-pipeline' },\n                { Name: 'Action', Value: 'some-action' },\n              ],\n              Timestamp: new Date(1611751440000),\n            },\n          ],\n        });\n        return {\n          promise: () => new Promise((resolve, _) => resolve({})),\n        };\n      }) as any;\n      await handler(actionExecutionEvent('FAILED'));\n    });\n\n    test('reports SUCCEEDED state metrics', async () => {\n      expect.assertions(1);\n      cloudwatch.putMetricData = jest.fn(request => {\n        expect(request).toEqual({\n          Namespace: 'metricNamespace',\n          MetricData: [\n            {\n              MetricName: 'metricName',\n              Value: 0,\n              Dimensions: [\n                { Name: 'Pipeline', Value: 'some-pipeline' },\n                { Name: 'Action', Value: 'some-action' },\n              ],\n              Timestamp: new Date(1611751440000),\n            },\n          ],\n        });\n        return {\n          promise: () => new Promise((resolve, _) => resolve({})),\n        };\n      }) as any;\n      await handler(actionExecutionEvent('SUCCEEDED'));\n    });\n  });\n\n  describe('Pipeline Execution State Change', () => {\n    test('throws an error if state is not SUCCEEDED or FAILED', async () => {\n      expect.assertions(1);\n      try {\n        await handler(pipelineExecutionEvent('STARTED'));\n      } catch (err: any) {\n        expect(err.message).toMatch(/Unsupported/);\n      }\n    });\n\n    test('reports FAILED state metrics', async () => {\n      expect.assertions(1);\n      cloudwatch.putMetricData = jest.fn(request => {\n        expect(request).toEqual({\n          Namespace: 'metricNamespace',\n          MetricData: [\n            {\n              MetricName: 'metricName',\n              Value: 1,\n              Dimensions: [\n                { Name: 'Pipeline', Value: 
'some-pipeline' },\n              ],\n              Timestamp: new Date(1611751440000),\n            },\n          ],\n        });\n        return {\n          promise: () => new Promise((resolve, _) => resolve({})),\n        };\n      }) as any;\n      await handler(pipelineExecutionEvent('FAILED'));\n    });\n\n    test('reports SUCCEEDED state metrics', async () => {\n      expect.assertions(1);\n      cloudwatch.putMetricData = jest.fn(request => {\n        expect(request).toEqual({\n          Namespace: 'metricNamespace',\n          MetricData: [\n            {\n              MetricName: 'metricName',\n              Value: 0,\n              Dimensions: [\n                { Name: 'Pipeline', Value: 'some-pipeline' },\n              ],\n              Timestamp: new Date(1611751440000),\n            },\n          ],\n        });\n        return {\n          promise: () => new Promise((resolve, _) => resolve({})),\n        };\n      }) as any;\n      await handler(pipelineExecutionEvent('SUCCEEDED'));\n    });\n  });\n});\n\nfunction actionExecutionEvent(\n  state: 'STARTED' | 'CANCELED' | 'FAILED' | 'SUCCEEDED' = 'SUCCEEDED',\n): LambdaActionStateChangeEvent {\n  return {\n    'id': 'some-id',\n    'version': '1',\n    'account': '0123456789',\n    'resources': ['some-resource'],\n    'time': '2021-01-27T12:44:00Z',\n    'detail-type': 'CodePipeline Action Execution State Change',\n    'region': 'us-east-1',\n    'source': 'aws.codepipeline',\n    'detail': {\n      action: 'some-action',\n      pipeline: 'some-pipeline',\n      state,\n    },\n  };\n}\n\nfunction pipelineExecutionEvent(\n  state: 'STARTED' | 'CANCELED' | 'FAILED' | 'SUCCEEDED' = 'SUCCEEDED',\n): LambdaExecutionStateChangeEvent {\n  return {\n    'id': 'some-id',\n    'version': '1',\n    'account': '0123456789',\n    'resources': ['some-resource'],\n    'time': '2021-01-27T12:44:00Z',\n    'detail-type': 'CodePipeline Pipeline Execution State Change',\n    'region': 'us-east-1',\n    'source': 
'aws.codepipeline',\n    'detail': {\n      pipeline: 'some-pipeline',\n      state,\n    },\n  };\n}\n"
  },
  {
    "path": "lib/__tests__/watcher.test.ts",
    "content": "import { Stack } from 'aws-cdk-lib';\nimport { Template } from 'aws-cdk-lib/assertions';\nimport { Pipeline } from 'aws-cdk-lib/aws-codepipeline';\nimport { PipelineWatcher } from '../../lib/pipeline-watcher';\n\nconst props = {\n  metricNamespace: 'Namespace',\n  failureMetricName: 'FailureMetricName',\n};\n\ndescribe('PipelineWatcher', () => {\n  test('default', () => {\n    const stack = new Stack();\n    const pipeline = Pipeline.fromPipelineArn(stack, 'Pipeline', 'arn:aws:codepipeline:us-east-1:012345789:MyPipeline');\n    new PipelineWatcher(stack, 'Watcher', { pipeline, ...props });\n    const template = Template.fromStack(stack);\n\n    template.resourceCountIs('AWS::Events::Rule', 1);\n    template.resourceCountIs('AWS::Lambda::Function', 1);\n    template.hasResourceProperties('AWS::CloudWatch::Alarm', {\n      ComparisonOperator: 'GreaterThanOrEqualToThreshold',\n      EvaluationPeriods: 1,\n      AlarmDescription: 'Pipeline MyPipeline has failed stages',\n      Dimensions: [\n        {\n          Name: 'Pipeline',\n          Value: 'MyPipeline',\n        },\n      ],\n      MetricName: 'FailureMetricName',\n      Namespace: 'Namespace',\n      Period: 300,\n      Statistic: 'Maximum',\n      Threshold: 1,\n    });\n  });\n\n  test('title option is correctly handled', () => {\n    const stack = new Stack();\n    const pipeline = Pipeline.fromPipelineArn(stack, 'Pipeline', 'arn:aws:codepipeline:us-east-1:012345789:MyPipeline');\n    new PipelineWatcher(stack, 'Watcher', { pipeline, title: 'MyTitle', ...props });\n    const template = Template.fromStack(stack);\n\n    template.hasResourceProperties('AWS::CloudWatch::Alarm', {\n      AlarmDescription: 'Pipeline MyTitle has failed stages',\n    });\n  });\n\n  test('lambda function has the expected policy', () => {\n    const stack = new Stack();\n    const pipeline = Pipeline.fromPipelineArn(stack, 'Pipeline', 'arn:aws:codepipeline:us-east-1:012345789:MyPipeline');\n    new 
PipelineWatcher(stack, 'Watcher', { pipeline, ...props });\n\n    const template = Template.fromStack(stack);\n    template.hasResourceProperties('AWS::IAM::Policy', {\n      PolicyDocument: {\n        Statement: [\n          {\n            Action: 'cloudwatch:PutMetricData',\n            Condition: {\n              StringEquals: {\n                'cloudwatch:namespace': 'Namespace',\n              },\n            },\n            Effect: 'Allow',\n            Resource: '*',\n          },\n        ],\n      },\n      Roles: [\n        {\n          Ref: 'WatcherPollerServiceRole04A8CDED',\n        },\n      ],\n    });\n  });\n\n  test('missing data should be treated as ignore', () => {\n    const stack = new Stack();\n    const pipeline = Pipeline.fromPipelineArn(stack, 'Pipeline', 'arn:aws:codepipeline:us-east-1:012345789:MyPipeline');\n    new PipelineWatcher(stack, 'Watcher', { pipeline, ...props });\n\n    const template = Template.fromStack(stack);\n    template.hasResourceProperties('AWS::CloudWatch::Alarm', {\n      TreatMissingData: 'ignore',\n    });\n  });\n});\n"
  },
  {
    "path": "lib/auto-build.ts",
    "content": "import {\n  SecretValue,\n  aws_codebuild as codebuild,\n  aws_iam as iam,\n  aws_sam as serverless,\n} from 'aws-cdk-lib';\nimport { Construct } from 'constructs';\nimport { BuildEnvironmentProps, createBuildEnvironment } from './build-env';\nimport { IRepo } from './repo';\n\nexport interface AutoBuildOptions {\n  /**\n   * Build environment.\n   * @default - see defaults in `BuildEnvironmentProps`\n   */\n  readonly environment?: BuildEnvironmentProps;\n\n  /**\n   * The name of the CodeBuild project.\n   *\n   * @default - a name will be generated by CloudFormation.\n   */\n  readonly projectName?: string;\n\n  /**\n   * Make build logs public and publishes a link to GitHub PR discussion.\n   *\n   * @see https://github.com/jlhood/github-codebuild-logs\n   *\n   * @default false\n   */\n  readonly publicLogs?: boolean;\n\n  /**\n   * Configure the project to respond to webhooks.\n   *\n   * @default true\n   */\n  readonly webhook?: boolean;\n\n  /**\n   * Whether to publish a link to build logs when build is successful.\n   *\n   * @see https://github.com/jlhood/github-codebuild-logs#app-parameters\n   *\n   * @default true\n   */\n  readonly publicLogsOnSuccess?: boolean;\n\n  /**\n   * Whether to delete previously published links to build logs\n   * before posting a new one.\n   *\n   * @see https://github.com/jlhood/github-codebuild-logs#app-parameters\n   *\n   * @default true\n   */\n  readonly deletePreviousPublicLogsLinks?: boolean;\n\n  /* tslint:disable:max-line-length */\n  /**\n   * Build spec file to use for AutoBuild\n   *\n   * @default @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codebuild-project-source.html#cfn-codebuild-project-source-buildspec\n   */\n  readonly buildSpec?: codebuild.BuildSpec;\n  /* tslint:enable:max-line-length */\n\n  /**\n   * ARTIFACTS\n   */\n  readonly artifacts?: codebuild.IArtifacts;\n}\n\nexport interface AutoBuildProps extends AutoBuildOptions {\n  /**\n   * 
The repository to monitor.\n   *\n   * Must be a GitHub repository for `publicLogs` to have any effect.\n   */\n  readonly repo: IRepo;\n\n  /**\n   * The specific branch to be considered for auto-builds.\n   *\n   * Specify at most one of `branch` and `branches`.\n   *\n   * @default - any & all branches.\n   * @deprecated Use `branches` instead.\n   */\n  readonly branch?: string;\n\n  /**\n   * The specific branch to be considered for auto-builds.\n   *\n   * Specify at most one of `branch` and `branches`.\n   *\n   * @default - any & all branches.\n   */\n  readonly branches?: string[];\n}\n\nexport class AutoBuild extends Construct {\n\n  /**\n   * The underlying `CodeBuild` project.\n   */\n  public readonly project: codebuild.Project;\n\n  constructor(scope: Construct, id: string, props: AutoBuildProps) {\n    super(scope, id);\n\n    this.project = new codebuild.Project(this, 'Project', {\n      projectName: props.projectName,\n      description: `Automatic PR build for ${props.repo.describe()}`,\n      source: props.repo.createBuildSource(this, props.webhook ?? true, { branch: props.branch, branches: props.branches }),\n      environment: createBuildEnvironment(props.environment ?? {}),\n      badge: props.repo.allowsBadge,\n      buildSpec: props.buildSpec,\n      artifacts: props.artifacts,\n      ssmSessionPermissions: true,\n    });\n    this.project.role!.addManagedPolicy(iam.ManagedPolicy.fromAwsManagedPolicyName('AmazonElasticContainerRegistryPublicReadOnly'));\n\n    const publicLogs = props.publicLogs !== undefined ? props.publicLogs : false;\n    const githubToken = props.repo.tokenSecretArn ? 
SecretValue.secretsManager(props.repo.tokenSecretArn) : undefined;\n\n    if (publicLogs) {\n      new serverless.CfnApplication(this, 'GitHubCodeBuildLogsSAR', {\n        location: {\n          applicationId: 'arn:aws:serverlessrepo:us-east-1:277187709615:applications/github-codebuild-logs',\n          semanticVersion: '1.6.0',\n        },\n        parameters: {\n          CodeBuildProjectName: this.project.projectName,\n          DeletePreviousComments: (props.deletePreviousPublicLogsLinks ?? true).toString(),\n          CommentOnSuccess: (props.publicLogsOnSuccess ?? true).toString(),\n          ...githubToken ? { GitHubOAuthToken: githubToken.unsafeUnwrap() } : undefined,\n        },\n      });\n    }\n  }\n}\n"
  },
  {
    "path": "lib/build-env.ts",
    "content": "import { aws_codebuild as cbuild } from 'aws-cdk-lib';\nimport { DEFAULT_SUPERCHAIN_IMAGE } from './constants';\n\nexport interface BuildEnvironmentProps {\n  computeType?: cbuild.ComputeType;\n  privileged?: boolean;\n  /** @deprecated */\n  env?: { [key: string]: string };\n  environment?: { [key: string]: string };\n  buildImage?: cbuild.IBuildImage;\n}\n\nexport function createBuildEnvironment(props: BuildEnvironmentProps) {\n  const environment: cbuild.BuildEnvironment = {\n    computeType: props.computeType || cbuild.ComputeType.SMALL,\n    privileged: props.privileged,\n    environmentVariables: renderEnvironmentVariables({ ...props.environment, ...props.env }),\n    buildImage: props.buildImage || cbuild.LinuxBuildImage.fromDockerRegistry(DEFAULT_SUPERCHAIN_IMAGE),\n  };\n\n  return environment;\n}\n\nfunction renderEnvironmentVariables(env?: { [key: string]: string }) {\n  if (!env) {\n    return undefined;\n  }\n\n  const out: { [key: string]: cbuild.BuildEnvironmentVariable } = { };\n  for (const [key, value] of Object.entries(env)) {\n    out[key] = { value };\n  }\n  return out;\n}\n"
  },
  {
    "path": "lib/build-spec.ts",
    "content": "import { mapValues, noUndefined } from './util';\n\n\nconst MAGIC_ARTIFACT_NAME = 'PRIMARY';\n\n/**\n * Class to model a buildspec version 0.2\n *\n * Artifact handling is a little special: CodeBuild will interpret the\n * 'artifacts' section differently depending on whether there are secondary\n * artifacts or not.\n *\n * If there is only one artifact, the single artifact must go into the top-level\n * 'artifacts' section. If there are multiple artifacts, all of them must go\n * into the 'secondary-artifacts' section. Upon rendering to JSON, the caller\n * must supply the name of the primary artifact (it's determined by\n * the CodePipeline Action that invokes the CodeBuild Project that uses this\n * buildspec).\n *\n * INVARIANT: in-memory, the BuildSpec will treat all artifacts the same (as\n * a bag of secondary artifacts). At the edges (construction or rendering),\n * if there's only a single artifact it will be rendered to the primary\n * artifact.\n */\nexport class BuildSpec {\n  public static literal(struct: BuildSpecStruct) {\n    return new BuildSpec(struct);\n  }\n\n  public static simple(props: SimpleBuildSpecProps) {\n    // We merge the primary artifact into the secondary artifacts under a special key\n    // They will be compacted back together during rendering.\n    const artifactDirectories = Object.assign({},\n      props.additionalArtifactDirectories || {},\n      props.artifactDirectory ? { [MAGIC_ARTIFACT_NAME]: props.artifactDirectory } : {},\n    );\n\n    let artifacts: PrimaryArtifactStruct | undefined;\n    if (Object.keys(artifactDirectories || {}).length > 0) {\n      artifacts = {\n        'secondary-artifacts': mapValues(artifactDirectories!, d => ({\n          'base-directory': d,\n          'files': ['**/*'],\n        })),\n      };\n    }\n\n    return new BuildSpec({\n      version: '0.2',\n      phases: noUndefined({\n        install: props.install !== undefined ? 
{ commands: props.install } : undefined,\n        pre_build: props.preBuild !== undefined ? { commands: props.preBuild } : undefined,\n        build: props.build !== undefined ? { commands: props.build } : undefined,\n      }),\n      artifacts,\n      reports: props.reports,\n    });\n  }\n\n  public static empty() {\n    return new BuildSpec({ version: '0.2' });\n  }\n\n  private constructor(private readonly spec: BuildSpecStruct) {\n  }\n\n  public get additionalArtifactNames(): string[] {\n    return Object.keys(this.spec.artifacts && this.spec.artifacts['secondary-artifacts'] || {}).filter(n => n !== MAGIC_ARTIFACT_NAME);\n  }\n\n  public merge(other: BuildSpec): BuildSpec {\n    return new BuildSpec({\n      'version': '0.2',\n      'run-as': mergeObj(this.spec['run-as'], other.spec['run-as'], equalObjects),\n      'env': mergeObj(this.spec.env, other.spec.env, (a, b) => ({\n        'parameter-store': mergeDict(a['parameter-store'], b['parameter-store'], equalObjects),\n        'variables': mergeDict(a.variables, b.variables, equalObjects),\n      })),\n      'phases': mergeDict(this.spec.phases, other.spec.phases, (a, b, phase) => {\n        const merged: PhaseStruct = {\n          'run-as': mergeObj(a['run-as'], b['run-as'], equalObjects),\n          'on-failure': mergeObj(a['on-failure'], b['on-failure'], equalObjects),\n          'commands': mergeList(a.commands, b.commands)!,\n          'finally': mergeList(a.finally, b.finally),\n        };\n\n        if (phase === 'install') {\n          (merged as InstallPhaseStruct)['runtime-versions'] = mergeDict(\n            (a as InstallPhaseStruct)['runtime-versions'],\n            (b as InstallPhaseStruct)['runtime-versions'],\n            equalObjects,\n          );\n        }\n\n        return noUndefined(merged);\n      }),\n      'artifacts': mergeObj(this.spec.artifacts, other.spec.artifacts, mergeArtifacts),\n      'cache': mergeObj(this.spec.cache, other.spec.cache, (a, b) => ({\n        paths: 
mergeList(a.paths, b.paths)!,\n      })),\n      'reports': mergeDict(this.spec.reports, other.spec.reports, (a, b) => {\n        throw new Error(`Reports must have unique names, got ${a} and ${b}`);\n      }),\n    });\n\n    function mergeArtifacts(a: PrimaryArtifactStruct, b: PrimaryArtifactStruct): PrimaryArtifactStruct {\n      if (a.files || b.files) {\n        throw new Error('None of the BuildSpecs may have a primary artifact.');\n      }\n\n      const artifacts = Object.assign({}, a['secondary-artifacts'] || {});\n      for (const [k, v] of Object.entries(b['secondary-artifacts'] || {})) {\n        if (k in artifacts) {\n          throw new Error(`There is already an artifact with name ${k}`);\n        }\n        artifacts[k] = v;\n      }\n      return Object.assign({}, a, { 'secondary-artifacts': artifacts });\n    }\n\n\n    function equalObjects(a: string, b: string) {\n      if (a !== b) {\n        throw new Error(`Can't merge two different values for the same key: ${JSON.stringify(a)}, ${JSON.stringify(b)}`);\n      }\n      return b;\n    }\n\n    function mergeObj<T>(a: T | undefined, b: T | undefined, fn: (a: T, b: T) => T): T | undefined {\n      if (a === undefined) { return b; }\n      if (b === undefined) { return a; }\n      return fn(a, b);\n    }\n\n    function mergeDict<T>(as: { [k: string]: T } | undefined, bs: { [k: string]: T } | undefined, fn: (a: T, b: T, k: string) => T) {\n      return mergeObj(as, bs, (a, b) => {\n        const ret = Object.assign({}, a);\n        for (const [k, v] of Object.entries(b)) {\n          if (ret[k]) {\n            ret[k] = fn(ret[k], v, k);\n          } else {\n            ret[k] = v;\n          }\n        }\n        return ret;\n      });\n    }\n\n    function mergeList<T>(as: T[] | undefined, bs: T[] | undefined): T[] | undefined {\n      return mergeObj(as, bs, (a, b) => a.concat(b));\n    }\n  }\n\n  public render(options: BuildSpecRenderOptions = {}): BuildSpecStruct {\n    return 
Object.assign({}, this.spec, { artifacts: this.renderArtifacts(options) });\n  }\n\n  private renderArtifacts(options: BuildSpecRenderOptions): PrimaryArtifactStruct | undefined {\n    if (!this.spec.artifacts || !this.spec.artifacts['secondary-artifacts']) { return this.spec.artifacts; }\n\n    // Simplify a single \"secondary-artifacts\" to a single primary artifact (regardless of the name)\n    const singleArt = dictSingletonValue(this.spec.artifacts['secondary-artifacts']);\n    if (singleArt) { return singleArt; }\n\n    // Otherwise rename a 'PRIMARY' key if it exists\n    if (MAGIC_ARTIFACT_NAME in this.spec.artifacts['secondary-artifacts']) {\n      if (!options.primaryArtifactName) {\n        throw new Error(`Replacement name for ${MAGIC_ARTIFACT_NAME} artifact not supplied`);\n      }\n\n      return { 'secondary-artifacts': renameKey(this.spec.artifacts['secondary-artifacts'], MAGIC_ARTIFACT_NAME, options.primaryArtifactName) };\n    }\n\n    return this.spec.artifacts;\n  }\n}\n\nexport interface SimpleBuildSpecProps {\n  install?: string[];\n  preBuild?: string[];\n  build?: string[];\n  reports?: { [key: string]: ReportStruct };\n  artifactDirectory?: string;\n\n  /**\n   * Where the directories for each artifact are\n   *\n   * Use special name PRIMARY to refer to the primary artifact. 
Will be\n   * replaced with the actual artifact name when the build spec is synthesized.\n   */\n  additionalArtifactDirectories?: { [id: string]: string };\n}\n\nexport interface BuildSpecStruct {\n  'version': '0.2';\n  'run-as'?: string;\n  'env'?: EnvStruct;\n  'phases'?: {\n    install?: InstallPhaseStruct;\n    pre_build?: PhaseStruct;\n    build?: PhaseStruct;\n    post_build?: PhaseStruct;\n  };\n  'artifacts'?: PrimaryArtifactStruct;\n  'cache'?: CacheStruct;\n  'reports'?: { [key: string]: ReportStruct };\n}\n\nexport interface EnvStruct {\n  'variables'?: { [key: string]: string };\n  'parameter-store'?: { [key: string]: string };\n  'exported-variables'?: string[];\n}\n\nexport interface PhaseStruct {\n  'run-as'?: string;\n  'on-failure'?: string;\n  'commands': string[];\n  'finally'?: string[];\n}\n\nexport interface InstallPhaseStruct extends PhaseStruct {\n  'runtime-versions'?: { [key: string]: string };\n}\n\nexport interface ReportStruct {\n  'files'?: string[];\n  'base-directory'?: string;\n  'discard-paths'?: 'yes' | 'no';\n  'file-format'?: 'CucumberJson' | 'JunitXml' | 'NunitXml' | 'TestNGXml' | 'VisualStudioTrx';\n}\n\nexport interface ArtifactStruct {\n  'files'?: string[];\n  'name'?: string;\n  'base-directory'?: string;\n  'discard-paths'?: 'yes' | 'no';\n}\n\nexport interface PrimaryArtifactStruct extends ArtifactStruct {\n  'secondary-artifacts'?: { [key: string]: ArtifactStruct };\n}\n\nexport interface CacheStruct {\n  paths: string[];\n}\n\nexport interface BuildSpecRenderOptions {\n  /**\n   * Replace PRIMARY artifact name with this\n   *\n   * Cannot use the special term PRIMARY if this is not supplied.\n   *\n   * @default  Cannot use PRIMARY\n   */\n  primaryArtifactName?: string;\n}\n\n/**\n * If the dict is a singleton dict, return the value of the first key, otherwise return undefined\n */\nfunction dictSingletonValue<T>(xs: { [key: string]: T }): T | undefined {\n  const keys = Object.keys(xs);\n  if (keys.length === 1) 
{\n    return xs[keys[0]];\n  }\n  return undefined;\n}\n\nfunction renameKey<T>(xs: { [key: string]: T }, orig: string, rename: string): { [key: string]: T } {\n  const ret = Object.assign({}, xs);\n  if (orig in ret) {\n    ret[rename] = ret[orig];\n    delete ret[orig];\n  }\n  return ret;\n}\n"
  },
  {
    "path": "lib/canary.ts",
    "content": "import {\n  aws_cloudwatch as cloudwatch,\n  aws_codebuild as cbuild,\n  aws_events as events,\n  aws_events_targets as events_targets,\n} from 'aws-cdk-lib';\nimport { Construct } from 'constructs';\nimport { Shellable, ShellableProps } from './shellable';\n\n\nexport interface CanaryProps extends ShellableProps {\n  /**\n   * Rate at which to run the canary test.\n   *\n   * @default every 1 minute\n   */\n  schedule: events.Schedule;\n}\n\n/**\n * Schedules a script to run periodically in CodeBuild and exposes an alarm\n * for failures. Ideal for running 'canary' scripts.\n *\n * If not explicitly defined in `environmentVariables`, IS_CANARY is set to \"true\".\n */\nexport class Canary extends Construct {\n  public readonly alarm: cloudwatch.IAlarm;\n  public readonly project: cbuild.IProject;\n\n  constructor(scope: Construct, id: string, props: CanaryProps) {\n    super(scope, id);\n\n    const env = props.environment || { };\n    if (!('IS_CANARY' in env)) {\n      env.IS_CANARY = 'true';\n    }\n\n    const shellable = new Shellable(this, 'Shellable', {\n      ...props,\n      environment: env,\n    });\n\n    new events.Rule(this, 'Schedule', {\n      schedule: props.schedule || events.Schedule.expression('rate(1 minute)'),\n      targets: [new events_targets.CodeBuildProject(shellable.project)],\n    });\n\n    this.alarm = shellable.alarm;\n    this.project = shellable.project;\n  }\n}\n"
  },
  {
    "path": "lib/change-control-lambda/disable-transition.ts",
    "content": "// eslint-disable-next-line import/no-extraneous-dependencies\n\n\n// eslint-disable-next-line import/no-extraneous-dependencies\nimport { CodePipeline } from '@aws-sdk/client-codepipeline';\nconst pipeline = new CodePipeline();\n\n/**\n * Disables a CodePipeline transition into a given stage.\n * @param pipelineName the name of the pipeline on which a transition will be disabled.\n * @param stageName    the name of the stage into which a transition will be disabled.\n * @param reason       the reason to tag on the disabled transition\n */\nexport async function disableTransition(pipelineName: string, stageName: string, reason: string): Promise<void> {\n  // Make sure the reason contains no illegal characters, and isn't too long\n  // See https://docs.aws.amazon.com/codepipeline/latest/APIReference/API_DisableStageTransition.html\n  reason = reason.replace(/[^a-zA-Z0-9!@ \\(\\)\\.\\*\\?\\-]/g, '-').slice(0, 300);\n  await pipeline.disableStageTransition({\n    pipelineName,\n    reason,\n    stageName,\n    transitionType: 'Inbound',\n  });\n}\n\n/**\n * Enables a CodePipeline transition into a given stage.\n * @param pipelineName the name of the pipeline on which a transition will be enabled.\n * @param stageName    the name of the stage into which a transition will be enabled.\n */\nexport async function enableTransition(pipelineName: string, stageName: string): Promise<void> {\n  await pipeline.enableStageTransition({\n    pipelineName,\n    stageName,\n    transitionType: 'Inbound',\n  });\n}\n"
  },
  {
    "path": "lib/change-control-lambda/index.ts",
    "content": "// eslint-disable-next-line import/no-extraneous-dependencies\n\n\n// eslint-disable-next-line import/no-extraneous-dependencies\nimport { S3 } from '@aws-sdk/client-s3';\nimport { disableTransition, enableTransition } from './disable-transition';\nimport { shouldBlockPipeline } from './time-window';\n\n// tslint:disable:no-console\nconst s3 = new S3();\n\n/**\n * Handler for a lambda function that can be called periodically in order to enforce Change Control calendars. It\n * expects the following environment variables to be available:\n *\n * CHANGE_CONTROL_BUCKET_NAME - the name of the S3 Bucket containing the change control calendar\n * CHANGE_CONTROL_OBJECT_KEY  - the key in which the change control calendar is at in CHANGE_CONTROL_BUCKET_NAME\n * PIPELINE_NAME              - the name of the pipeline in which promotions will be managed\n * STAGE_NAME                 - the name of the stage into which transitions are managed\n */\nexport async function handler(/*_event: any, _context: any*/) {\n  const bucketName = env('CHANGE_CONTROL_BUCKET_NAME');\n  const objectKey = env('CHANGE_CONTROL_OBJECT_KEY');\n  const stageName = env('STAGE_NAME');\n  const pipelineName = env('PIPELINE_NAME');\n\n  console.log(`CHANGE_CONTROL_BUCKET_NAME: ${bucketName}`);\n  console.log(`CHANGE_CONTROL_OBJECT_KEY:  ${bucketName}`);\n  console.log(`STAGE_NAME:                 ${bucketName}`);\n  console.log(`PIPELINE_NAME:              ${bucketName}`);\n\n  try {\n    const icsData = await tryGetCalendarData(bucketName, objectKey);\n    const blockingEvent = shouldBlockPipeline(icsData, new Date());\n    if (blockingEvent) {\n      console.log(`Disabling transition into ${pipelineName}.${stageName} with reason ${blockingEvent.summary}`);\n      await disableTransition(pipelineName, stageName, blockingEvent.summary);\n    } else {\n      console.log(`Enabling transition into ${pipelineName}.${stageName}`);\n      await enableTransition(pipelineName, stageName);\n    }\n 
   console.log('All Done!');\n  } catch (e: any) {\n    console.log(`Error: ${e.message} - ${e.stack}`);\n    throw e;\n  }\n}\n\nfunction env(name: string) {\n  const x = process.env[name];\n  if (x === undefined) {\n    throw new Error(`Environment variable \"${name}\" is required`);\n  }\n  return x;\n}\n\nasync function tryGetCalendarData(Bucket: string, Key: string) {\n  try {\n    const icsFile = await s3.getObject({ Bucket, Key });\n    console.log(`Calendar object version ID: ${icsFile.VersionId || '<unversioned>'}`);\n    return icsFile.Body!.toString();\n  } catch (e: any) {\n    // If the bucket or key don't exist, default to closed all the time!\n    if (e.code === 'NoSuchBucket' || e.code === 'NoSuchKey') {\n      console.log(`Calendar object could not be found (${e.message}), defaulting to closed.`);\n      return `\nBEGIN:VCALENDAR\nVERSION:2.0\nPRODID:-//Events Calendar//iCal4j 1.0//EN\nBEGIN:VEVENT\nDTSTAMP:20190215T095737Z\nDTSTART:19700101T000000Z\nDTEND:99991231T235959Z\nSUMMARY:No change control calendar was found in s3://${Bucket}/${Key} !\nEND:VEVENT\nEND:VCALENDAR\n      `;\n    }\n    throw e;\n  }\n}\n"
  },
  {
    "path": "lib/change-control-lambda/time-window.ts",
    "content": "// eslint-disable-next-line import/no-extraneous-dependencies\nimport { RRule } from 'rrule';\n// eslint-disable-next-line @typescript-eslint/no-require-imports,import/no-extraneous-dependencies\nconst ical = require('node-ical');\n\n/**\n * A calendar event describing a \"blocked\" time window.\n */\nexport interface CalendarEvent {\n  /** The description of the event */\n  summary: string;\n  /** The time at which the block starts */\n  start: Date;\n  /** The time at which the block ends */\n  end: Date;\n  /** The time at which the event was last modified. */\n  dtstamp?: Date;\n  /** The type of a calendar event */\n  type: 'VEVENT' | string;\n  /** Parameters to the event, if any. */\n  params?: any[];\n  /** The type of the boundaries for the event */\n  datetype: 'date-time';\n  /** A recurrence rule for the event. */\n  rrule?: RRule;\n}\ntype Events = { [uuid: string]: CalendarEvent };\n\n/**\n * Evaluates whether a deployment pipeline should have promotions suspended due to the imminent start of a blocked\n * time window.\n *\n * @param ical is an iCal document that describes \"blocked\" time windows (there needs to be an event only for times\n *             during which promotions should not happen).\n * @param now  is the reference time considered when assessing the need to block or not.\n * @param advanceMarginSec how many seconds from `now` should be free of any \"blocked\" time window for the pipeline to\n *             not be blocked (defaults to 1 hour).\n *\n * @returns the events that represent the blocked time, or `undefined` if `now` is not \"blocked\".\n */\nexport function shouldBlockPipeline(icalData: string | Buffer, now = new Date(), advanceMarginSec = 3600): CalendarEvent | undefined {\n  validateTz();\n  const events: Events = ical.parseICS(icalData.toString('utf8'));\n  const blocks = containingEventsWithMargin(events, now, advanceMarginSec);\n  return blocks.length > 0 ? 
blocks[0] : undefined;\n}\n\n/**\n * A function to build a CalendarEvent given a start date and a duration.\n *\n * @param start a start date for the event\n * @param duration a duration for the event in milliseconds\n * @param summary a summary to apply to the event\n */\nfunction buildEventForDuration(start: Date, duration: number, summary: string): CalendarEvent {\n  const end = new Date(start.getTime() + duration);\n  return {\n    summary,\n    start,\n    end,\n    datetype: 'date-time',\n    type: 'VEVENT',\n  };\n}\n\n/**\n * If the event is not recurring (i.e. event.rrule is null or undefined), then\n * the event will be returned.\n *\n * If the event is recurring, this method calculates the recurring events surrounding\n * the provided date. If the date provided is equal to the start of an event,\n * the event for that date and the following event will be returend. If\n * CalendarEvent.rrule is not null, then the event is considered recurring.\n *\n * @param event a calendar event.\n * @param date the date for which the previous and next event should be returned.\n */\nfunction flattenEvent(event: CalendarEvent, date: Date): CalendarEvent[] {\n  if (event.rrule) {\n    const events: CalendarEvent[] = [];\n\n    // Calculate the duration of initial event in the recurring series.\n    const duration = new Date(event.end).getTime() - new Date(event.start).getTime();\n\n    // Obtain the start date of the most recent event in the series, inclusive of\n    // 'date' and calculate a new event based on the duration of the initial.\n    const previousEventStart = event.rrule.before(date, true);\n    if (previousEventStart) {\n      events.push(buildEventForDuration(previousEventStart, duration, event.summary));\n    }\n\n    // Obtain the start date of the next event in the series, exclusive of\n    // 'date' and calculate a new event based on the duration of the initial.\n    const nextEventStart = event.rrule.after(date, false);\n    if (nextEventStart) {\n     
 events.push(buildEventForDuration(nextEventStart, duration, event.summary));\n    }\n\n    return events;\n  } else {\n    return [event];\n  }\n}\n\nfunction containingEventsWithMargin(events: Events, date: Date, advanceMarginSec: number): CalendarEvent[] {\n  const bufferedDate = new Date(date.getTime() + advanceMarginSec * 1_000);\n\n  return Object.values(events)\n    .filter(e => e.type === 'VEVENT')\n    .reduce((arr, e) => {\n      arr.push(...flattenEvent(e, date));\n      return arr;\n    }, [] as CalendarEvent[])\n    .filter(e => overlaps(e, { start: date, end: bufferedDate }));\n}\n\n/**\n * Checks whether an event occurs within a specified time period, which should match the following:\n * |------------------<=========LEFT=========>------------------------->\n *                         <WITHIN LEFT>\n *            <OVERLAP AT START>\n *                                      <OVERLAP AT END>\n *               <===COMPLETELY INCLUDES LEFT=====>\n * |------------------<=========LEFT=========>------------------------->\n *\n * @param left  the first time window.\n * @param right the second time window.\n *\n * @returns true if `left` and `right` overlap\n */\nfunction overlaps(left: { start: Date; end: Date }, right: { start: Date; end: Date }): boolean {\n  // Neutering out the milliseconds portions, so they don't interfere\n  [left.start, left.end, right.start, right.end].forEach(d => d.setMilliseconds(0));\n\n  return isBetween(right.start, left.start, left.end)\n    || isBetween(right.end, left.start, left.end)\n    || isBetween(left.start, right.start, right.end)\n    || isBetween(left.end, right.start, right.end);\n}\n\nfunction isBetween(date: Date, left: Date, right: Date): boolean {\n  return date >= left && date <= right;\n}\n\nfunction validateTz() {\n  if (new Date().getTimezoneOffset() !== 0) {\n    throw new Error('Because of a bug in \"node-ical\", this module can only be used when the system time zone is set to UTC. 
Run this command again with \"TZ=UTC\"');\n  }\n}\n"
  },
  {
    "path": "lib/change-controller.ts",
    "content": "import * as path from 'path';\nimport {\n  CfnOutput, Duration, RemovalPolicy,\n  aws_cloudwatch as cloudwatch,\n  aws_codepipeline as cp,\n  aws_events as events,\n  aws_events_targets as events_targets,\n  aws_iam as iam,\n  aws_lambda as lambda,\n  aws_s3 as s3,\n  aws_s3_notifications as s3_notifications,\n  aws_lambda_nodejs as nodejs,\n} from 'aws-cdk-lib';\nimport { Construct } from 'constructs';\n\nexport interface ChangeControllerProps {\n  /**\n   * The bucket in which the ChangeControl iCal document will be stored.\n   *\n   * @default a new versioned bucket will be provisioned.\n   */\n  changeControlBucket?: s3.IBucket;\n\n  /**\n   * The key in which the iCal fille will be stored.\n   *\n   * @default 'change-control.ical'\n   */\n  changeControlObjectKey?: string;\n\n  /**\n   * Name of the stage\n   */\n  pipelineStage: cp.IStage;\n\n  /**\n   * Schedule to run the change controller on\n   *\n   * @default once every 15 minutes\n   */\n  schedule?: events.Schedule;\n\n  /**\n   * Whether to create outputs to inform of the S3 bucket name and keys where the change control calendar should be\n   * stored.\n   *\n   * @defaults true\n   */\n  createOutputs?: boolean;\n}\n\n/**\n * Controls enabling and disabling a CodePipeline promotion into a particular stage based on \"blocking\" windows that are\n * configured in an iCal document stored in an S3 bucket. 
If the document is not present or the bucket does not exist,\n * the transition will be disabled.\n */\nexport class ChangeController extends Construct {\n  /**\n   * The alarm that will fire in case the change controller has failed.\n   */\n  public readonly failureAlarm: cloudwatch.Alarm;\n\n  constructor(scope: Construct, id: string, props: ChangeControllerProps) {\n    super(scope, id);\n\n    let changeControlBucket = props.changeControlBucket;\n    let ownBucket: s3.Bucket | undefined;\n\n    if (!changeControlBucket) {\n      changeControlBucket = ownBucket = new s3.Bucket(this, 'Calendar', {\n        removalPolicy: RemovalPolicy.DESTROY,\n        versioned: true,\n      });\n    }\n\n    // const changeControlBucket = props.changeControlBucket || new s3.Bucket(this, 'Bucket', { versioned: true });\n    const changeControlObjectKey = props.changeControlObjectKey || 'change-control.ics';\n\n    const fn = new nodejs.NodejsFunction(this, 'Function', {\n      description: `Enforces a Change Control Policy into CodePipeline's ${props.pipelineStage.stageName} stage`,\n      entry: path.join(__dirname, 'change-control-lambda', 'index.ts'),\n      runtime: lambda.Runtime.NODEJS_20_X,\n      environment: {\n        // CAPITAL punishment 👌🏻\n        CHANGE_CONTROL_BUCKET_NAME: changeControlBucket.bucketName,\n        CHANGE_CONTROL_OBJECT_KEY: changeControlObjectKey,\n        PIPELINE_NAME: props.pipelineStage.pipeline.pipelineName,\n        STAGE_NAME: props.pipelineStage.stageName,\n      },\n      timeout: Duration.seconds(300),\n    });\n\n    fn.addToRolePolicy(new iam.PolicyStatement({\n      resources: [`${props.pipelineStage.pipeline.pipelineArn}/${props.pipelineStage.stageName}`],\n      actions: ['codepipeline:EnableStageTransition', 'codepipeline:DisableStageTransition'],\n    }));\n\n    changeControlBucket.grantRead(fn, props.changeControlObjectKey);\n\n    if (ownBucket) {\n      ownBucket.addObjectCreatedNotification(new 
s3_notifications.LambdaDestination(fn), {\n        prefix: changeControlObjectKey,\n      });\n    }\n\n    this.failureAlarm = new cloudwatch.Alarm(this, 'Failed', {\n      metric: fn.metricErrors({\n        period: Duration.seconds(300),\n      }),\n      threshold: 1,\n      datapointsToAlarm: 1,\n      evaluationPeriods: 1,\n    });\n\n    const schedule = props.schedule || events.Schedule.expression('rate(15 minutes)');\n\n    // Run this on a schedule\n    new events.Rule(this, 'Rule', {\n      // tslint:disable-next-line:max-line-length\n      description: `Run the change controller for promotions into ${props.pipelineStage.pipeline.pipelineName}'s ${props.pipelineStage.stageName} on a ${schedule} schedule`,\n      schedule,\n      targets: [new events_targets.LambdaFunction(fn)],\n    });\n\n    if (props.createOutputs !== false) {\n      new CfnOutput(this, 'ChangeControlBucketKey', {\n        value: changeControlObjectKey,\n      });\n\n      new CfnOutput(this, 'ChangeControlBucket', {\n        value: changeControlBucket.bucketName,\n      });\n    }\n  }\n}\n"
  },
  {
    "path": "lib/chime-notifier/chime-notifier.ts",
    "content": "import * as path from 'path';\nimport {\n  Duration,\n  aws_codepipeline as cpipeline,\n  aws_iam as iam,\n  aws_lambda as lambda,\n  aws_events as events,\n  aws_events_targets as events_targets,\n} from 'aws-cdk-lib';\nimport { Construct } from 'constructs';\n\nexport interface ChimeNotifierOptions {\n  /**\n   * Chime webhook URLs to send to\n   */\n  readonly webhookUrls: string[];\n\n  /**\n   * The message to send to the channels.\n   *\n   * Can use the following placeholders:\n   *\n   * - $PIPELINE: the name of the pipeline\n   * - $REVISION: description of the failing revision\n   * - $ACTION: name of failing action\n   * - $URL: link to failing action details\n   *\n   * @default - A default message\n   */\n  readonly message?: string;\n}\n\n/**\n * Properties for a ChimeNotifier\n */\nexport interface ChimeNotifierProps extends ChimeNotifierOptions {\n  /**\n   * Code Pipeline to listen to\n   */\n  readonly pipeline: cpipeline.IPipeline;\n}\n\n/**\n * Send a message to a Chime room when a pipeline fails\n */\nexport class ChimeNotifier extends Construct {\n  constructor(scope: Construct, id: string, props: ChimeNotifierProps) {\n    super(scope, id);\n\n    const message = props.message ?? '/md @All Pipeline **$PIPELINE** failed in action **$ACTION**. 
Latest change:\\n```\\n$REVISION\\n```\\n([Failure details]($URL))';\n\n    if (props.webhookUrls.length > 0) {\n      // Reuse the same Lambda code for all pipelines, we will move the Lambda parameterizations into\n      // the CloudWatch Event Input.\n      const notifierLambda = new lambda.SingletonFunction(this, 'Default', {\n        handler: 'index.handler',\n        uuid: '0f4a3ee0-692e-4249-932f-a46a833886d8',\n        code: lambda.Code.fromAsset(path.join(__dirname, 'handler')),\n        runtime: lambda.Runtime.NODEJS_20_X,\n        timeout: Duration.minutes(5),\n      });\n\n      notifierLambda.addToRolePolicy(new iam.PolicyStatement({\n        actions: ['codepipeline:GetPipelineExecution', 'codepipeline:ListActionExecutions'],\n        resources: [props.pipeline.pipelineArn],\n      }));\n\n      props.pipeline.onStateChange(`${this.node.path}-ChimeNotifier`, {\n        target: new events_targets.LambdaFunction(notifierLambda, {\n          event: events.RuleTargetInput.fromObject({\n            // Add parameters\n            message,\n            webhookUrls: props.webhookUrls,\n            // Copy over \"detail\" field\n            detail: events.EventField.fromPath('$.detail'),\n          }),\n        }),\n        eventPattern: {\n          detail: {\n            state: ['FAILED'],\n          },\n        },\n      });\n    }\n  }\n}\n"
  },
  {
    "path": "lib/chime-notifier/handler/notifier-handler.ts",
    "content": "import * as https from 'https';\n// eslint-disable-next-line import/no-extraneous-dependencies\nimport { ArtifactRevision, CodePipeline } from '@aws-sdk/client-codepipeline';\n\n\n// export for tests\nexport const codePipeline = new CodePipeline();\n\n/**\n * Lambda handler for the codepipeline state change events\n *\n * {\n *     \"version\": \"0\",\n *     \"id\": event_Id,\n *     \"detail-type\": \"CodePipeline Pipeline Execution State Change\",\n *     \"source\": \"aws.codepipeline\",\n *     \"account\": Pipeline_Account,\n *     \"time\": TimeStamp,\n *     \"region\": \"us-east-1\",\n *     \"resources\": [\n *         \"arn:aws:codepipeline:us-east-1:account_ID:myPipeline\"\n *     ],\n *     \"detail\": {\n *         \"pipeline\": \"myPipeline\",\n *         \"version\": \"1\",\n *         \"state\": \"STARTED\",\n *         \"execution-id\": execution_Id\n *     }\n * }\n */\nexport async function handler(event: any) {\n  // Log the event so we can have a look in CloudWatch logs\n  process.stdout.write(`${JSON.stringify(event)}\\n`);\n\n  const webhookUrls: string[] = event.webhookUrls || [];\n  if (webhookUrls.length === 0) { throw new Error(\"Expected event field 'webhookUrls'\"); }\n\n  const messageTemplate = event.message;\n  if (!messageTemplate) { throw new Error(\"Expected event field 'message'\"); }\n\n  const details = event.detail || {};\n  const pipelineName = details.pipeline;\n  const pipelineExecutionId = details['execution-id'];\n\n  if (!pipelineName || !pipelineExecutionId) {\n    process.stderr.write('Malformed event!\\n');\n    return;\n  }\n\n  // Describe the revision that caused the pipeline to fail\n  const response = await codePipeline.getPipelineExecution({ pipelineName, pipelineExecutionId });\n  process.stdout.write(`${JSON.stringify(response)}\\n`);\n  const firstArtifact: ArtifactRevision | undefined = (response.pipelineExecution?.artifactRevisions ?? 
[])[0];\n  const revisionSummary = firstArtifact?.revisionSummary ?? firstArtifact?.revisionId ?? `execution ${pipelineExecutionId}`;\n\n  // Find the action that caused the pipeline to fail (no pagination for now)\n  const actionResponse = await codePipeline.listActionExecutions({ pipelineName, filter: { pipelineExecutionId } });\n  process.stdout.write(`${JSON.stringify(actionResponse)}\\n`);\n  const failingActionDetails = actionResponse.actionExecutionDetails?.find(d => d.status === 'Failed');\n  const failingAction = failingActionDetails?.actionName || 'UNKNOWN';\n  const failureUrl = failingActionDetails?.output?.executionResult?.externalExecutionUrl || '???';\n\n  const message = messageTemplate\n    .replace(/\\$PIPELINE/g, pipelineName)\n    .replace(/\\$REVISION/g, revisionSummary)\n    .replace(/\\$ACTION/g, failingAction)\n    .replace(/\\$URL/g, failureUrl);\n\n  // Post the failure to all given Chime webhook URLs\n  await Promise.all(webhookUrls.map(url => sendChimeNotification(url, message)));\n}\n\nasync function sendChimeNotification(url: string, message: string) {\n  return new Promise((ok, ko) => {\n    const req = https.request(url, {\n      method: 'POST',\n      headers: {\n        'Content-Type': 'application/json',\n      },\n    }, (res) => {\n      if (res.statusCode !== 200) {\n        ko(new Error(`Server responded with ${res.statusCode}: ${JSON.stringify(res.headers)}`));\n      }\n\n      res.setEncoding('utf8');\n      res.on('data', () => { /* gobble gobble and ignore */ });\n      res.on('error', ko);\n      res.on('end', ok);\n    });\n\n    req.on('error', ko);\n    req.write(JSON.stringify({ Content: message }));\n    req.end();\n  });\n}\n"
  },
  {
    "path": "lib/chime-notifier/index.ts",
    "content": "export * from './chime-notifier';"
  },
  {
    "path": "lib/code-signing/certificate-signing-request.ts",
    "content": "import * as path from 'path';\nimport {\n  Duration,\n  CustomResource,\n  aws_lambda as lambda,\n  aws_s3 as s3,\n  RemovalPolicy,\n} from 'aws-cdk-lib';\nimport { Platform } from 'aws-cdk-lib/aws-ecr-assets';\nimport { Construct } from 'constructs';\nimport { RsaPrivateKeySecret } from './private-key';\nimport { hashFileOrDirectory } from '../util';\n\n\nexport interface CertificateSigningRequestProps {\n  /**\n   * The RSA Private Key to use for this CSR.\n   */\n  privateKey: RsaPrivateKeySecret;\n  /**\n   * The Distinguished Name for this CSR.\n   */\n  dn: DistinguishedName;\n  /**\n   * The key usage requests for this CSR.\n   *\n   * @example critical,digitalSignature\n   */\n  keyUsage: string;\n  /**\n   * The extended key usage requests for this CSR.\n   *\n   * @example critical,codeSigning\n   */\n  extendedKeyUsage?: string;\n}\n\n/**\n * Creates a Certificate Signing Request (CSR), which will allow a Certificate Authority to provide a signed certificate\n * that uses the specified RSA Private Key. 
A CSR document can usually be shared publicly, however it must be noted that\n * the information provided in the ``dn`` fields, information about the public key and the intended key usage will be\n * readable by anyone who can access the CSR.\n *\n * @see https://www.openssl.org/docs/manmaster/man1/req.html\n */\nexport class CertificateSigningRequest extends Construct {\n  /**\n   * The S3 URL to the CSR document.\n   */\n  public readonly pemRequest: string;\n\n  /**\n   * The S3 URL to a self-signed certificate that corresponds with this CSR.\n   */\n  public readonly selfSignedPemCertificate: string;\n\n  /**\n   * The S3 bucket where the self-signed certificate is stored.\n   */\n  public readonly outputBucket: s3.IBucket;\n\n  constructor(parent: Construct, id: string, props: CertificateSigningRequestProps) {\n    super(parent, id);\n\n    const codeLocation = path.resolve(__dirname, '..', 'custom-resource-handlers');\n    // change the resource id to force deleting existing function, and create new one, as Package type change is not allowed\n    const customResource = new lambda.SingletonFunction(this, 'ResourceHandlerV2', {\n      // change the uuid to force deleting existing function, and create new one, as Package type change is not allowed\n      uuid: 'F0641C15-2BC0-481E-94BA-7BF43F8BBDE3',\n      lambdaPurpose: 'CreateCSR',\n      description: 'Creates a Certificate Signing Request document for an x509 certificate',\n      architecture: lambda.Architecture.X86_64,\n      runtime: lambda.Runtime.FROM_IMAGE,\n      handler: lambda.Handler.FROM_IMAGE,\n      code: new lambda.AssetImageCode(codeLocation, {\n        file: 'Dockerfile',\n        platform: Platform.LINUX_AMD64,\n        buildArgs: {\n          FUN_SRC_DIR: 'certificate-signing-request',\n        },\n        invalidation: {\n          buildArgs: true,\n        },\n      }),\n      timeout: Duration.seconds(300),\n    });\n\n    const outputBucket = new s3.Bucket(this, 'Bucket', {\n      // 
CSRs can be easily re-created if lost or corrupt, so we can let those get to a black hole, no worries.\n      autoDeleteObjects: true,\n      removalPolicy: RemovalPolicy.DESTROY,\n      encryption: s3.BucketEncryption.S3_MANAGED,\n      enforceSSL: true,\n    });\n    outputBucket.grantReadWrite(customResource);\n    this.outputBucket = outputBucket;\n\n    //change the custom resource id to force recreating new one because the change of the underneath lambda function\n    const csr = new CustomResource(this, 'ResourceV2', {\n      serviceToken: customResource.functionArn,\n      resourceType: 'Custom::CertificateSigningRequest',\n      pascalCaseProperties: true,\n      properties: {\n        resourceVersion: hashFileOrDirectory(codeLocation),\n        // Private key\n        privateKeySecretId: props.privateKey.secretArn,\n        // Distinguished name\n        dnCommonName: props.dn.commonName,\n        dnCountry: props.dn.country,\n        dnStateOrProvince: props.dn.stateOrProvince,\n        dnLocality: props.dn.locality,\n        dnOrganizationName: props.dn.organizationName,\n        dnOrganizationalUnitName: props.dn.organizationalUnitName,\n        dnEmailAddress: props.dn.emailAddress,\n        // Key Usage\n        extendedKeyUsage: props.extendedKeyUsage || '',\n        keyUsage: props.keyUsage,\n        // Output location\n        outputBucket: outputBucket.bucketName,\n      },\n    });\n    if (customResource.role) {\n      // Make sure the permissions are all good before proceeding\n      csr.node.addDependency(customResource.role);\n      props.privateKey.grantGetSecretValue(customResource.role);\n    }\n\n    this.pemRequest = csr.getAtt('CSR').toString();\n    this.selfSignedPemCertificate = csr.getAtt('SelfSignedCertificate').toString();\n  }\n}\n\n/**\n * Fields that compose the distinguished name of a certificate\n */\nexport interface DistinguishedName {\n  /** The Common Name (CN) */\n  commonName: string;\n  /** The email address 
(emailAddress) */\n  emailAddress: string;\n\n  /** The Country (C) */\n  country: string;\n  /** The State or Province (ST) */\n  stateOrProvince: string;\n  /** The locality (L) */\n  locality: string;\n\n  /** The organization name (O) */\n  organizationName: string;\n  /** The organizational unit name (OU) */\n  organizationalUnitName: string;\n}\n"
  },
  {
    "path": "lib/code-signing/code-signing-certificate.ts",
    "content": "import {\n  CfnOutput, RemovalPolicy, Stack,\n  aws_iam as iam,\n  aws_kms as kms,\n  aws_s3 as s3,\n  aws_secretsmanager as secretsManager,\n  aws_ssm as ssm,\n  ArnFormat,\n} from 'aws-cdk-lib';\nimport { Construct, IConstruct } from 'constructs';\nimport { CertificateSigningRequest, DistinguishedName } from './certificate-signing-request';\nimport { RsaPrivateKeySecret } from './private-key';\nimport { ICredentialPair } from '../credential-pair';\nimport * as permissions from '../permissions';\n\n\nexport { DistinguishedName } from './certificate-signing-request';\n\ninterface CodeSigningCertificateProps {\n  /**\n   * The number of bits to compose the modulus of the generated private key for this certificate.\n   *\n   * @default 2048\n   */\n  rsaKeySize?: number;\n\n  /**\n   * The KMS CMK to use for encrypting the Private Key secret.\n   * @default A new KMS key will be allocated for you\n   */\n  secretEncryptionKey?: kms.IKey;\n\n  /**\n   * The PEM-encoded certificate that was signed by the relevant authority.\n   *\n   * @default If a certificate is not provided, a self-signed certificate will\n   * be generated and a CSR (certificate signing request) will by available in\n   * the stack output.\n   */\n  pemCertificate?: string;\n\n  /**\n   * Whether a CSR should be generated, even if the certificate is provided.\n   * This can be useful if one wants to renew a certificate that is close to\n   * expiry without generating a new private key (for example, to avoid breaking\n   * clients that make use of certificate pinning).\n   *\n   * @default false\n   */\n  forceCertificateSigningRequest?: boolean;\n\n  /**\n   * When enabled, the Private Key secret will have a DeletionPolicy of\n   * \"RETAIN\", making sure the Private Key is not inadvertently destroyed.\n   *\n   * @default true\n   */\n  retainPrivateKey?: boolean;\n\n  /**\n   * The Distinguished Name for this CSR.\n   */\n  distinguishedName: DistinguishedName;\n\n  /**\n   * Base 
names for the private key and output SSM parameter\n   *\n   * @default - Automatically generated\n   */\n  readonly baseName?: string;\n}\n\nexport interface ICodeSigningCertificate extends IConstruct, ICredentialPair {\n  /**\n   * The S3 bucket where the self-signed certificate is stored.\n   */\n  readonly certificateBucket?: s3.IBucket;\n\n  /**\n   * Grant the IAM principal permissions to read the private key and\n   * certificate.\n   */\n  grantDecrypt(principal?: iam.IPrincipal): void;\n}\n\n/**\n * A Code-Signing certificate, that will use a private key that is generated by a Lambda function. The Certificate will\n * not be usable until the ``pemCertificate`` value has been provided. A typical workflow to use this Construct would be:\n *\n * 1. Add an instance of the construct to your app, without providing the ``pemCertificate`` property\n * 2. Deploy the stack to provision a Private Key and obtain the CSR (you can surface it using an Output, for example)\n * 3. Submit the CSR to your Certificate Authority of choice.\n * 4. Populate the ``pemCertificate`` property with the PEM-encoded certificate provided by your CA of choice.\n * 5. Re-deploy the stack to make the certificate usable\n *\n * In order to renew the certificate, if you do not wish to retain the same private key (your clients do not rely on\n * public key pinning), simply add a new instance of the construct to your app and follow the process listed above. 
If\n * you wish to retain the private key, you can set ``forceCertificateSigningRequest`` to ``true`` in order to obtain a\n * new CSR document.\n */\nexport class CodeSigningCertificate extends Construct implements ICodeSigningCertificate {\n  /**\n   * The AWS Secrets Manager secret that holds the private key for this CSC\n   */\n  public readonly credential: secretsManager.ISecret;\n\n  /**\n   * The AWS SSM Parameter that holds the certificate for this CSC.\n   */\n  public readonly principal: ssm.IStringParameter;\n\n  /**\n   * The S3 bucket where the self-signed certificate is stored.\n   */\n  public readonly certificateBucket?: s3.IBucket;\n\n  constructor(parent: Construct, id: string, props: CodeSigningCertificateProps) {\n    super(parent, id);\n\n    // The construct path of this construct with respect to the containing stack, without any leading /\n    const stack = Stack.of(this);\n    const baseName = props.baseName ?? `${stack.stackName}${this.node.path.substr(stack.node.path.length)}`;\n\n    const privateKey = new RsaPrivateKeySecret(this, 'RSAPrivateKey', {\n      removalPolicy: props.retainPrivateKey === false ? 
RemovalPolicy.DESTROY : RemovalPolicy.RETAIN,\n      description: 'The PEM-encoded private key of the x509 Code-Signing Certificate',\n      keySize: props.rsaKeySize || 2048,\n      secretEncryptionKey: props.secretEncryptionKey,\n      // rename the secret name, as since this resource will be deleted and create a new resource,\n      // so the new resource will be created before the old one got deleted, and so we will not be able\n      // to create a new secret with the same name, and even we could not reuse it, as it will be deleted once\n      // the old resource got deleted.\n      secretName: `${baseName}/RSAPrivateKeyV2`,\n    });\n\n    // this change to keep the permissions to access the old secret for the custom resource Lambda function role, so it can\n    // delete the old secret.\n    const oldSecretArnLike = Stack.of(this).formatArn({\n      service: 'secretsmanager',\n      resource: 'secret',\n      arnFormat: ArnFormat.COLON_RESOURCE_NAME,\n      // The ARN of a secret has \"-\" followed by 6 random characters appended at the end\n      resourceName: `${baseName}/RSAPrivateKey-??????`,\n    });\n    privateKey.customResource.addToRolePolicy(new iam.PolicyStatement({\n      actions: [\n        'secretsmanager:CreateSecret',\n        'secretsmanager:DeleteSecret',\n        'secretsmanager:UpdateSecret',\n      ],\n      resources: [oldSecretArnLike],\n    }));\n\n    if (props.secretEncryptionKey) {\n      props.secretEncryptionKey.addToResourcePolicy(new iam.PolicyStatement({\n        // description: `Allow use via AWS Secrets Manager by CustomResource handler ${customResource.functionName}`,\n        principals: [new iam.ArnPrincipal(privateKey.customResource.role!.roleArn)],\n        actions: ['kms:Decrypt', 'kms:GenerateDataKey'],\n        resources: ['*'],\n        conditions: {\n          StringEquals: {\n            'kms:ViaService': `secretsmanager.${Stack.of(this).region}.amazonaws.com`,\n          },\n          ArnLike: {\n            
'kms:EncryptionContext:SecretARN': oldSecretArnLike,\n          },\n        },\n      }));\n    }\n\n    this.credential = secretsManager.Secret.fromSecretAttributes(this, 'Credential', {\n      encryptionKey: props.secretEncryptionKey,\n      secretCompleteArn: privateKey.secretArn,\n    });\n\n    let certificate = props.pemCertificate;\n\n    if (!certificate || props.forceCertificateSigningRequest) {\n      const csr: CertificateSigningRequest = privateKey.newCertificateSigningRequest('CertificateSigningRequest',\n        props.distinguishedName,\n        'critical,digitalSignature',\n        'critical,codeSigning');\n\n      this.certificateBucket = csr.outputBucket;\n\n      new CfnOutput(this, 'CSR', {\n        description: 'A PEM-encoded Certificate Signing Request for a Code-Signing Certificate',\n        value: csr.pemRequest,\n      });\n\n      if (!certificate) {\n        certificate = csr.selfSignedPemCertificate;\n      }\n    }\n\n    this.principal = new ssm.StringParameter(this, 'Resource', {\n      description: `A PEM-encoded Code-Signing Certificate (private key in ${privateKey.secretArn})`,\n      parameterName: `/${baseName}/Certificate`,\n      stringValue: certificate!,\n    });\n  }\n\n  /**\n   * Grant the IAM principal permissions to read the private key and\n   * certificate.\n   */\n  public grantDecrypt(principal?: iam.IPrincipal) {\n    if (!principal) { return; }\n\n    permissions.grantSecretRead({\n      keyArn: this.credential.encryptionKey && this.credential.encryptionKey.keyArn,\n      secretArn: this.credential.secretArn,\n    }, principal);\n\n    principal.addToPrincipalPolicy(new iam.PolicyStatement({\n      actions: ['ssm:GetParameter'],\n      resources: [Stack.of(this).formatArn({\n        // TODO: This is a workaround until https://github.com/awslabs/aws-cdk/pull/1726 is released\n        service: 'ssm',\n        resource: `parameter${this.principal.parameterName}`,\n      })],\n    }));\n\n    
this.certificateBucket?.grantRead(principal);\n  }\n}\n"
  },
  {
    "path": "lib/code-signing/index.ts",
    "content": "export * from './code-signing-certificate';\n"
  },
  {
    "path": "lib/code-signing/private-key.ts",
    "content": "import * as path from 'path';\nimport {\n  Duration, RemovalPolicy, Stack,\n  ArnFormat, CustomResource,\n  aws_iam as iam,\n  aws_kms as kms,\n  aws_lambda as lambda,\n} from 'aws-cdk-lib';\nimport { Platform } from 'aws-cdk-lib/aws-ecr-assets';\nimport { Construct } from 'constructs';\nimport { CertificateSigningRequest, DistinguishedName } from './certificate-signing-request';\nimport { hashFileOrDirectory } from '../util';\n\n\nexport interface RsaPrivateKeySecretProps {\n  /**\n   * The modulus size of the RSA key that will be generated.\n   *\n   * The NIST publishes a document that provides guidance on how to select an appropriate key size:\n   * @see https://csrc.nist.gov/publications/detail/sp/800-57-part-1/rev-4/final\n   */\n  keySize: number;\n\n  /**\n   * The name of the AWS Secrets Manager entity that will be created to hold the private key.\n   */\n  secretName: string;\n\n  /**\n   * The description to attach to the AWS Secrets Manager entity that will hold the private key.\n   */\n  description?: string;\n\n  /**\n   * The KMS key to be used for encrypting the AWS Secrets Manager entity.\n   *\n   * @default the default KMS key will be used in accordance with AWS Secrets Manager default behavior.\n   */\n  secretEncryptionKey?: kms.IKey;\n\n  /**\n   * The deletion policy to apply on the Private Key secret.\n   *\n   * @default Retain\n   */\n  removalPolicy?: RemovalPolicy;\n}\n\n/**\n * An OpenSSL-generated RSA Private Key. 
It can for example be used to obtain a Certificate signed by a Certificate\n * Authority through the use of the ``CertificateSigningRequest`` construct (or via the\n * ``#newCertificateSigningRequest``) method.\n */\nexport class RsaPrivateKeySecret extends Construct {\n  /**\n   * The ARN of the secret that holds the private key.\n   */\n  public secretArn: string;\n  public customResource: lambda.SingletonFunction;\n\n  private secretArnLike: string;\n  private masterKey?: kms.IKey;\n\n  constructor(parent: Construct, id: string, props: RsaPrivateKeySecretProps) {\n    super(parent, id);\n\n    const codeLocation = path.resolve(__dirname, '..', 'custom-resource-handlers');\n    // change the resource id to force deleting existing function, and create new one, as Package type change is not allowed\n    this.customResource = new lambda.SingletonFunction(this, 'ResourceHandlerV2', {\n      lambdaPurpose: 'RSAPrivate-Key',\n      // change the uuid to force deleting existing function, and create new one, as Package type change is not allowed\n      uuid: '517D342F-A590-447B-B525-5D06E403A406',\n      description: 'Generates an RSA Private Key and stores it in AWS Secrets Manager',\n      runtime: lambda.Runtime.FROM_IMAGE,\n      handler: lambda.Handler.FROM_IMAGE,\n      code: new lambda.AssetImageCode(codeLocation, {\n        file: 'Dockerfile',\n        platform: Platform.LINUX_AMD64,\n        buildArgs: {\n          FUN_SRC_DIR: 'private-key',\n        },\n        invalidation: {\n          buildArgs: true,\n        },\n      }),\n      timeout: Duration.seconds(300),\n    });\n\n    this.secretArnLike = Stack.of(this).formatArn({\n      service: 'secretsmanager',\n      resource: 'secret',\n      arnFormat: ArnFormat.COLON_RESOURCE_NAME,\n      // The ARN of a secret has \"-\" followed by 6 random characters appended at the end\n      resourceName: `${props.secretName}-??????`,\n    });\n    this.customResource.addToRolePolicy(new iam.PolicyStatement({\n      
actions: [\n        'secretsmanager:CreateSecret',\n        'secretsmanager:DeleteSecret',\n        'secretsmanager:UpdateSecret',\n      ],\n      resources: [this.secretArnLike],\n    }));\n\n    if (props.secretEncryptionKey) {\n      props.secretEncryptionKey.addToResourcePolicy(new iam.PolicyStatement({\n        // description: `Allow use via AWS Secrets Manager by CustomResource handler ${customResource.functionName}`,\n        principals: [new iam.ArnPrincipal(this.customResource.role!.roleArn)],\n        actions: ['kms:Decrypt', 'kms:GenerateDataKey'],\n        resources: ['*'],\n        conditions: {\n          StringEquals: {\n            'kms:ViaService': `secretsmanager.${Stack.of(this).region}.amazonaws.com`,\n          },\n          ArnLike: {\n            'kms:EncryptionContext:SecretARN': this.secretArnLike,\n          },\n        },\n      }));\n    }\n\n    //change the custom resource id to force recreating new one because the change of the underneath lambda function\n    const privateKey = new CustomResource(this, 'ResourceV2', {\n      serviceToken: this.customResource.functionArn,\n      resourceType: 'Custom::RsaPrivateKeySecret',\n      pascalCaseProperties: true,\n      properties: {\n        resourceVersion: hashFileOrDirectory(codeLocation),\n        description: props.description,\n        keySize: props.keySize,\n        secretName: props.secretName,\n        kmsKeyId: props.secretEncryptionKey && props.secretEncryptionKey.keyArn,\n      },\n      removalPolicy: props.removalPolicy || RemovalPolicy.RETAIN,\n    });\n    if (this.customResource.role) {\n      privateKey.node.addDependency(this.customResource.role);\n      if (props.secretEncryptionKey) {\n        // Modeling as a separate Policy to evade a dependency cycle (Role -> Key -> Role), as the Key refers to the\n        // role in its resource policy.\n        privateKey.node.addDependency(new iam.Policy(this, 'GrantLambdaRoleKeyAccess', {\n          roles: 
[this.customResource.role],\n          statements: [\n            new iam.PolicyStatement({\n              // description: `AWSSecretsManager${props.secretName.replace(/[^0-9A-Za-z]/g, '')}CMK`,\n              actions: ['kms:Decrypt', 'kms:GenerateDataKey'],\n              resources: [props.secretEncryptionKey.keyArn],\n              conditions: {\n                StringEquals: {\n                  'kms:ViaService': `secretsmanager.${Stack.of(this).region}.amazonaws.com`,\n                },\n                StringLike: { 'kms:EncryptionContext:SecretARN': [this.secretArnLike, 'RequestToValidateKeyAccess'] },\n              },\n            }),\n          ],\n        }));\n      }\n    }\n\n    this.masterKey = props.secretEncryptionKey;\n    this.secretArn = privateKey.getAtt('SecretArn').toString();\n  }\n\n  /**\n   * Creates a new CSR resource using this private key.\n   *\n   * @param id               the ID of the construct in the construct tree.\n   * @param dn               the distinguished name to record on the CSR.\n   * @param keyUsage         the intended key usage (for example: \"critical,digitalSignature\")\n   * @param extendedKeyUsage the intended extended key usage, if any (for example: \"critical,codeSigning\")\n   *\n   * @returns a new ``CertificateSigningRequest`` instance that can be used to access the actual CSR document.\n   */\n  public newCertificateSigningRequest(id: string, dn: DistinguishedName, keyUsage: string, extendedKeyUsage?: string) {\n    return new CertificateSigningRequest(this, id, {\n      privateKey: this,\n      dn,\n      keyUsage,\n      extendedKeyUsage,\n    });\n  }\n\n  /**\n   * Allows a given IAM Role to read the secret value.\n   *\n   * @param grantee the principal to which permissions should be granted.\n   */\n  public grantGetSecretValue(grantee: iam.IPrincipal): void {\n    grantee.addToPrincipalPolicy(new iam.PolicyStatement({\n      actions: ['secretsmanager:GetSecretValue'],\n      resources: 
[this.secretArn],\n    }));\n    if (this.masterKey) {\n      // Add a key grant since we're using a CMK\n      this.masterKey.addToResourcePolicy(new iam.PolicyStatement({\n        actions: ['kms:Decrypt'],\n        resources: ['*'],\n        principals: [grantee.grantPrincipal],\n        conditions: {\n          StringEquals: {\n            'kms:ViaService': `secretsmanager.${Stack.of(this).region}.amazonaws.com`,\n          },\n          ArnLike: {\n            'kms:EncryptionContext:SecretARN': this.secretArnLike,\n          },\n        },\n      }));\n      grantee.addToPrincipalPolicy(new iam.PolicyStatement({\n        actions: ['kms:Decrypt'],\n        resources: [this.masterKey.keyArn],\n        conditions: {\n          StringEquals: {\n            'kms:ViaService': `secretsmanager.${Stack.of(this).region}.amazonaws.com`,\n          },\n          ArnEquals: {\n            'kms:EncryptionContext:SecretARN': this.secretArn,\n          },\n        },\n      }));\n    }\n  }\n}\n"
  },
  {
    "path": "lib/constants.ts",
    "content": "/**\n * The default superchain image that will be used all across delivlib if no override is supplied.\n */\nexport const DEFAULT_SUPERCHAIN_IMAGE = 'public.ecr.aws/jsii/superchain:1-bookworm-slim-node22';"
  },
  {
    "path": "lib/credential-pair.ts",
    "content": "import {\n  aws_ssm as ssm,\n  aws_secretsmanager as secretsManager,\n} from 'aws-cdk-lib';\n\n\n/**\n * A Credential Pair combines a secret element (the credential) and a public\n * element (the principal). The public element is stored in an SSM Parameter,\n * while the secret element is stored in AWS Secrets Manager.\n *\n * For example, this can be:\n * - A username and a password\n * - A private key and a certificate\n * - An OpenPGP Private key and its public part\n */\nexport interface ICredentialPair {\n  /**\n   * The public part of this credential pair.\n   */\n  readonly principal: ssm.IStringParameter;\n\n  /**\n   * The secret part of this credential pair.\n   */\n  readonly credential: secretsManager.ISecret;\n}\n"
  },
  {
    "path": "lib/custom-resource-handlers/Dockerfile",
    "content": "# Use a NodeJS 20.x runtime\nFROM public.ecr.aws/lambda/nodejs:20-x86_64\n\nARG FUN_SRC_DIR\n\n# install openssl\nRUN dnf install -y openssl\nENV LD_LIBRARY_PATH=\"\"\nRUN openssl version\n\n# remove gnupg2-minimal package, install gnupg2-full package, and then start the gpg-agent\nRUN rpm -e gnupg2-minimal --nodeps && dnf -y install gnupg2-full && gpg-agent --daemon\n\n# copy lambda function source code  to LAMBDA_TASK_ROOT directory\nCOPY bin/${FUN_SRC_DIR}/* /var/task/\n\n# Set the CMD to the function handler\nCMD [ \"index.handler\" ]"
  },
  {
    "path": "lib/custom-resource-handlers/src/_cloud-formation.ts",
    "content": "import * as https from 'https';\nimport * as url from 'url';\nimport * as lambda from './_lambda';\n\nexport type LambdaHandler = (event: Event, context: lambda.Context) => Promise<void>;\nexport type ResourceHandler = (event: Event, context: lambda.Context) => Promise<ResourceAttributes>;\n\n/**\n * Implements a Lambda CloudFormation custom resource handler.\n *\n * @param handleEvent  the handler function that creates, updates and deletes the resource.\n * @param refAttribute the name of the attribute holding the Physical ID of the resource.\n * @returns a handler function.\n */\nexport function customResourceHandler(handleEvent: ResourceHandler): LambdaHandler {\n  return async (event, context) => {\n    try {\n      // eslint-disable-next-line no-console\n      console.log(`Input event: ${JSON.stringify(event)}`);\n\n      const attributes = await handleEvent(event, context);\n\n      // eslint-disable-next-line no-console\n      console.log(`Attributes: ${JSON.stringify(attributes)}`);\n\n      await exports.sendResponse(event, Status.SUCCESS, attributes.Ref, attributes);\n    } catch (e: any) {\n      // eslint-disable-next-line no-console\n      console.error(e);\n      await exports.sendResponse(event, Status.FAILED, event.PhysicalResourceId, {}, e.message);\n    }\n  };\n}\n\n/**\n * General shape of custom resource attributes.\n */\nexport interface ResourceAttributes {\n  /** The physical reference to this resource instance. */\n  Ref: string;\n\n  /** Other attributes of the resource. 
*/\n  [key: string]: string | undefined;\n}\n\n/**\n * @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/crpg-ref-responses.html\n */\nexport function sendResponse(event: Event,\n  status: Status,\n  physicalResourceId: string = event.PhysicalResourceId || event.LogicalResourceId,\n  data: { [name: string]: string | undefined },\n  reason?: string) {\n  const responseBody = JSON.stringify({\n    Data: data,\n    LogicalResourceId: event.LogicalResourceId,\n    PhysicalResourceId: physicalResourceId,\n    Reason: reason,\n    RequestId: event.RequestId,\n    StackId: event.StackId,\n    Status: status,\n  }, null, 2);\n\n  // eslint-disable-next-line no-console\n  console.log(`Response body: ${responseBody}`);\n\n  const parsedUrl = url.parse(event.ResponseURL);\n  const options: https.RequestOptions = {\n    headers: {\n      'content-length': responseBody.length,\n      'content-type': '',\n    },\n    hostname: parsedUrl.hostname,\n    method: 'PUT',\n    path: parsedUrl.path,\n    port: parsedUrl.port || 443,\n  };\n\n  return new Promise<void>((ok, ko) => {\n    // eslint-disable-next-line no-console\n    console.log('Sending response...');\n\n    const req = https.request(options, resp => {\n      // eslint-disable-next-line no-console\n      console.log(`Received HTTP ${resp.statusCode} (${resp.statusMessage})`);\n      if (resp.statusCode === 200) {\n        return ok();\n      }\n      ko(new Error(`Unexpected error sending resopnse to CloudFormation: HTTP ${resp.statusCode} (${resp.statusMessage})`));\n    });\n\n    req.once('error', ko);\n    req.write(responseBody);\n\n    req.end();\n  });\n}\n\nexport enum Status {\n  SUCCESS = 'SUCCESS',\n  FAILED = 'FAILED',\n}\n\nexport enum RequestType {\n  CREATE = 'Create',\n  UPDATE = 'Update',\n  DELETE = 'Delete',\n}\n\n/** @see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/crpg-ref-requests.html */\nexport type Event = CreateEvent | UpdateEvent | DeleteEvent;\n\nexport 
interface CloudFormationEventBase {\n  readonly RequestType: RequestType;\n  readonly ResponseURL: string;\n  readonly StackId: string;\n  readonly RequestId: string;\n  readonly ResourceType: string;\n  readonly LogicalResourceId: string;\n  readonly ResourceProperties: { [name: string]: any };\n}\n\nexport interface CreateEvent extends CloudFormationEventBase {\n  readonly RequestType: RequestType.CREATE;\n  readonly PhysicalResourceId: undefined;\n}\n\nexport interface UpdateEvent extends CloudFormationEventBase {\n  readonly RequestType: RequestType.UPDATE;\n  readonly PhysicalResourceId: string;\n  readonly OldResourceProperties: { [name: string]: any };\n}\n\nexport interface DeleteEvent extends CloudFormationEventBase {\n  readonly RequestType: RequestType.DELETE;\n  readonly PhysicalResourceId: string;\n}\n\n/**\n * Validates that all required properties are present, and that no extraneous properties are provided.\n *\n * @param props      the properties to be validated.\n * @param validProps a mapping of valid property names to a boolean instructing whether the property is required or not.\n */\nexport function validateProperties(props: { [name: string]: any }, validProps: { [name: string]: boolean }) {\n  // ServiceToken is always accepted (technically required, but we don't care about it internally, unless the caller said we do)\n  validProps.ServiceToken = validProps.ServiceToken || false;\n  // ResourceVersion is injected by the Lambda handler, and is permitted but not required, unless the caller said it is.\n  validProps.ResourceVersion = validProps.ResourceVersion || false;\n\n  for (const property of Object.keys(props)) {\n    if (!(property in validProps)) {\n      throw new Error(`Unexpected property: ${property}`);\n    }\n  }\n  for (const property of Object.keys(validProps)) {\n    if (validProps[property] && !(property in props)) {\n      throw new Error(`Missing required property: ${property}`);\n    }\n  }\n  return props;\n}\n"
  },
  {
    "path": "lib/custom-resource-handlers/src/_exec.ts",
    "content": "import * as childProcess from 'child_process';\nimport * as process from 'process';\n\nexport = function _exec(command: string, ...args: string[]): Promise<string> {\n  return new Promise<string>((ok, ko) => {\n    const child = childProcess.spawn(command, args, { env: process.env, shell: false, stdio: ['ignore', 'pipe', 'inherit'] });\n    const chunks = new Array<Buffer>();\n\n    child.stdout.on('data', chunk => chunks.push(Buffer.from(chunk)));\n\n    child.once('error', ko);\n    child.once('exit', (code, signal) => {\n      if (code === 0) {\n        return ok(Buffer.concat(chunks).toString('utf8'));\n      }\n      ko(new Error(signal != null ? `Killed by ${signal}` : `Exited with status ${code}`));\n    });\n  });\n};\n"
  },
  {
    "path": "lib/custom-resource-handlers/src/_lambda.ts",
    "content": "/**\n * @see https://docs.aws.amazon.com/lambda/latest/dg/nodejs-prog-model-context.html\n */\nexport interface Context {\n  /**\n   * The name of the Lambda function\n   */\n  readonly functionName: string;\n\n  /**\n   * The version of the function\n   */\n  readonly functionVersion: string;\n\n  /**\n   * The Amazon Resource Name (ARN) used to invoke the function. Indicates if the invoker specified a version number\n   * or alias.\n   */\n  readonly invokedFunctionArn: string;\n\n  /**\n   * The amount of memory configured on the function.\n   */\n  readonly memoryLimitInMB: number;\n\n  /**\n   * The identifier of the invocation request?\n   */\n  readonly awsRequestId: string;\n\n  /**\n   * The log group for the function.\n   */\n  readonly logGroupName: string;\n\n  /**\n   * The log stream for the function instance.\n   */\n  readonly logStreamName: string;\n\n  /**\n   * Set to false to send the response right away when the callback executes, instead of waiting for the Node.js event\n   * loop to be empty. 
If false, any outstanding events will continue to run during the next invocation.\n   */\n  callbackWaitsForEmptyEventLoop: boolean;\n\n  /**\n   * For mobile apps, information about the Amazon Cognito identity that authorized the request.\n   */\n  identity?: {\n    /**\n     * The authenticated Amazon Cognito identity.\n     */\n    cognitoIdentityId: string;\n\n    /**\n     * The Amazon Cognito identity pool that authorized the invocation.\n     */\n    cognitoIdentityPoolId: string;\n  };\n\n  /**\n   * For mobile apps, client context provided to the Lambda invoker by the client application.\n   */\n  clientContext?: {\n    client: {\n      installation_id: string;\n      app_title: string;\n      app_version_name: string;\n      app_version_code: string;\n      app_package_name: string;\n    };\n    env: {\n      platform_version: string;\n      platform: string;\n      make: string;\n      model: string;\n      locale: string;\n    };\n    /**\n     * Custom values set by the mobile application.\n     */\n    Custom: { [name: string]: any };\n  };\n\n  /**\n   * Returns the number of milliseconds left before the execution times out.\n   */\n  getRemainingTimeInMillis(): number;\n}\n"
  },
  {
    "path": "lib/custom-resource-handlers/src/_rmrf.ts",
    "content": "import * as fs from 'fs';\nimport * as path from 'path';\nimport * as util from 'util';\n\nconst readdir = util.promisify(fs.readdir);\nconst rmdir = util.promisify(fs.rmdir);\nconst stat = util.promisify(fs.stat);\nconst unlink = util.promisify(fs.unlink);\n\nexport = async function _rmrf(filePath: string): Promise<void> {\n  // All of this is best-effort\n  try {\n    const fstat = await stat(filePath);\n    if (fstat.isDirectory()) {\n      for (const child of await readdir(filePath)) {\n        await _rmrf(path.join(filePath, child));\n      }\n      await rmdir(filePath);\n    } else {\n      await unlink(filePath);\n    }\n  } catch (e: any) {\n    // If deleting fails, too bad.\n  }\n};\n"
  },
  {
    "path": "lib/custom-resource-handlers/src/certificate-signing-request.ts",
    "content": "import * as fs from 'fs';\nimport * as os from 'os';\nimport * as path from 'path';\nimport * as util from 'util';\n// eslint-disable-next-line import/no-extraneous-dependencies\nimport { S3 } from '@aws-sdk/client-s3';\n// eslint-disable-next-line import/no-extraneous-dependencies\nimport { SecretsManager } from '@aws-sdk/client-secrets-manager';\n\nimport * as cfn from './_cloud-formation';\n// eslint-disable-next-line @typescript-eslint/no-require-imports\nimport _exec = require('./_exec');\nimport * as lambda from './_lambda';\n// eslint-disable-next-line @typescript-eslint/no-require-imports\nimport _rmrf = require('./_rmrf');\n\nconst mkdtemp = util.promisify(fs.mkdtemp);\nconst readFile = util.promisify(fs.readFile);\nconst writeFile = util.promisify(fs.writeFile);\n\nconst secretsManager = new SecretsManager();\n\nexports.handler = cfn.customResourceHandler(handleEvent);\n\ninterface ResourceAttributes extends cfn.ResourceAttributes {\n  CSR: string;\n  SelfSignedCertificate: string;\n}\n\nasync function handleEvent(event: cfn.Event, _context: lambda.Context): Promise<cfn.ResourceAttributes> {\n  if (event.RequestType !== cfn.RequestType.DELETE) {\n    cfn.validateProperties(event.ResourceProperties, {\n      DnCommonName: true,\n      DnCountry: true,\n      DnEmailAddress: true,\n      DnLocality: true,\n      DnOrganizationName: true,\n      DnOrganizationalUnitName: true,\n      DnStateOrProvince: true,\n      ExtendedKeyUsage: false,\n      KeyUsage: true,\n      PrivateKeySecretId: true,\n      OutputBucket: true,\n    });\n  }\n\n  switch (event.RequestType) {\n    case cfn.RequestType.CREATE:\n    case cfn.RequestType.UPDATE:\n      return _createSelfSignedCertificate(event);\n    case cfn.RequestType.DELETE:\n    // Nothing to do - this is not a \"Physical\" resource\n      return { Ref: event.LogicalResourceId };\n  }\n}\n\nasync function _createSelfSignedCertificate(event: cfn.Event): Promise<ResourceAttributes> {\n  const tempDir 
= await mkdtemp(path.join(os.tmpdir(), 'x509CSR-'));\n  try {\n    const configFile = await _makeCsrConfig(event, tempDir);\n    const pkeyFile = await _retrievePrivateKey(event, tempDir);\n    const csrFile = path.join(tempDir, 'csr.pem');\n    await _exec('openssl', 'req', '-config', configFile,\n      '-key', pkeyFile,\n      '-out', csrFile,\n      '-new');\n    const certFile = path.join(tempDir, 'cert.pem');\n    await _exec('openssl', 'x509', '-in', csrFile,\n      '-out', certFile,\n      '-req',\n      '-signkey', pkeyFile,\n      '-days', '365');\n\n    const s3 = new S3();\n    const bucketName: string = event.ResourceProperties.OutputBucket;\n    await s3.putObject({\n      Bucket: bucketName,\n      Key: 'certificate-signing-request.pem',\n      Body: await readFile(csrFile, { encoding: 'utf8' }),\n      ContentType: 'application/x-pem-file',\n    });\n    await s3.putObject({\n      Bucket: bucketName,\n      Key: 'self-signed-certificate.pem',\n      Body: await readFile(certFile, { encoding: 'utf8' }),\n      ContentType: 'application/x-pem-file',\n    });\n\n    return {\n      Ref: event.LogicalResourceId,\n      CSR: `s3://${bucketName}/certificate-signing-request.pem`,\n      SelfSignedCertificate: `s3://${bucketName}/self-signed-certificate.pem`,\n    };\n  } finally {\n    await _rmrf(tempDir);\n  }\n}\n\nasync function _makeCsrConfig(event: cfn.Event, dir: string): Promise<string> {\n  const file = path.join(dir, 'csr.config');\n  await writeFile(file, [\n    '[ req ]',\n    'default_md           = sha256',\n    'distinguished_name   = dn',\n    'prompt               = no',\n    'req_extensions       = extensions',\n    'string_mask          = utf8only',\n    'utf8                 = yes',\n    '',\n    '[ dn ]',\n    `CN                   = ${event.ResourceProperties.DnCommonName}`,\n    `C                    = ${event.ResourceProperties.DnCountry}`,\n    `ST                   = ${event.ResourceProperties.DnStateOrProvince}`,\n    `L          
          = ${event.ResourceProperties.DnLocality}`,\n    `O                    = ${event.ResourceProperties.DnOrganizationName}`,\n    `OU                   = ${event.ResourceProperties.DnOrganizationalUnitName}`,\n    `emailAddress         = ${event.ResourceProperties.DnEmailAddress}`,\n    '',\n    '[ extensions ]',\n    `extendedKeyUsage     = ${event.ResourceProperties.ExtendedKeyUsage}`,\n    `keyUsage             = ${event.ResourceProperties.KeyUsage}`,\n    'subjectKeyIdentifier = hash',\n  ].join('\\n'), { encoding: 'utf8' });\n  return file;\n}\n\nasync function _retrievePrivateKey(event: cfn.Event, dir: string): Promise<string> {\n  const file = path.join(dir, 'private_key.pem');\n  const secret = await secretsManager.getSecretValue({\n    SecretId: event.ResourceProperties.PrivateKeySecretId,\n    VersionStage: 'AWSCURRENT',\n  });\n  await writeFile(file, secret.SecretString!, { encoding: 'utf8' });\n  return file;\n}\n"
  },
  {
    "path": "lib/custom-resource-handlers/src/pgp-secret.ts",
    "content": "import * as crypto from 'crypto';\nimport * as fs from 'fs';\nimport * as os from 'os';\nimport * as path from 'path';\nimport * as util from 'util';\n// eslint-disable-next-line import/no-extraneous-dependencies\nimport { SecretsManager } from '@aws-sdk/client-secrets-manager';\n// eslint-disable-next-line import/no-extraneous-dependencies\nimport { SSM } from '@aws-sdk/client-ssm';\n\nimport * as cfn from './_cloud-formation';\n// eslint-disable-next-line @typescript-eslint/no-require-imports\nimport _exec = require('./_exec');\nimport * as lambda from './_lambda';\n// eslint-disable-next-line @typescript-eslint/no-require-imports\nimport _rmrf = require('./_rmrf');\n\nconst mkdtemp = util.promisify(fs.mkdtemp);\nconst writeFile = util.promisify(fs.writeFile);\n\nconst secretsManager = new SecretsManager();\nconst ssm = new SSM();\n\nexports.handler = cfn.customResourceHandler(handleEvent);\n\nconst GPG_BIN = 'gpg';\n\n\ninterface ResourceAttributes extends cfn.ResourceAttributes {\n  SecretArn: string;\n  PublicKey: string;\n}\n\nasync function handleEvent(event: cfn.Event, context: lambda.Context): Promise<cfn.ResourceAttributes> {\n  const props = event.ResourceProperties;\n\n  if (event.RequestType !== cfn.RequestType.DELETE) {\n    cfn.validateProperties(props, {\n      Description: false,\n      Email: true,\n      Expiry: true,\n      Identity: true,\n      KeyArn: false,\n      KeySizeBits: true,\n      SecretName: true,\n      Version: false,\n      DeleteImmediately: false,\n    });\n  }\n\n  let newKey = event.RequestType === cfn.RequestType.CREATE;\n\n  if (event.RequestType === cfn.RequestType.UPDATE) {\n    const oldProps = event.OldResourceProperties;\n    const immutableFields = ['Email', 'Expiry', 'Identity', 'KeySizeBits', 'SecretName', 'Version'];\n    for (const key of immutableFields) {\n      if (props[key] !== oldProps[key]) {\n        // eslint-disable-next-line no-console\n        console.log(`New key required: ${key} 
changed from ${oldProps[key]} to ${props[key]}`);\n        newKey = true;\n      }\n    }\n  }\n\n  switch (event.RequestType) {\n    case cfn.RequestType.CREATE:\n    case cfn.RequestType.UPDATE:\n    // If we're UPDATE and get a new key, we'll issue a new Physical ID.\n      return newKey\n        ? _createNewKey(event, context)\n        : _updateExistingKey(event as cfn.UpdateEvent, context);\n    case cfn.RequestType.DELETE:\n      return _deleteSecret(event);\n  }\n}\n\nasync function _createNewKey(event: cfn.CreateEvent | cfn.UpdateEvent, context: lambda.Context): Promise<ResourceAttributes> {\n  const passPhrase = crypto.randomBytes(32).toString('base64');\n  const tempDir = await mkdtemp(path.join(os.tmpdir(), 'OpenPGP-'));\n  try {\n    process.env.GNUPGHOME = tempDir;\n\n    const keyConfig = path.join(tempDir, 'key.config');\n    await writeFile(keyConfig, [\n      'Key-Type: RSA',\n      `Key-Length: ${event.ResourceProperties.KeySizeBits}`,\n      `Name-Real: ${event.ResourceProperties.Identity}`,\n      `Name-Email: ${event.ResourceProperties.Email}`,\n      `Expire-Date: ${event.ResourceProperties.Expiry}`,\n      `Passphrase: ${passPhrase}`,\n      '%commit',\n      '%echo done',\n    ].join('\\n'), { encoding: 'utf8' });\n\n    const gpgCommonArgs = [`--homedir=${tempDir}`, '--agent-program=/bin/gpg-agent'];\n    await _exec(GPG_BIN, ...gpgCommonArgs, '--batch', '--gen-key', keyConfig);\n    // Need the passphrase to export the private key\n    const keyMaterial = await _exec(GPG_BIN, ...gpgCommonArgs, '--batch', '--yes', '--export-secret-keys', '--armor', '--pinentry-mode=loopback', `--passphrase=${passPhrase}`);\n    const publicKey = await _exec(GPG_BIN, ...gpgCommonArgs, '--batch', '--yes', '--export', '--armor');\n    const secretOpts = {\n      ClientRequestToken: context.awsRequestId,\n      Description: event.ResourceProperties.Description,\n      KmsKeyId: event.ResourceProperties.KeyArn,\n      SecretString: JSON.stringify({\n        
PrivateKey: keyMaterial,\n        Passphrase: passPhrase,\n      }),\n    };\n    const secret = event.RequestType === cfn.RequestType.CREATE\n      ? await secretsManager.createSecret({ ...secretOpts, Name: event.ResourceProperties.SecretName })\n      : await secretsManager.updateSecret({ ...secretOpts, SecretId: event.PhysicalResourceId });\n\n    return {\n      Ref: secret.ARN!,\n      SecretArn: secret.ARN!,\n      PublicKey: publicKey,\n    };\n  } finally {\n    await _rmrf(tempDir);\n  }\n}\n\nasync function _updateExistingKey(event: cfn.UpdateEvent, context: lambda.Context): Promise<ResourceAttributes> {\n  const publicKey = await _getPublicKey(event.PhysicalResourceId);\n  const result = await secretsManager.updateSecret({\n    ClientRequestToken: context.awsRequestId,\n    Description: event.ResourceProperties.Description,\n    KmsKeyId: event.ResourceProperties.KeyArn,\n    SecretId: event.PhysicalResourceId,\n  });\n\n  if (event.OldResourceProperties.ParameterName) {\n    // Migrating from a version that did create the SSM Parameter from the Custom Resource, so we'll delete that now in\n    // order to allow the \"external\" creation to happen without problems...\n    try {\n      await ssm.deleteParameter({ Name: event.OldResourceProperties.ParameterName });\n    } catch (e: any) {\n      // Allow the parameter to already not exist, just in case!\n      if (e.name !== 'ParameterNotFound') {\n        throw e;\n      }\n    }\n  }\n\n  return {\n    Ref: result.ARN!,\n    SecretArn: result.ARN!,\n    PublicKey: publicKey,\n  };\n}\n\nasync function _getPublicKey(secretArn: string): Promise<string> {\n  const secretValue = await secretsManager.getSecretValue({ SecretId: secretArn });\n  const keyData = JSON.parse(secretValue.SecretString!);\n  const tempDir = await mkdtemp(path.join(os.tmpdir(), 'OpenPGP-'));\n  try {\n    process.env.GNUPGHOME = tempDir;\n    const privateKeyFile = path.join(tempDir, 'private.key');\n    await 
 writeFile(privateKeyFile, keyData.PrivateKey, { encoding: 'utf-8' });\n    const gpgCommonArgs = [`--homedir=${tempDir}`, '--agent-program=/bin/gpg-agent'];\n    // Note: importing a private key does NOT require entering its passphrase!\n    await _exec(GPG_BIN, ...gpgCommonArgs, '--batch', '--yes', '--import', privateKeyFile);\n    return await _exec(GPG_BIN, ...gpgCommonArgs, '--batch', '--yes', '--export', '--armor');\n  } finally {\n    await _rmrf(tempDir);\n  }\n}\n\nasync function _deleteSecret(event: cfn.DeleteEvent): Promise<cfn.ResourceAttributes> {\n  await secretsManager.deleteSecret({\n    SecretId: event.PhysicalResourceId,\n    ForceDeleteWithoutRecovery: !!event.ResourceProperties.DeleteImmediately,\n  });\n  return { Ref: event.PhysicalResourceId };\n}\n"
  },
  {
    "path": "lib/custom-resource-handlers/src/private-key.ts",
    "content": "import * as fs from 'fs';\nimport * as os from 'os';\nimport * as path from 'path';\nimport * as util from 'util';\n// eslint-disable-next-line import/no-extraneous-dependencies\nimport { SecretsManager } from '@aws-sdk/client-secrets-manager';\n\nimport * as cfn from './_cloud-formation';\n// eslint-disable-next-line @typescript-eslint/no-require-imports\nimport _exec = require('./_exec');\nimport * as lambda from './_lambda';\n// eslint-disable-next-line @typescript-eslint/no-require-imports\nimport _rmrf = require('./_rmrf');\n\nconst mkdtemp = util.promisify(fs.mkdtemp);\nconst readFile = util.promisify(fs.readFile);\n\nconst secretsManager = new SecretsManager();\n\nexports.handler = cfn.customResourceHandler(handleEvent);\n\nasync function handleEvent(event: cfn.Event, context: lambda.Context): Promise<cfn.ResourceAttributes> {\n  if (event.RequestType !== cfn.RequestType.DELETE) {\n    cfn.validateProperties(event.ResourceProperties, {\n      Description: false,\n      KeySize: true,\n      KmsKeyId: false,\n      SecretName: true,\n    });\n  }\n\n  switch (event.RequestType) {\n    case cfn.RequestType.CREATE:\n      return _createSecret(event, context);\n    case cfn.RequestType.UPDATE:\n      return _updateSecret(event, context);\n    case cfn.RequestType.DELETE:\n      return _deleteSecret(event);\n  }\n}\n\ninterface ResourceAttributes extends cfn.ResourceAttributes {\n  SecretArn: string;\n}\n\nasync function _createSecret(event: cfn.CreateEvent, context: lambda.Context): Promise<ResourceAttributes> {\n  const tmpDir = await mkdtemp(path.join(os.tmpdir(), 'x509PrivateKey-'));\n  try {\n    const pkeyFile = path.join(tmpDir, 'private_key.pem');\n    await _exec('openssl', 'genrsa', '-out', pkeyFile, event.ResourceProperties.KeySize);\n    const result = await secretsManager.createSecret({\n      ClientRequestToken: context.awsRequestId,\n      Description: event.ResourceProperties.Description,\n      KmsKeyId: 
event.ResourceProperties.KmsKeyId,\n      Name: event.ResourceProperties.SecretName,\n      SecretString: await readFile(pkeyFile, { encoding: 'utf8' }),\n    });\n    return {\n      Ref: result.ARN!,\n      SecretArn: result.ARN!,\n    };\n  } finally {\n    await _rmrf(tmpDir);\n  }\n}\n\nasync function _deleteSecret(event: cfn.DeleteEvent): Promise<cfn.ResourceAttributes> {\n  if (event.PhysicalResourceId.startsWith('arn:')) {\n    await secretsManager.deleteSecret({\n      SecretId: event.PhysicalResourceId,\n      ForceDeleteWithoutRecovery: true,\n    });\n  }\n  return { Ref: event.PhysicalResourceId };\n}\n\nasync function _updateSecret(event: cfn.UpdateEvent, context: lambda.Context): Promise<ResourceAttributes> {\n  const props = event.ResourceProperties;\n  const oldProps = event.OldResourceProperties;\n  for (const key of ['KeySize', 'SecretName']) {\n    if (oldProps[key] !== props[key]) {\n      throw new Error(`The ${key} property cannot be updated, but it was changed from ${oldProps[key]} to ${props[key]}`);\n    }\n  }\n  const result = await secretsManager.updateSecret({\n    ClientRequestToken: context.awsRequestId,\n    Description: props.Description,\n    KmsKeyId: props.KmsKeyId,\n    SecretId: event.PhysicalResourceId,\n  });\n\n  return {\n    Ref: result.ARN!,\n    SecretArn: result.ARN!,\n  };\n}\n"
  },
  {
    "path": "lib/index.ts",
    "content": "export * from './auto-build';\nexport * from './canary';\nexport * from './build-spec';\nexport * from './code-signing';\nexport * from './credential-pair';\nexport * from './open-pgp-key-pair';\nexport * from './permissions';\nexport * from './pipeline';\nexport * from './publishing';\nexport * from './registry-sync';\nexport * from './repo';\nexport * from './shellable';\nexport * from './signing-key';\nexport * from './code-signing';\nexport * from './pull-request';\nexport * from './chime-notifier';\nexport * from './pipeline-notifications';\nexport * from './package-integrity';\nexport * from './pipeline-watcher';\n"
  },
  {
    "path": "lib/open-pgp-key-pair.ts",
    "content": "import * as path from 'path';\nimport {\n  Duration, Stack, RemovalPolicy,\n  CustomResource,\n  aws_iam as iam,\n  aws_kms as kms,\n  aws_lambda as lambda,\n  aws_secretsmanager as secretsManager,\n  aws_ssm as ssm,\n  ArnFormat,\n} from 'aws-cdk-lib';\nimport { Platform } from 'aws-cdk-lib/aws-ecr-assets';\nimport { Construct } from 'constructs';\nimport { ICredentialPair } from './credential-pair';\nimport { hashFileOrDirectory } from './util';\n\n/**\n * The type of the {@link OpenPGPKeyPairProps.removalPolicy} property.\n */\nexport enum OpenPGPKeyPairRemovalPolicy {\n  /**\n   * Keep the secret when this resource is deleted from the stack.\n   * This is the default setting.\n   */\n  RETAIN,\n\n  /**\n   * Remove the secret when this resource is deleted from the stack,\n   * but leave a grace period of a few days that allows you to cancel the deletion from the AWS Console.\n   */\n  DESTROY_SAFELY,\n\n  /**\n   * Remove the secret when this resource is deleted from the stack immediately.\n   * Note that if you don't have a backup of this key somewhere,\n   * this means it will be gone forever!\n   */\n  DESTROY_IMMEDIATELY,\n}\n\ninterface OpenPGPKeyPairProps {\n  /**\n   * Identity to put into the key\n   */\n  identity: string;\n\n  /**\n   * Email address to attach to the key\n   */\n  email: string;\n\n  /**\n   * Key size in bits (1024, 2048, 4096)\n   */\n  keySizeBits: number;\n\n  /**\n   * GPG expiry specifier\n   *\n   * Example: '1y'\n   */\n  expiry: string;\n\n  /**\n   * Name of secret to create in AWS Secrets Manager\n   */\n  secretName: string;\n\n  /**\n   * Name of SSM parameter to store public key\n   */\n  pubKeyParameterName: string;\n\n  /**\n   * KMS Key ARN to use to encrypt Secrets Manager Secret\n   */\n  encryptionKey?: kms.IKey;\n\n  /**\n   * Version of the key\n   *\n   * Bump this number to regenerate the key\n   */\n  version: number;\n\n  /**\n   * A description to attach to the AWS SecretsManager secret.\n   
*/\n  description?: string;\n\n  /**\n   * What happens to the SecretsManager secret when this resource is removed from the stack.\n   * The default is to keep the secret.\n   *\n   * @default OpenPGPKeyPairRemovalPolicy.RETAIN\n   */\n  removalPolicy?: OpenPGPKeyPairRemovalPolicy;\n}\n\n/**\n * A PGP key that is stored in Secrets Manager.\n * The SecretsManager secret is by default retained when the resource is deleted,\n * you can change that with the `removalPolicy` property.\n *\n * The string in secrets manager will be a JSON struct of\n *\n * { \"PrivateKey\": \"... ASCII repr of key...\", \"Passphrase\": \"passphrase of the key\" }\n */\nexport class OpenPGPKeyPair extends Construct implements ICredentialPair {\n  public readonly principal: ssm.IStringParameter;\n  public readonly credential: secretsManager.ISecret;\n\n  constructor(parent: Construct, name: string, props: OpenPGPKeyPairProps) {\n    super(parent, name);\n\n    const codeLocation = path.resolve(__dirname, 'custom-resource-handlers');\n\n    const fn = new lambda.SingletonFunction(this, 'Lambda', {\n      // change the uuid to force deleting existing function, and create new one, as Package type change is not allowed\n      uuid: '2422BDC2-DBB0-47C1-B701-5599E0849C54',\n      description: 'Generates an OpenPGP Key and stores the private key in Secrets Manager and the public key in an SSM Parameter',\n      code: new lambda.AssetImageCode(codeLocation, {\n        file: 'Dockerfile',\n        platform: Platform.LINUX_AMD64,\n        buildArgs: {\n          FUN_SRC_DIR: 'pgp-secret',\n        },\n        invalidation: {\n          buildArgs: true,\n        },\n      }),\n      handler: lambda.Handler.FROM_IMAGE,\n      timeout: Duration.seconds(300),\n      runtime: lambda.Runtime.FROM_IMAGE,\n    });\n\n    fn.addToRolePolicy(new iam.PolicyStatement({\n      actions: [\n        'secretsmanager:CreateSecret',\n        'secretsmanager:GetSecretValue',\n        'secretsmanager:UpdateSecret',\n      
  'secretsmanager:DeleteSecret',\n      ],\n      resources: [Stack.of(this).formatArn({\n        service: 'secretsmanager',\n        resource: 'secret',\n        arnFormat: ArnFormat.COLON_RESOURCE_NAME,\n        resourceName: `${props.secretName}-??????`,\n      })],\n    }));\n\n    // To allow easy migration from a version that handled the SSM parameter in the custom resource\n    fn.addToRolePolicy(new iam.PolicyStatement({\n      actions: ['ssm:DeleteParameter'],\n      resources: ['*'],\n    }));\n\n    if (props.encryptionKey) {\n      props.encryptionKey.addToResourcePolicy(new iam.PolicyStatement({\n        actions: ['kms:Decrypt', 'kms:GenerateDataKey'],\n        resources: ['*'],\n        principals: [fn.role!.grantPrincipal],\n        conditions: {\n          StringEquals: {\n            'kms:ViaService': `secretsmanager.${Stack.of(this).region}.amazonaws.com`,\n          },\n        },\n      }));\n    }\n\n    // change the custom resource id to force recreating a new one because of the change of the underlying lambda function\n    const secret = new CustomResource(this, 'ResourceV2', {\n      serviceToken: fn.functionArn,\n      pascalCaseProperties: true,\n      properties: {\n        resourceVersion: hashFileOrDirectory(codeLocation),\n        identity: props.identity,\n        email: props.email,\n        expiry: props.expiry,\n        keySizeBits: props.keySizeBits,\n        secretName: props.secretName,\n        keyArn: props.encryptionKey && props.encryptionKey.keyArn,\n        version: props.version,\n        description: props.description,\n        deleteImmediately: props.removalPolicy === OpenPGPKeyPairRemovalPolicy.DESTROY_IMMEDIATELY,\n      },\n      removalPolicy: openPgpKeyPairRemovalPolicyToCoreRemovalPolicy(props.removalPolicy),\n    });\n    secret.node.addDependency(fn);\n\n    this.credential = secretsManager.Secret.fromSecretAttributes(this, 'Credential', {\n      encryptionKey: props.encryptionKey,\n      secretCompleteArn: 
secret.getAtt('SecretArn').toString(),\n    });\n    this.principal = new ssm.StringParameter(this, 'Principal', {\n      description: `The public part of the OpenPGP key in ${this.credential.secretArn}`,\n      parameterName: props.pubKeyParameterName,\n      stringValue: secret.getAtt('PublicKey').toString(),\n    });\n  }\n\n  public grantRead(grantee: iam.IPrincipal): void {\n    // Secret grant, identity-based only\n    grantee.addToPrincipalPolicy(new iam.PolicyStatement({\n      resources: [this.credential.secretArn],\n      actions: ['secretsmanager:ListSecrets', 'secretsmanager:DescribeSecret', 'secretsmanager:GetSecretValue'],\n    }));\n\n    // Key grant\n    if (this.credential.encryptionKey) {\n      grantee.addToPrincipalPolicy(new iam.PolicyStatement({\n        resources: [this.credential.encryptionKey.keyArn],\n        actions: ['kms:Decrypt'],\n      }));\n\n      this.credential.encryptionKey.addToResourcePolicy(new iam.PolicyStatement({\n        resources: ['*'],\n        principals: [grantee.grantPrincipal],\n        actions: ['kms:Decrypt'],\n      }));\n    }\n  }\n}\n\nfunction openPgpKeyPairRemovalPolicyToCoreRemovalPolicy(removalPolicy?: OpenPGPKeyPairRemovalPolicy): RemovalPolicy {\n  if (removalPolicy === undefined) {\n    return RemovalPolicy.RETAIN;\n  }\n  return removalPolicy === OpenPGPKeyPairRemovalPolicy.RETAIN\n    ? RemovalPolicy.RETAIN\n    : RemovalPolicy.DESTROY;\n}\n"
  },
  {
    "path": "lib/package-integrity/handler/JSONStream.d.ts",
    "content": "/**\n * Partial hand-written declarations for the JSONStream module. Refer to the JS\n * module's documentation for additional operations, and more explanations on\n * possible usage.\n *\n * @see https://github.com/dominictarr/JSONStream\n */\n declare module 'JSONStream' {\n\n  export function parse(pattern: any, map?: (value: any) => any): JSONStream;\n  export function parse(patterns: any[], map?: (value: any) => any): JSONStream;\n\n  interface JSONStream extends NodeJS.ReadWriteStream {\n    on(event: 'header', handler: (value: any) => void): this;\n    once(event: 'header', handler: (value: any) => void): this;\n\n    on(event: 'data', handler: (value: any) => void): this;\n    once(event: 'data', handler: (value: any) => void): this;\n\n    on(event: 'footer', handler: (value: any) => void): this;\n    once(event: 'footer', handler: (value: any) => void): this;\n\n    on(event: 'error', handler: (cause: any) => void): this;\n    once(event: 'error', handler: (cause: any) => void): this;\n\n    on(event: 'end', handler: () => void): this;\n    once(event: 'end', handler: () => void): this;\n  }\n}"
  },
  {
    "path": "lib/package-integrity/handler/integrity.ts",
    "content": "import { execSync } from 'child_process';\nimport type { RequestOptions, IncomingMessage } from 'http';\nimport * as os from 'os';\nimport * as path from 'path';\n// eslint-disable-next-line import/no-extraneous-dependencies\nimport AdmZip from 'adm-zip';\n// eslint-disable-next-line import/no-extraneous-dependencies\nimport * as follow from 'follow-redirects';\n// eslint-disable-next-line import/no-extraneous-dependencies\nimport * as fs from 'fs-extra';\n// eslint-disable-next-line import/no-extraneous-dependencies\nimport * as jstream from 'JSONStream';\n// eslint-disable-next-line import/no-extraneous-dependencies\nimport * as tar from 'tar';\nimport { Repository } from './repository';\n\n\n/**\n * Published package.\n */\nexport interface PublishedPackage {\n\n  /**\n   * Name of the package as stored in the package manager.\n   */\n  readonly name: string;\n\n  /**\n   * Version of the package as stored in the package manager.\n   */\n  readonly version: string;\n}\n\n/**\n * Integrity class for validating a local artifact against its published counterpart.\n *\n * Implementations differ based on the package manager in question.\n */\nexport abstract class ArtifactIntegrity {\n\n  /**\n   * The file extenstion of artifacts produced for this check. (e.g 'whl')\n   */\n  protected abstract readonly ext: string;\n\n  /**\n   * Download a package to the target file.\n   *\n   * @param pkg The package to download.\n   * @param targetFile The file path to download the package to.\n   */\n  protected abstract download(pkg: PublishedPackage, targetFile: string): Promise<void>;\n\n  /**\n   * Extract the artifact into the target directory.\n   *\n   * @param artifact Path to an artifact file.\n   * @param targetDir The directory to extract to. 
It will exist by the time this method is invoked.\n   */\n  protected abstract extract(artifact: string, targetDir: string): Promise<void>;\n\n  /**\n   * Parse a local artifact file name into a structured package.\n   *\n   * @param artifactName Base name of the local artifact file.\n   * @returns The package this artifact correlates to.\n   */\n  protected abstract parseArtifactName(artifactName: string): PublishedPackage;\n\n  /**\n   * Validate a local artifact against its published counterpart.\n   *\n   * @param localArtifactDir The directory of the local artifact. Must contain exactly one file with the appropriate extension.\n   */\n  public async validate(localArtifactDir: string) {\n\n    const artifactPath = this.findOne(localArtifactDir);\n    const name = this.constructor.name;\n\n    this.log(`Validating ${artifactPath}`);\n    const workdir = fs.mkdtempSync(path.join(os.tmpdir(), 'integrity-check'));\n\n    try {\n      const downloaded = path.join(workdir, `${name}.downloaded`);\n      const remote = path.join(workdir, `${name}.remote`);\n      const local = path.join(workdir, `${name}.local`);\n\n      // parse the artifact name into a package.\n      const pkg = this.parseArtifactName(path.basename(artifactPath));\n\n      fs.mkdirSync(remote);\n      fs.mkdirSync(local);\n\n      // download the package\n      this.log(`Downloading ${pkg.name}@${pkg.version} to ${downloaded}`);\n      await this.download(pkg, downloaded);\n\n      // extract the downloaded package\n      this.log(`Extracting remote artifact from ${downloaded} to ${remote}`);\n      await this.extract(downloaded, remote);\n\n      // extract the local artifact\n      this.log(`Extracting local artifact from ${artifactPath} to ${local}`);\n      await this.extract(artifactPath, local);\n\n      this.log(`Comparing ${local} <> ${remote}`);\n      try {\n        execSync(`diff ${local} ${remote}`, { stdio: ['ignore', 'inherit', 'inherit'] });\n      } catch (error) {\n        throw 
new Error(`${name} validation failed`);\n      }\n      this.log('Success');\n\n    } finally {\n      fs.removeSync(workdir);\n    }\n\n  }\n\n  protected log(message: string) {\n    console.log(`${this.constructor.name} | ${message} `);\n  }\n\n  private findOne(dir: string): string {\n    const files = fs.readdirSync(dir).filter(f => f.endsWith(this.ext));\n    if (files.length === 0) {\n      throw new Error(`No files found in ${dir} with extension ${this.ext}`);\n    }\n    const [first, ...rest] = files;\n    if (rest.length > 0) {\n      throw new Error(`Multiple files found in ${dir} with extension ${this.ext}: ${first}, ${rest.join(', ')}`);\n    }\n    return path.join(dir, first);\n  }\n\n}\n\n/**\n * Properties for `RepositoryIntegrity`.\n */\nexport interface RepositoryIntegrityProps {\n  /**\n   * Repository to validate.\n   */\n  readonly repository: Repository;\n\n  /**\n   * The command that produces the local artifacts.\n   *\n   * @default 'npx projen release'\n   */\n  readonly packCommand?: string;\n}\n\n/**\n * Integrity class for validating the artifacts produced by this repository against their published counterparts.\n */\nexport class RepositoryIntegrity {\n\n  public constructor(private readonly props: RepositoryIntegrityProps) {}\n\n  /**\n   * Validate the artifacts of this repo against its published counterpart.\n   */\n  public async validate() {\n\n    // note that run 'release' by default to preserve the version number.\n    // this won't do a bump since the commit we are on is already tagged.\n    const artifacts = this.props.repository.pack(this.props.packCommand ?? 
'npx projen release');\n\n    let integrity = undefined;\n    for (const artifact of artifacts) {\n      switch (artifact.lang) {\n        case 'js':\n          integrity = new NpmArtifactIntegrity();\n          break;\n        case 'python':\n          integrity = new PyPIArtifactIntegrity();\n          break;\n        case 'java':\n        case 'dotnet':\n        case 'go':\n          // we don't have integrity checks for these\n          // artifacts yet.\n          break;\n        default:\n          throw new Error(`Unsupported artifact language: ${artifact.lang}`);\n      }\n      if (integrity) {\n        await integrity.validate(artifact.directory);\n      }\n    }\n    console.log('Validation done');\n  }\n\n}\n\n/**\n * NpmIntegrity is able to perform integrity checks against packages stored on npmjs.com\n */\nexport class NpmArtifactIntegrity extends ArtifactIntegrity {\n\n  protected readonly ext = 'tgz';\n\n  protected async download(pkg: PublishedPackage, target: string): Promise<void> {\n    const tarballUrl = await jsonGet(`https://registry.npmjs.org/${encodeURIComponent(pkg.name)}/${encodeURIComponent(pkg.version)}`, ['dist', 'tarball']);\n    await download(tarballUrl, target);\n  }\n\n  public async extract(file: string, targetDir: string): Promise<void> {\n    return tar.x({ cwd: targetDir, file: file, strip: 1 });\n  }\n\n  protected parseArtifactName(artifactName: string): PublishedPackage {\n\n    // cdk8s@1.0.0-beta.59.jsii.tgz\n    const jsiiArtifact = /(.*)@(.*)\\.jsii./;\n\n    // npm artifact: cdk8s-cli-1.0.0-beta59.tgz\n    // yarn artifact: cdk8s-cli-v1.0.0-beta59.tgz (add a 'v' before the version)\n    const npmOrYarnArtifact = /(.*)-v?(\\d.*).(tgz|tar.gz)/;\n\n    const regex = artifactName.includes('.jsii.') ? 
jsiiArtifact : npmOrYarnArtifact;\n\n    const match = artifactName.match(regex);\n    if (!match) {\n      throw new Error(`Unable to parse artifact: ${artifactName}`);\n    }\n\n    return { name: match[1], version: match[2] };\n\n  }\n\n}\n\n/**\n * PyPIIntegrity is able to perform integrity checks against packages stored on pypi.org\n */\nexport class PyPIArtifactIntegrity extends ArtifactIntegrity {\n\n  protected readonly ext = 'whl';\n\n  protected async download(pkg: PublishedPackage, target: string): Promise<void> {\n\n    const files = await jsonGet(`https://pypi.org/pypi/${encodeURIComponent(pkg.name)}/json`, ['releases', pkg.version]);\n    const wheels: string[] = files.filter((f: any) => f.url.endsWith('whl')).map((f: any) => f.url);\n\n    if (wheels.length === 0) {\n      throw new Error(`No wheels found for package ${pkg.name}-${pkg.version}`);\n    }\n\n    if (wheels.length > 1) {\n      throw new Error(`Multiple wheels found for package ${pkg.name}-${pkg.version}: ${wheels.join(',')}`);\n    }\n\n    await download(wheels[0], target);\n  }\n\n  public async extract(artifact: string, target: string): Promise<void> {\n    const zip = new AdmZip(artifact);\n    return zip.extractAllTo(target);\n  }\n\n  protected parseArtifactName(artifactName: string): PublishedPackage {\n\n    // cdk8s-1.0.0b63-py3-none-any.whl\n    const regex = /(.*)-v?(\\d.*)-py.*.whl/;\n\n    const match = artifactName.match(regex);\n    if (!match) {\n      throw new Error(`Unable to parse artifact: ${artifactName}`);\n    }\n\n    return { name: match[1], version: match[2] };\n\n  }\n\n}\n\nexport function jsonGet(url: string, jsonPath?: string[]): Promise<any> {\n  return get(url, (res, ok, ko) => {\n    const json = jstream.parse(jsonPath);\n    json.once('data', ok);\n    json.once('error', ko);\n\n    res.pipe(json, { end: true });\n  }, { headers: { 'Accept': 'application/json', 'Accept-Encoding': 'identity' } });\n}\n\nexport async function download(url: string, 
targetFile: string): Promise<any> {\n  return get(url, (res, ok, ko) => {\n    const file = fs.createWriteStream(targetFile);\n    file.on('finish', ok);\n    file.on('error', ko);\n    res.pipe(file, { end: true });\n  });\n}\n\nexport async function get(\n  url: string,\n  handler: (res: IncomingMessage, ok: (value: unknown) => void, ko: (err: Error) => void) => void,\n  options: RequestOptions = {}) {\n\n  return new Promise((ok, ko) => {\n    const request = follow.https.get(url, options, (res: IncomingMessage) => {\n      if (res.statusCode !== 200) {\n        const error = new Error(`GET ${url} - HTTP ${res.statusCode} (${res.statusMessage})`);\n        Error.captureStackTrace(error);\n        return ko(error);\n      }\n      res.once('error', ko);\n      handler(res, ok, ko);\n    });\n    request.on('error', ko);\n  });\n\n}\n"
  },
  {
    "path": "lib/package-integrity/handler/repository.ts",
    "content": "import { execSync } from 'child_process';\nimport * as fs from 'fs';\nimport * as os from 'os';\nimport * as path from 'path';\n// eslint-disable-next-line import/no-extraneous-dependencies\nimport { SecretsManager } from '@aws-sdk/client-secrets-manager';\n\n/**\n * Properties for `Repository`.\n */\nexport interface RepositoryProps {\n  /**\n   * Local directory where the repository was cloned to.\n   */\n  readonly repoDir: string;\n}\n\n/**\n * Artifact produced by this repository.\n */\nexport interface Artifact {\n  /**\n   * Language of the artifact.\n   */\n  readonly lang: string;\n\n  /**\n   * Directory containing the artifact.\n   */\n  readonly directory: string;\n}\n\n/**\n * Options for `Repository.fromGitHub`\n */\nexport interface RepositoryFromGitHubOptions {\n\n  /**\n   * Repository slug (e.g cdk8s-team/cdk8s-core)\n   */\n  readonly slug: string;\n\n  /**\n   * Repository tag.\n   *\n   * @default - latest tag based on creation date.\n   */\n  readonly tag?: string;\n\n  /**\n    * Prefix for detecting the latest tag of the repo. Only applies if `tag` isn't specified.\n    * This is useful for repositories that produce multiple packages, and hence multiple tags\n    * for example: https://github.com/cdk8s-team/cdk8s-plus/tags.\n    */\n  readonly tagPrefix?: string;\n\n  /**\n   * ARN of an AWS secrets manager secret containing a GitHub token.\n   * Required for private repositories. 
Recommended for public ones, to avoid throttling issues.\n   *\n   * @default - the repository is cloned without credentials.\n   */\n  readonly githubTokenSecretArn?: string;\n\n}\n\n/**\n * Options for `Repository.fromDir`\n */\nexport interface RepositoryFromDirOptions {\n\n  /**\n   * The directory of the repo.\n   */\n  readonly repoDir: string;\n\n}\n\n/**\n * Repository containing a node project.\n */\nexport class Repository {\n\n  /**\n   * Create a repository from a local directory.\n   */\n  public static async fromDir(options: RepositoryFromDirOptions): Promise<Repository> {\n    return new Repository(options.repoDir);\n  }\n\n  /**\n   * Create a repository from a GitHub repository.\n   */\n  public static async fromGitHub(options: RepositoryFromGitHubOptions): Promise<Repository> {\n\n    const workdir = fs.mkdtempSync(path.join(os.tmpdir(), 'work'));\n    const sm = new SecretsManager();\n\n    let token = undefined;\n    if (options.githubTokenSecretArn) {\n      const secret = await sm.getSecretValue({ SecretId: options.githubTokenSecretArn });\n      token = secret.SecretString;\n    }\n    const repoDir = fs.mkdtempSync(path.join(workdir, 'repo'));\n\n    console.log(`Cloning ${options.slug} into ${repoDir}`);\n    execSync(`git clone https://${token ? 
`${token}@` : ''}github.com/${options.slug}.git ${repoDir}`);\n\n    const latestTag = findLatestTag(repoDir, options.tagPrefix);\n    execSync(`git checkout ${latestTag}`, { cwd: repoDir });\n\n    return new Repository(repoDir);\n\n  }\n\n  private readonly isJsii: boolean;\n  private readonly manifest: any;\n\n  private constructor(public readonly repoDir: string) {\n    const manifestPath = path.join(repoDir, 'package.json');\n\n    this.manifest = JSON.parse(fs.readFileSync(manifestPath, { encoding: 'utf-8' }));\n\n    const isProjen = fs.existsSync(path.join(repoDir, '.projen'));\n    const isYarn = fs.existsSync(path.join(repoDir, 'yarn.lock'));\n\n    if (!isProjen) {\n      // this makes packing much simpler since projen standardizes it.\n      // for now it will suffice, re-evaluate if a use-case arises.\n      throw new Error('Only projen managed repositories are supported at this time');\n    }\n\n    if (!isYarn) {\n      // the projen version we use has to match the one in the repo since otherwise\n      // synthesis may result in a diff. so we use and enforce a yarn.lock file\n      // to make it simpler and not have to worry about other package managers.\n      // for now it will suffice, re-evaluate if a use-case arises.\n      throw new Error('Only yarn managed repositories are supported at this time');\n    }\n\n    this.isJsii = !!this.manifest.jsii;\n  }\n\n  /**\n   * Pack the repository to produce the artifacts.\n   */\n  public pack(command: string): Artifact[] {\n\n    const installCommand = 'yarn install --frozen-lockfile';\n    console.log(`Installing | ${installCommand}`);\n    this._shell(installCommand);\n\n    const dist = this.isJsii ? this.manifest.jsii.outdir ?? 'dist' : 'dist';\n    const outdir = path.join(this.repoDir, dist);\n\n    console.log(`Packing | ${command}`);\n\n    // crappy: https://github.com/projen/projen/pull/1631\n    this._shell(this.isJsii ? 
`unset CI && ${command}` : command);\n\n    const artifacts: Artifact[] = [];\n    for (const lang of fs.readdirSync(outdir)) {\n      const langDir = path.join(outdir, lang);\n      if (!fs.lstatSync(langDir).isDirectory()) {\n        // dist folder may contain files such as changelog.md\n        // so we ignore these\n        continue;\n      }\n      artifacts.push({ lang, directory: path.join(outdir, lang) });\n    }\n\n    return artifacts;\n\n  }\n\n  private _shell(command: string) {\n    execSync(command, { cwd: this.repoDir, stdio: ['ignore', 'inherit', 'inherit'] });\n  }\n\n}\n\nfunction findLatestTag(repoDir: string, prefix?: string) {\n  const tags = execSync(`git tag -l --sort=-creatordate \"${prefix ?? ''}*\"`, { cwd: repoDir }).toString();\n  return tags.split(os.EOL)[0].trim();\n}\n"
  },
  {
    "path": "lib/package-integrity/handler/validate.sh",
    "content": "#!/bin/bash\n\nset -euo pipefail\n\nnode ${SCRIPT_DIR}/validate.bundle.js"
  },
  {
    "path": "lib/package-integrity/handler/validate.ts",
    "content": "#!/usr/bin/env node\nimport { RepositoryIntegrity } from './integrity';\nimport { Repository } from './repository';\n\nfunction requiredEnv(name: string): string {\n  const value = process.env[name];\n  if (value) return value;\n  throw new Error(`${name} env variable is required`);\n}\n\nfunction optionalEnv(name: string, defaultValue?: string) {\n  return process.env[name] ?? defaultValue;\n}\n\nconst GITHUB_REPOSITORY = requiredEnv('GITHUB_REPOSITORY');\nconst GITHUB_TOKEN_ARN = optionalEnv('GITHUB_TOKEN_ARN');\nconst TAG_PREFIX = optionalEnv('TAG_PREFIX');\nconst PACK_TASK = optionalEnv('PACK_TASK');\n\nasync function main() {\n\n  const repo = await Repository.fromGitHub({\n    githubTokenSecretArn: GITHUB_TOKEN_ARN,\n    slug: GITHUB_REPOSITORY,\n    tagPrefix: TAG_PREFIX,\n  });\n\n  const integrity = new RepositoryIntegrity({\n    repository: repo,\n    packCommand: PACK_TASK,\n  });\n\n  await integrity.validate();\n\n}\n\nmain()\n  .catch(e => {\n    console.log(`Error: ${e.message}`);\n    process.exitCode = 1;\n  });\n"
  },
  {
    "path": "lib/package-integrity/index.ts",
    "content": "export * from './integrity';"
  },
  {
    "path": "lib/package-integrity/integrity.ts",
    "content": "import * as path from 'path';\nimport {\n  aws_cloudwatch as cloudwatch,\n  aws_events as events,\n  aws_events_targets as targets,\n  aws_secretsmanager as sm,\n  Duration,\n} from 'aws-cdk-lib';\nimport { Construct } from 'constructs';\nimport { Shellable, ShellPlatform } from '../shellable';\n\n/**\n * Properties for `PackageIntegrityValidation`.\n */\nexport interface PackageIntegrityValidationProps {\n\n  /**\n   * The repository slug of the package (i.e cdklabs/jsii-docgen)\n   */\n  readonly repository: string;\n\n  /**\n   * Secret containing a github token.\n   */\n  readonly githubTokenSecret?: sm.ISecret;\n\n  /**\n   * The build platform to use. This platform should contain all necessary tools to package the artifacts\n   * in the repository. Note that by default, this also means running the tests.\n   *\n   * @default ShellPlatform.LinuxUbuntu\n   */\n  readonly buildPlatform?: ShellPlatform;\n\n  /**\n   * How often to run the validation.\n   *\n   * @default - once a day.\n   */\n  readonly rate?: Duration;\n\n  /**\n   * How many consecutive failures should cause the monitor to go into alarm.\n   *\n   * @default 3\n   */\n  readonly consecutiveFailuresToAlarm?: number;\n\n  /**\n   * Wether or not the environment should be privileged, necessary to run container images.\n   *\n   * @default false\n   */\n  readonly privileged?: boolean;\n\n  /**\n   * Tag prefix for this specific validation. 
Only needed for repositories that either release\n   * multiple packages or multiple major versions.\n   *\n   * @default - no prefix\n   */\n  readonly tagPrefix?: string;\n\n  /**\n   * The projen task that produces the local artifacts.\n   *\n   * @default 'release'\n   */\n  readonly packTask?: string;\n\n  /**\n   * Additional environment variables to set.\n   *\n   * @default - No additional environment variables\n   */\n  readonly environment?: { [key: string]: string | undefined };\n\n  /**\n   * Environment variables with secrets manager values. The values must be complete Secret Manager ARNs.\n   *\n   * @default no additional environment variables\n   */\n  readonly environmentSecrets?: { [key: string]: string };\n\n  /**\n   * Environment variables with SSM parameter values.\n   *\n   * @default no additional environment variables\n   */\n  readonly environmentParameters?: { [key: string]: string };\n}\n\n/**\n * Perform periodic integrity checks on published packages based on the\n * source code of the package. Currently supports only GitHub hosted packages.\n *\n * The check is done by downloading the published artifact, building the source code, and comparing the two.\n * If they differ, it means that of the following was compromised:\n *\n * - The publishing platform (for example GitHub runners)\n * - The artifact storage (for example npmjs.com)\n */\nexport class PackageIntegrityValidation extends Construct {\n\n  /**\n   * The alarm that will trigger if the validation fails.\n   */\n  public readonly failureAlarm: cloudwatch.Alarm;\n\n  constructor(scope: Construct, id: string, props: PackageIntegrityValidationProps) {\n    super(scope, id);\n\n    const rate = props.rate ?? Duration.days(1);\n\n    const shellable = new Shellable(this, 'Default', {\n      scriptDirectory: path.join(__dirname, 'handler'),\n      entrypoint: 'validate.sh',\n      privileged: props.privileged ?? false,\n      platform: props.buildPlatform ?? 
ShellPlatform.LinuxUbuntu,\n      environmentSecrets: props.environmentSecrets,\n      environmentParameters: props.environmentParameters,\n      environment: {\n        ...props.environment,\n        // always override the env vars we have explicit options for\n        GITHUB_REPOSITORY: props.repository,\n        TAG_PREFIX: props.tagPrefix ?? '',\n        GITHUB_TOKEN_ARN: props.githubTokenSecret?.secretArn,\n        PACK_TASK: props.packTask,\n      },\n      alarmPeriod: rate,\n      alarmEvaluationPeriods: props.consecutiveFailuresToAlarm ?? 3,\n    });\n\n    if (props.githubTokenSecret) {\n      const grant = props.githubTokenSecret.grantRead(shellable.role);\n      grant.assertSuccess();\n    }\n\n    new events.Rule(this, 'ScheduledTrigger', {\n      schedule: events.Schedule.rate(rate),\n      targets: [new targets.CodeBuildProject(shellable.project)],\n    });\n\n    this.failureAlarm = shellable.alarm;\n\n  }\n}\n"
  },
  {
    "path": "lib/permissions.ts",
    "content": "import { aws_iam as iam } from 'aws-cdk-lib';\n\n\n/**\n * Describe a Secrets Manager secret external to the CDK app\n */\nexport interface ExternalSecret {\n  /**\n   * The ARN of the AWS Secrets Manager secret.\n   */\n  secretArn: string;\n\n  /**\n   * ARN of the encryption key for this secret.\n   *\n   * (After creation of the project, you must manually grant \"kms:Decrypt\"\n   * permissions on this key to the role created for this CodeBuild project).\n   */\n  keyArn?: string;\n\n  /**\n   * Optional role to be assumed in order to access the secret.\n   * @default None\n   */\n  assumeRoleArn?: string;\n\n  /**\n   * The region where the secret is stored.\n   * @default current region\n   */\n  region?: Region;\n}\n\n// List taken from https://docs.aws.amazon.com/general/latest/gr/rande.html.\nexport type Region =\n  'us-east-1' |\n  'us-east-2' |\n  'us-west-1' |\n  'us-west-2' |\n  'ap-northeast-1' |\n  'ap-northeast-2' |\n  'ap-northeast-3' |\n  'ap-south-1' |\n  'ap-southeast-1' |\n  'ap-southeast-2' |\n  'ca-central-1' |\n  'cn-north-1' |\n  'cn-northwest-1' |\n  'eu-central-1' |\n  'eu-west-1' |\n  'eu-west-2' |\n  'eu-west-3' |\n  'sa-east-1';\n\n/**\n * Give the role permission to read a particular secret and its key.\n */\nexport function grantSecretRead(secret: ExternalSecret, identity: iam.IPrincipal) {\n  identity.addToPrincipalPolicy(new iam.PolicyStatement({\n    resources: [secret.secretArn],\n    actions: ['secretsmanager:ListSecrets', 'secretsmanager:DescribeSecret', 'secretsmanager:GetSecretValue'],\n  }));\n\n  if (secret.keyArn) {\n    identity.addToPrincipalPolicy(new iam.PolicyStatement({\n      resources: [secret.keyArn],\n      actions: ['kms:Decrypt'],\n    }));\n  }\n}\n\n/**\n * Give the role permission to assume another role.\n */\nexport function grantAssumeRole(roleToAssumeArn: string, identity: iam.IPrincipal) {\n  identity.addToPrincipalPolicy(new iam.PolicyStatement({\n    resources: [roleToAssumeArn],\n    
actions: ['sts:AssumeRole'],\n  }));\n}\n"
  },
  {
    "path": "lib/pipeline-notifications/chime.ts",
    "content": "import * as crypto from 'crypto';\nimport { ChimeNotifier, ChimeNotifierOptions, IPipelineNotification, PipelineNotificationBindOptions } from '../';\n\n/**\n * Properties to initialize ChimeNotification\n */\nexport interface ChimeNotificationProps extends ChimeNotifierOptions {\n}\n\n/**\n * Notify events on pipeline to a Chime room.\n */\nexport class ChimeNotification implements IPipelineNotification {\n  constructor(private readonly props: ChimeNotificationProps) {\n  }\n\n  public bind(options: PipelineNotificationBindOptions): void {\n    const md5 = crypto.createHash('md5');\n    md5.update(JSON.stringify(this.props.webhookUrls));\n    new ChimeNotifier(options.pipeline, `PipelineNotificationChime-${md5.digest('hex')}`, {\n      ...this.props,\n      pipeline: options.pipeline.pipeline,\n    });\n  }\n}"
  },
  {
    "path": "lib/pipeline-notifications/index.ts",
    "content": "import { ChimeNotification, ChimeNotificationProps } from './chime';\nimport { SlackNotification, SlackNotificationProps } from './slack';\nimport { IPipelineNotification } from '../pipeline';\n\nexport class PipelineNotification {\n  public static slack(props: SlackNotificationProps): IPipelineNotification {\n    return new SlackNotification(props);\n  }\n\n  public static chime(props: ChimeNotificationProps): IPipelineNotification {\n    return new ChimeNotification(props);\n  }\n}\n\nexport * from './chime';\nexport * from './slack';"
  },
  {
    "path": "lib/pipeline-notifications/slack.ts",
    "content": "import * as crypto from 'crypto';\nimport {\n  aws_chatbot as chatbot,\n  aws_codestarnotifications as starnotifs,\n  Stack,\n} from 'aws-cdk-lib';\nimport { IPipelineNotification, PipelineNotificationBindOptions } from '../';\n\n/**\n * Properties to initialize SlackNotification\n */\nexport interface SlackNotificationProps {\n  /**\n   * The list of Chatbot registered slack channels.\n   */\n  readonly channels: chatbot.SlackChannelConfiguration[];\n\n  /**\n   * The level of details to be included in the notification\n   * @default SlackNotificationDetailLevel.BASIC\n   */\n  readonly detailLevel?: SlackNotificationDetailLevel;\n}\n\n/**\n * The level of details to be included in a slack notification.\n */\nexport enum SlackNotificationDetailLevel {\n  /**\n   * Basic event details without the contents of the error message.\n   */\n  BASIC = 'BASIC',\n  /**\n   * Information included in BASIC, plus the contents of the error message.\n   */\n  FULL = 'FULL',\n}\n\n/**\n * Notify events on pipeline to a Slack channel via AWS Chatbot\n */\nexport class SlackNotification implements IPipelineNotification {\n  constructor(private readonly props: SlackNotificationProps) {\n    if (this.props.channels.length == 0) {\n      throw new Error('channels cannot be empty');\n    }\n  }\n\n  public bind(options: PipelineNotificationBindOptions): void {\n    const targets: starnotifs.CfnNotificationRule.TargetProperty[] = this.props.channels.map(c => {\n      return {\n        targetAddress: c.slackChannelConfigurationArn,\n        targetType: 'AWSChatbotSlack',\n      };\n    });\n    const hash = crypto.createHash('md5')\n      .update(JSON.stringify(\n        // Resolving the value so tokens don't cause flaky outputs\n        Stack.of(options.pipeline).resolve(targets)),\n      )\n      .digest('hex');\n    new starnotifs.CfnNotificationRule(options.pipeline, `PipelineNotificationSlack-${hash}`, {\n      name: 
`${options.pipeline.pipeline.pipelineName}-${hash}`,\n      detailType: this.props.detailLevel ?? SlackNotificationDetailLevel.BASIC,\n      resource: options.pipeline.pipeline.pipelineArn,\n      targets,\n      eventTypeIds: ['codepipeline-pipeline-action-execution-failed'],\n    });\n  }\n}\n"
  },
  {
    "path": "lib/pipeline-watcher/handler/watcher-handler.ts",
    "content": "// eslint-disable-next-line import/no-extraneous-dependencies\n\n\n// eslint-disable-next-line import/no-extraneous-dependencies\nimport { CloudWatch, Dimension, PutMetricDataCommandInput } from '@aws-sdk/client-cloudwatch';\n\n// Partial type for the 'detail' section of an event from Amazon EventBridge for 'CodePipeline Execution State Change'\n// See https://docs.aws.amazon.com/eventbridge/latest/userguide/event-types.html#codepipeline-event-type\nexport interface ExecutionStateChangeEvent {\n  readonly pipeline: string;\n  readonly state: 'STARTED' | 'CANCELED' | 'FAILED' | 'SUCCEEDED';\n}\n\n// Partial type for the 'detail' section of an event from Amazon EventBridge for 'CodePipeline Action Execution State Change'\n// See https://docs.aws.amazon.com/eventbridge/latest/userguide/event-types.html#codepipeline-event-type\nexport interface ActionStateChangeEvent extends ExecutionStateChangeEvent {\n  readonly action: string;\n}\n\nexport type LambdaExecutionStateChangeEvent = AWSLambda.EventBridgeEvent<'CodePipeline Pipeline Execution State Change', ExecutionStateChangeEvent>;\nexport type LambdaActionStateChangeEvent = AWSLambda.EventBridgeEvent<'CodePipeline Action Execution State Change', ActionStateChangeEvent>;\nexport type EventType = LambdaExecutionStateChangeEvent | LambdaActionStateChangeEvent;\n\n// export for tests\nexport const cloudwatch = new CloudWatch();\nconst logger = {\n  log: (line: string) => process.stdout.write(line),\n};\n\n/**\n * Lambda function that reacts to an Amazon EventBridge event triggered by a 'CodePipeline Action Execution State Change'.\n * The handler reads the event and sends off metrics to CloudWatch.\n */\nexport async function handler(event: EventType) {\n  logger.log(`Received event: ${JSON.stringify(event)}`);\n\n  switch (event['detail-type']) {\n    case 'CodePipeline Pipeline Execution State Change': await handleExecutionChange(event); break;\n    case 'CodePipeline Action Execution State Change': 
await handleActionChange(event); break;\n    default: throw new Error(`Unhandled detail type ${event['detail-type']}`);\n  }\n}\n\nasync function handleExecutionChange(event: LambdaExecutionStateChangeEvent) {\n  const pipelineName = event.detail.pipeline;\n  const state = event.detail.state;\n\n  let value: number;\n  switch (state) {\n    case 'FAILED': value = 1; break;\n    case 'SUCCEEDED': value = 0; break;\n    default: throw new Error(`Unsupported state: ${state}. Only FAILED and SUCCEEDED states are supported. ` +\n    'Others must be filtered out prior to this function.');\n  }\n\n  await putMetric(event, value, [\n    { Name: 'Pipeline', Value: pipelineName },\n  ]);\n\n  logger.log('Done');\n}\n\n\nasync function handleActionChange(event: LambdaActionStateChangeEvent) {\n  const pipelineName = event.detail.pipeline;\n  const action = event.detail.action;\n  const state = event.detail.state;\n\n  let value: number;\n  switch (state) {\n    case 'FAILED': value = 1; break;\n    case 'SUCCEEDED': value = 0; break;\n    default: throw new Error(`Unsupported state: ${state}. Only FAILED and SUCCEEDED states are supported. 
` +\n    'Others must be filtered out prior to this function.');\n  }\n\n  await putMetric(event, value, [\n    { Name: 'Pipeline', Value: pipelineName },\n    { Name: 'Action', Value: action },\n  ]);\n\n  logger.log('Done');\n}\n\nasync function putMetric(event: EventType, value: number, dimensions: Array<Dimension>) {\n  const metricNamespace = process.env.METRIC_NAMESPACE;\n  const metricName = process.env.METRIC_NAME;\n  const time = new Date(event.time);\n\n  if (!metricNamespace || !metricName) {\n    throw new Error('Both METRIC_NAMESPACE and METRIC_NAME environment variables must be set.');\n  }\n\n  const input: PutMetricDataCommandInput = {\n    Namespace: metricNamespace,\n    MetricData: [\n      {\n        MetricName: metricName,\n        Value: value,\n        Dimensions: dimensions,\n        Timestamp: time,\n      },\n    ],\n  };\n\n  logger.log(`Calling PutMetricData with payload: ${JSON.stringify(input)}`);\n\n  await cloudwatch.putMetricData(input);\n}"
  },
  {
    "path": "lib/pipeline-watcher/index.ts",
    "content": "export * from './watcher';"
  },
  {
    "path": "lib/pipeline-watcher/watcher.ts",
    "content": "import * as path from 'path';\nimport {\n  aws_cloudwatch as cloudwatch,\n  aws_codepipeline as cpipeline,\n  aws_events as events,\n  aws_events_targets as events_targets,\n  aws_iam as iam,\n  aws_lambda as lambda,\n} from 'aws-cdk-lib';\nimport { Construct } from 'constructs';\n\nexport interface PipelineWatcherProps {\n  /**\n   * The CloudWatch metric namespace to which metrics should be sent\n   */\n  metricNamespace: string;\n\n  /**\n   * The CloudWatch metric name for failures.\n   */\n  failureMetricName: string;\n\n  /**\n   * Code Pipeline to monitor for failed stages\n   */\n  pipeline: cpipeline.IPipeline;\n\n  /**\n   * Set the pipelineName of the alarm description.\n   *\n   * Description is set to 'Pipeline <title> has failed stages'\n   *\n   * @default pipeline's name\n   */\n  title?: string;\n}\n\n/**\n * Construct which watches a Code Pipeline for failed stages and raises an alarm\n * if there are any failed stages.\n *\n * A function runs every minute and calls GetPipelineState for the provided pipeline's\n * name, counts the number of failed stages and emits a JSON log { failedCount: <number> }.\n * A metric filter is then configured to track this value as a CloudWatch metric, and\n * a corresponding alarm is set to fire when the maximim value of a single 5-minute interval\n * is >= 1.\n */\nexport class PipelineWatcher extends Construct {\n  public readonly alarm: cloudwatch.Alarm;\n\n  constructor(parent: Construct, name: string, props: PipelineWatcherProps) {\n    super(parent, name);\n\n    const pipelineWatcher = new lambda.Function(this, 'Poller', {\n      handler: 'watcher-handler.handler',\n      runtime: lambda.Runtime.NODEJS_20_X,\n      code: lambda.Code.fromAsset(path.join(__dirname, 'handler')),\n      environment: {\n        METRIC_NAMESPACE: props.metricNamespace,\n        METRIC_NAME: props.failureMetricName,\n      },\n    });\n\n    pipelineWatcher.addToRolePolicy(new iam.PolicyStatement({\n      resources: 
['*'],\n      actions: ['cloudwatch:PutMetricData'],\n      conditions: {\n        StringEquals: {\n          'cloudwatch:namespace': props.metricNamespace,\n        },\n      },\n    }));\n\n    new events.Rule(this, 'Trigger', {\n      eventPattern: {\n        source: ['aws.codepipeline'],\n        resources: [props.pipeline.pipelineArn],\n        detailType: [\n          'CodePipeline Action Execution State Change',\n          'CodePipeline Pipeline Execution State Change',\n        ],\n        detail: {\n          state: ['FAILED', 'SUCCEEDED'],\n        },\n      },\n      targets: [new events_targets.LambdaFunction(pipelineWatcher)],\n    });\n\n    this.alarm = new cloudwatch.Alarm(this, 'Alarm', {\n      alarmDescription: `Pipeline ${props.title || props.pipeline.pipelineName} has failed stages`,\n      metric: new cloudwatch.Metric({\n        metricName: props.failureMetricName,\n        namespace: props.metricNamespace,\n        statistic: cloudwatch.Statistic.MAXIMUM,\n        dimensionsMap: {\n          Pipeline: props.pipeline.pipelineName,\n        },\n      }),\n      threshold: 1,\n      comparisonOperator: cloudwatch.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD,\n      evaluationPeriods: 1,\n      // IGNORE missing data, so the alarm stays in its current state, until the next data point.\n      treatMissingData: cloudwatch.TreatMissingData.IGNORE,\n    });\n  }\n}\n"
  },
  {
    "path": "lib/pipeline.ts",
    "content": "import {\n  Duration,\n  aws_cloudwatch as cloudwatch,\n  aws_codebuild as cbuild,\n  aws_codepipeline as cpipeline,\n  aws_codepipeline_actions as cpipeline_actions,\n  aws_events as events,\n  aws_events_targets as events_targets,\n  aws_iam as iam, aws_s3 as s3,\n  aws_sns as sns,\n  aws_sns_subscriptions as sns_subs,\n} from 'aws-cdk-lib';\nimport { Construct, IConstruct } from 'constructs';\n\nimport { AutoBuild, AutoBuildOptions } from './auto-build';\nimport { createBuildEnvironment } from './build-env';\nimport { Canary, CanaryProps } from './canary';\nimport { ChangeController } from './change-controller';\nimport { ChimeNotifier } from './chime-notifier';\nimport { PipelineWatcher } from './pipeline-watcher';\nimport * as publishing from './publishing';\nimport { AutoBump, AutoMergeBack, AutoBumpProps } from './pull-request';\nimport { AutoMergeBackPipelineOptions } from './pull-request/merge-back';\nimport { IRepo, WritableGitHubRepo } from './repo';\nimport { Shellable, ShellableProps } from './shellable';\nimport * as signing from './signing';\nimport { determineRunOrder, flatMap } from './util';\n\nconst PUBLISH_STAGE_NAME = 'Publish';\nconst SIGNING_STAGE_NAME = 'Sign';\nconst TEST_STAGE_NAME = 'Test';\nconst METRIC_NAMESPACE = 'CDK/Delivlib';\nconst FAILURE_METRIC_NAME = 'Failures';\n\nexport interface PipelineProps {\n  /**\n   * The source repository to build (e.g. 
GitHubRepo).\n   */\n  readonly repo: IRepo;\n\n  /**\n   * A display name for this pipeline.\n   */\n  readonly title?: string;\n\n  /**\n   * A physical name for this pipeline.\n   * @default - a new name will be generated.\n   */\n  readonly pipelineName?: string;\n\n  /**\n   * Branch to build.\n   * @default master\n   */\n  readonly branch?: string;\n\n  /**\n   * Email to send failure notifications.\n   * @default - No email notifications\n   */\n  readonly notificationEmail?: string;\n\n  /**\n   * The image used for the builds.\n   *\n   * @default jsii/superchain (see docs)\n   */\n  readonly buildImage?: cbuild.IBuildImage;\n\n  /**\n   * The name of the CodeBuild project that will be part of this pipeline.\n   * @default - `${pipelineName}-Build`, if `pipelineName` property is specified; automatically generated, otherwise.\n   */\n  readonly buildProjectName?: string;\n\n  /**\n   * The type of compute to use for this build.\n   * See the {@link ComputeType} enum for the possible values.\n   *\n   * @default taken from {@link #buildImage#defaultComputeType}\n   */\n  readonly computeType?: cbuild.ComputeType;\n\n  /**\n   * Indicates how the project builds Docker images. Specify true to enable\n   * running the Docker daemon inside a Docker container. This value must be\n   * set to true only if this build project will be used to build Docker\n   * images, and the specified build environment image is not one provided by\n   * AWS CodeBuild with Docker support. 
Otherwise, all associated builds that\n   * attempt to interact with the Docker daemon will fail.\n   *\n   * @default false\n   */\n  readonly privileged?: boolean;\n\n  /**\n   * Environment variables to pass to build\n   */\n  readonly environment?: { [key: string]: string };\n\n  /**\n   * Optional buildspec, as an alternative to a buildspec.yml file\n   */\n  readonly buildSpec?: cbuild.BuildSpec;\n\n  /**\n   * Indicates whether to re-run the pipeline after you've updated it.\n   * @default true\n   */\n  readonly restartExecutionOnUpdate?: boolean;\n\n  /**\n   * Indicates the concurrency limit test and publish stages.\n   *\n   * For example, if this value is 2, then only two actions will execute concurrently.\n   * If this value is 1, the pipeline will not have any concurrent execution.\n   *\n   * @default - no limit\n   */\n  readonly concurrency?: number;\n\n  /**\n   * Set the default dryRun for all publishing steps\n   *\n   * (Can still be changed when adding a step).\n   *\n   * @default false\n   */\n  readonly dryRun?: boolean;\n\n  /**\n   * Automatically build commits that are pushed to this repository, including PR builds on github.\n   *\n   * @default false\n   */\n  readonly autoBuild?: boolean;\n\n  /**\n   * Options for auto-build\n   *\n   * @default - 'autoBuildOptions.publicLogs' will be set to its default. 
'autoBuildOptions.buildspec' will be configured to match with the\n   * 'buildSpec' property.\n   */\n  readonly autoBuildOptions?: AutoBuildOptions;\n\n  /**\n   * Post a notification to the given Chime webhooks if the pipeline fails\n   * @default - no Chime notifications on pipeline failure\n   * @deprecated - use `notifyOnFailure()` instead in combination with `PipelineNotification.chime()`.\n   */\n  readonly chimeFailureWebhooks?: string[];\n\n  /**\n   * The Chime message to post\n   *\n   * @default - A default message\n   */\n  readonly chimeMessage?: string;\n\n  /**\n   * Build timeout\n   *\n   * How long the build can take at maximum (before failing with an error).\n   *\n   * @default - Duration.hours(8)\n   */\n  readonly buildTimeout?: Duration;\n}\n\nexport interface PipelineNotificationBindOptions {\n  readonly pipeline: Pipeline;\n}\n\nexport interface IPipelineNotification {\n  bind(pipeline: PipelineNotificationBindOptions): void;\n}\n\n/**\n * Options for configuring an auto bump for this pipeline.\n */\nexport interface AutoBumpOptions extends Omit<AutoBumpProps, 'repo'> {\n}\n\n/**\n * Defines a delivlib CI/CD pipeline.\n */\nexport class Pipeline extends Construct {\n  public buildRole?: iam.IRole;\n  public readonly failureAlarm: cloudwatch.Alarm;\n  public readonly buildOutput: cpipeline.Artifact;\n  public readonly sourceArtifact: cpipeline.Artifact;\n\n  /**\n   * The primary CodeBuild project of this pipeline.\n   */\n  public readonly buildProject: cbuild.IProject;\n\n  /**\n   * The auto build project. 
undefined if 'autoBuild' is disabled for this pipeline.\n   */\n  public readonly autoBuildProject?: cbuild.Project;\n\n  /*\n   * The underlying CodePipeline Pipeline object that models this pipeline.\n   */\n  public readonly pipeline: cpipeline.Pipeline;\n  private readonly branch: string;\n  private readonly notify?: sns.Topic;\n  private defaultArtifact: cpipeline.Artifact;\n  private stages: { [name: string]: cpipeline.IStage } = { };\n  private _signingOutput?: cpipeline.Artifact;\n\n  private readonly concurrency?: number;\n  private readonly repo: IRepo;\n  private readonly dryRun: boolean;\n  private readonly buildEnvironment: cbuild.BuildEnvironment;\n  private readonly buildSpec?: cbuild.BuildSpec;\n  private firstPublishStageName?: string;\n  private readonly descrPipelineName: string;\n\n  constructor(parent: Construct, name: string, props: PipelineProps) {\n    super(parent, name);\n\n    this.concurrency = props.concurrency;\n    this.repo = props.repo;\n    this.dryRun = !!props.dryRun;\n\n    this.pipeline = new cpipeline.Pipeline(this, 'BuildPipeline', {\n      pipelineName: props.pipelineName,\n      restartExecutionOnUpdate: props.restartExecutionOnUpdate === undefined ? true : props.restartExecutionOnUpdate,\n    });\n    // We will use the pipeline name if given, but we can't use the Ref if not given\n    // because that would create cyclic references. Fall back to construct path if anonymous.\n    this.descrPipelineName = props.pipelineName ?? 
this.node.path;\n\n    this.branch = props.branch || 'master';\n    this.sourceArtifact = props.repo.createSourceStage(this.pipeline, this.branch);\n\n    this.buildEnvironment = createBuildEnvironment(props);\n    this.buildSpec = props.buildSpec;\n\n    let buildProjectName = props.buildProjectName;\n    if (buildProjectName === undefined && props.pipelineName !== undefined) {\n      buildProjectName = `${props.pipelineName}-Build`;\n    }\n    this.buildProject = new cbuild.PipelineProject(this, 'BuildProject', {\n      description: `Pipeline ${this.descrPipelineName}: build step`,\n      projectName: buildProjectName,\n      environment: this.buildEnvironment,\n      buildSpec: this.buildSpec,\n      timeout: props.buildTimeout ?? Duration.hours(8),\n      ssmSessionPermissions: true,\n    });\n\n    this.buildRole = this.buildProject.role;\n    this.buildRole!.addManagedPolicy(iam.ManagedPolicy.fromAwsManagedPolicyName('AmazonElasticContainerRegistryPublicReadOnly'));\n\n    const buildStage = this.getOrCreateStage('Build');\n    const buildOutput = new cpipeline.Artifact();\n    buildStage.addAction(new cpipeline_actions.CodeBuildAction({\n      actionName: 'Build',\n      project: this.buildProject,\n      input: this.sourceArtifact,\n      outputs: [buildOutput],\n    }));\n    this.buildOutput = buildOutput;\n    this.defaultArtifact = buildOutput;\n\n    if (props.notificationEmail) {\n      this.notify = new sns.Topic(this, 'NotificationsTopic');\n      this.notify.addSubscription(new sns_subs.EmailSubscription(props.notificationEmail));\n    }\n\n    // add a failure alarm for the entire pipeline.\n    this.failureAlarm = this.addFailureAlarm(props.title);\n\n    // emit an SNS notification every time build fails.\n    this.addBuildFailureNotification(this.buildProject, `${props.title} build failed`);\n\n    // Also emit to Chime webhooks if configured\n    if (props.chimeFailureWebhooks) {\n      new ChimeNotifier(this, 'ChimeNotifier', {\n        
pipeline: this.pipeline,\n        message: props.chimeMessage,\n        webhookUrls: props.chimeFailureWebhooks,\n      });\n    }\n\n    if (props.autoBuild) {\n      this.autoBuildProject = this.autoBuild(props.autoBuildOptions).project;\n    }\n  }\n\n  /**\n   * Signing output artifact\n   */\n  public get signingOutput() {\n    return this._signingOutput;\n  }\n\n  public notifyOnFailure(notification: IPipelineNotification) {\n    notification.bind({\n      pipeline: this,\n    });\n  }\n\n  /**\n   * Add an action to run a shell script to the pipeline\n   *\n   * @return The Shellable and the Action added to the pipeline.\n   */\n  public addShellable(\n    stageName: string,\n    id: string,\n    options: AddShellableOptions,\n  ): { shellable: Shellable; action: cpipeline_actions.CodeBuildAction } {\n    const stage = this.getOrCreateStage(stageName);\n\n    const sh = new Shellable(this, id, options);\n    const action = sh.addToPipeline(\n      stage,\n      options.actionName || `Action${id}`,\n      options.inputArtifact || this.defaultArtifact,\n      this.determineRunOrderForNewAction(stage));\n\n    if (options.failureNotification) {\n      this.addBuildFailureNotification(sh.project, options.failureNotification);\n    }\n\n    return { shellable: sh, action };\n  }\n\n  public addTest(id: string, props: ShellableProps): { shellable: Shellable; action: cpipeline_actions.CodeBuildAction } {\n    return this.addShellable(TEST_STAGE_NAME, id, {\n      actionName: `Test${id}`,\n      failureNotification: `Test ${id} failed`,\n      ...props,\n    });\n  }\n\n  /**\n   * Convenience/discovery method that defines a canary test in your account.\n   * @param id the construct id\n   * @param props canary options\n   */\n  public addCanary(id: string, props: CanaryProps) {\n    return new Canary(this, `Canary${id}`, props);\n  }\n\n  public addPublish(publisher: IPublisher, options: AddPublishOptions = {}) {\n    const publishStageName = options.stageName ?? 
PUBLISH_STAGE_NAME;\n    if (!this.firstPublishStageName) {\n      this.firstPublishStageName = publishStageName;\n    }\n    const stage = this.getOrCreateStage(publishStageName);\n\n    publisher.addToPipeline(stage, `${publisher.node.id}Publish`, {\n      inputArtifact: options.inputArtifact || this.defaultArtifact,\n      runOrder: this.determineRunOrderForNewAction(stage),\n    });\n  }\n\n  /**\n   * Adds a change control policy to block transitions into the publish stage during certain time windows.\n   * @param options the options to configure the change control policy.\n   */\n  public addChangeControl(options: AddChangeControlOptions = { }): ChangeController {\n    const publishStage = this.getStage(this.firstPublishStageName ?? PUBLISH_STAGE_NAME);\n    if (!publishStage) {\n      throw new Error(`This pipeline does not have a ${PUBLISH_STAGE_NAME} stage yet. Add one first.`);\n    }\n\n    return new ChangeController(this, 'ChangeController', {\n      ...options,\n      pipelineStage: publishStage,\n    });\n  }\n\n  public addSigning(signer: signing.ISigner, options: signing.AddSigningOptions = {}) {\n    const signingStageName = options.stageName ?? SIGNING_STAGE_NAME;\n    const stage = this.getOrCreateStage(signingStageName);\n\n    this._signingOutput = signer.addToPipeline(stage, `${signer.node.id}Sign`, {\n      inputArtifact: options.inputArtifact || this.defaultArtifact,\n      runOrder: this.determineRunOrderForNewAction(stage),\n    });\n    this.defaultArtifact = this._signingOutput;\n  }\n\n  public signNuGetWithSigner(options: signing.SignNuGetWithSignerProps & signing.AddSigningOptions) {\n    this.addSigning(new signing.SignNuGetWithSigner(this, 'NuGetSigning', {\n      ...options,\n    }), options);\n  }\n\n  public publishToNpm(options: publishing.PublishToNpmProjectProps & AddPublishOptions) {\n    this.addPublish(new publishing.PublishToNpmProject(this, 'Npm', {\n      description: options.description ?? 
`Pipeline ${this.descrPipelineName}: publish to NPM`,\n      dryRun: this.dryRun,\n      ...options,\n    }), options);\n  }\n\n  public publishToMaven(options: publishing.PublishToMavenProjectProps & AddPublishOptions) {\n    this.addPublish(new publishing.PublishToMavenProject(this, 'Maven', {\n      description: options.description ?? `Pipeline ${this.descrPipelineName}: publish to Maven`,\n      dryRun: this.dryRun,\n      ...options,\n    }), options);\n  }\n\n  public publishToNuGet(options: publishing.PublishToNuGetProjectProps & AddPublishOptions) {\n    this.addPublish(new publishing.PublishToNuGetProject(this, 'NuGet', {\n      description: options.description ?? `Pipeline ${this.descrPipelineName}: publish to NuGet`,\n      dryRun: this.dryRun,\n      ...options,\n    }), options);\n  }\n\n  public publishToGitHubPages(options: publishing.PublishDocsToGitHubProjectProps & AddPublishOptions) {\n    this.addPublish(new publishing.PublishDocsToGitHubProject(this, 'GitHubPages', {\n      description: options.description ?? `Pipeline ${this.descrPipelineName}: publish to GitHub Pages`,\n      dryRun: this.dryRun,\n      ...options,\n    }), options);\n  }\n\n  public publishToGitHub(options: publishing.PublishToGitHubProps & AddPublishOptions) {\n    this.addPublish(new publishing.PublishToGitHub(this, 'GitHub', {\n      description: options.description ?? `Pipeline ${this.descrPipelineName}: publish to GitHub`,\n      dryRun: this.dryRun,\n      ...options,\n    }), options);\n  }\n\n  public publishToPyPI(options: publishing.PublishToPyPiProps & AddPublishOptions) {\n    this.addPublish(new publishing.PublishToPyPi(this, 'PyPI', {\n      description: options.description ?? 
`Pipeline ${this.descrPipelineName}: publish to PyPI`,\n      dryRun: this.dryRun,\n      ...options,\n    }), options);\n  }\n\n  public publishToS3(id: string, options: publishing.PublishToS3Props & AddPublishOptions) {\n    this.addPublish(new publishing.PublishToS3(this, id, {\n      description: options.description ?? `Pipeline ${this.descrPipelineName}: publish to S3 (${options.bucket.bucketName})`,\n      dryRun: this.dryRun,\n      ...options,\n    }), options);\n  }\n\n  /**\n   * Publish Golang code from `go` directory in build artifact to a GitHub repository.\n   */\n  public publishToGolang(options: publishing.PublishToGolangProps) {\n    this.addPublish(new publishing.PublishToGolang(this, 'Golang', {\n      description: options.description ?? `Pipeline ${this.descrPipelineName}: publish Golang`,\n      dryRun: this.dryRun,\n      ...options,\n    }));\n  }\n\n  /**\n   * Enables automatic bumps for the source repo.\n   * @param options Options for auto bump (see AutoBumpOptions for description of defaults)\n   */\n  public autoBump(options?: AutoBumpOptions): AutoBump {\n    if (!WritableGitHubRepo.isWritableGitHubRepo(this.repo)) {\n      throw new Error('\"repo\" must be a WritableGitHubRepo in order to enable auto-bump');\n    }\n\n    const autoBump = new AutoBump(this, 'AutoBump', {\n      repo: this.repo,\n      ...options,\n    });\n\n    return autoBump;\n  }\n\n  /**\n   * Enables automatic merge backs for the source repo.\n   * @param options Options for auto bump (see AutoMergeBackPipelineOptions for description of defaults)\n   */\n  public autoMergeBack(options?: AutoMergeBackPipelineOptions) {\n    if (!WritableGitHubRepo.isWritableGitHubRepo(this.repo)) {\n      throw new Error('\"repo\" must be a WritableGitHubRepo in order to enable auto-merge-back');\n    }\n\n    const mergeBack = new AutoMergeBack(this, 'MergeBack', {\n      repo: this.repo,\n      ...options,\n      projectDescription: options?.projectDescription ?? 
`Pipeline ${this.descrPipelineName}: merge-back step`,\n    });\n\n    if (options?.stage) {\n\n      const afterStage = this.getStage(options.stage.after);\n\n      if (!afterStage) {\n        throw new Error(`'options.stage.after' must be configured to an existing stage: ${options.stage.after}`);\n      }\n\n      const stage = this.getOrCreateStage(options.stage.name ?? 'MergeBack', { justAfter: afterStage });\n      stage.addAction(new cpipeline_actions.CodeBuildAction({\n        actionName: 'CreateMergeBackPullRequest',\n        project: mergeBack.pr.project,\n        input: this.sourceArtifact,\n      }));\n    }\n  }\n\n  /**\n   * Enables automatic builds of pull requests in the Github repository and posts the\n   * results back as a comment with a public link to the build logs.\n   */\n  public autoBuild(options: AutoBuildOptions = { }): AutoBuild {\n    return new AutoBuild(this, 'AutoBuild', {\n      environment: this.buildEnvironment,\n      repo: this.repo,\n      buildSpec: options.buildSpec || this.buildSpec,\n      ...options,\n    });\n  }\n\n  /**\n   * The metric that tracks pipeline failures.\n   */\n  public metricFailures(options: cloudwatch.MetricOptions): cloudwatch.Metric {\n    return new cloudwatch.Metric({\n      namespace: METRIC_NAMESPACE,\n      metricName: FAILURE_METRIC_NAME,\n      dimensionsMap: {\n        Pipeline: this.pipeline.pipelineName,\n      },\n      statistic: 'Sum',\n      ...options,\n    });\n  }\n\n  /**\n   * The metrics that track failure of each action within the pipeline.\n   */\n  public metricActionFailures(options: cloudwatch.MetricOptions): cloudwatch.Metric[] {\n    return flatMap(this.pipeline.stages, stage => stage.actions.map(action => {\n      return new cloudwatch.Metric({\n        namespace: METRIC_NAMESPACE,\n        metricName: FAILURE_METRIC_NAME,\n        dimensionsMap: {\n          Pipeline: this.pipeline.pipelineName,\n          Action: action.actionProperties.actionName,\n        },\n        
statistic: 'Sum',\n        ...options,\n      });\n    }));\n  }\n\n  public addManualApprovalToStage(stageName: string, props?: cpipeline_actions.ManualApprovalActionProps) {\n    const stage = this.getOrCreateStage(stageName);\n    stage.addAction(new cpipeline_actions.ManualApprovalAction(props ?? {\n      actionName: 'ManualApprovalAction',\n    }));\n  }\n\n  private addFailureAlarm(title?: string): cloudwatch.Alarm {\n    return new PipelineWatcher(this, 'PipelineWatcher', {\n      pipeline: this.pipeline,\n      metricNamespace: METRIC_NAMESPACE,\n      failureMetricName: FAILURE_METRIC_NAME,\n      title,\n    }).alarm;\n  }\n\n  private addBuildFailureNotification(buildProject: cbuild.IProject, message: string) {\n    if (!this.notify) {\n      return;\n    }\n\n    buildProject.onBuildFailed('OnBuildFailed').addTarget(new events_targets.SnsTopic(this.notify, {\n      message: events.RuleTargetInput.fromText(message),\n    }));\n  }\n\n  /**\n   * @returns the stage or undefined if the stage doesn't exist\n   */\n  private getStage(stageName: string): cpipeline.IStage | undefined {\n    return this.stages[stageName];\n  }\n\n  private getOrCreateStage(stageName: string, placement?: cpipeline.StagePlacement): cpipeline.IStage {\n    // otherwise, group all actions so they run concurrently.\n    let stage = this.getStage(stageName);\n    if (!stage) {\n      stage = this.pipeline.addStage({\n        stageName,\n        placement,\n      });\n      this.stages[stageName] = stage;\n    }\n    return stage;\n  }\n\n  private determineRunOrderForNewAction(stage: cpipeline.IStage): number | undefined {\n    return determineRunOrder(stage.actions.length, this.concurrency);\n  }\n}\n\nexport interface IPublisher extends IConstruct {\n  addToPipeline(stage: cpipeline.IStage, id: string, options: AddToPipelineOptions): void;\n}\n\nexport interface AddToPipelineOptions {\n  inputArtifact?: cpipeline.Artifact;\n  runOrder?: number;\n}\n\nexport interface 
AddChangeControlOptions {\n  /**\n   * The bucket in which the ChangeControl iCal document will be stored.\n   *\n   * @default a new bucket will be provisioned.\n   */\n  changeControlBucket?: s3.IBucket;\n\n  /**\n   * The key in which the iCal file will be stored.\n   *\n   * @default 'change-control.ical'\n   */\n  changeControlObjectKey?: string;\n\n  /**\n   * Schedule to run the change controller on\n   *\n   * @default rate(15 minutes)\n   */\n  scheduleExpression?: string;\n}\n\nexport interface AddPublishOptions {\n  /**\n   * The input artifact to use\n   *\n   * @default Signing output artifact when a signing stage is added to the\n   * pipeline via `addSigning` or `signNuGetWithSigner`. Otherwise, the default\n   * will be the build output artifact.\n   */\n  inputArtifact?: cpipeline.Artifact;\n\n  /**\n   * Stage name to add publishing job to\n   *\n   * By default, this will be the stage name `'Publish'`, but if you want to\n   * separate out the publishing actions into different stages (in order to\n   * block/unblock them separately for example) you can change this.\n   *\n   * Stages appear in the pipeline in the order they are referenced for\n   * the first time.\n   *\n   * @default \"Publish\"\n   */\n  readonly stageName?: string;\n}\n\nexport interface AddShellableOptions extends ShellableProps {\n  /**\n   * String to use as action name\n   *\n   * @default Id\n   */\n  actionName?: string;\n\n  /**\n   * Message to use as failure notification\n   *\n   * @default No notification\n   */\n  failureNotification?: string;\n\n  /**\n   * The input artifact to use\n   *\n   * @default Signing output artifact when a signing stage is added to the\n   * pipeline via `addSigning` or `signNuGetWithSigner`. Otherwise, the default\n   * will be the build output artifact.\n   */\n  inputArtifact?: cpipeline.Artifact;\n}\n"
  },
  {
    "path": "lib/publishing/docs/publish-docs.sh",
    "content": "#!/bin/bash\nset -euo pipefail\nartifacts=$PWD\n\n###\n# Usage: ./publish-docs.sh\n#\n# Publishes the documentation from the current directory to GitHub Pages\n###\n\nif [[ \"${GITHUB_REPO:-}\" == \"\" ]]; then\n    echo \"GITHUB_REPO variable not set.\" >&2\n    exit 1\nfi\n\nif [[ \"${FOR_REAL:-}\" == \"true\" ]]; then\n    dryrun=\"\"\nelse\n    echo \"=================================================\"\n    echo \"            🏜️ DRY-RUN MODE 🏜️\"\n    echo \"\"\n    echo \"Supply FOR_REAL=true as an environment variable to do actual publishing!\" >&2\n    echo \"=================================================\"\n    dryrun=\"--dry-run\"\nfi\n\nbranch=\"${GITHUB_PAGES_BRANCH:-gh-pages}\"\n\n\n###############\n# PREPARATION #\n###############\n\nread_json_field() {\n    node -e \"process.stdout.write(require('./$1').$2)\"\n}\n\nbuild_manifest=\"${BUILD_MANIFEST:-\"./build.json\"}\"\n\nif [ ! -f \"${build_manifest}\" ]; then\n    echo \"❌ ${build_manifest} file not found. should include 'name' and 'version' (did you set BUILD_MANIFEST?)\"\n    exit 1\nfi\n\nPKG_VERSION=\"$(read_json_field \"${build_manifest}\" version)\"\n\necho \"📖 Cloning branch ${branch} from ${GITHUB_REPO}\"\n\nWORKDIR=$(mktemp -d)\n\nif ! git clone -b ${branch} --depth=1 ${GITHUB_REPO} ${WORKDIR}; then\n    mkdir -p ${WORKDIR}\nfi\n\ncd ${WORKDIR}\n\n# reset history on this branch by recreating the git repo\nrm -fr .git\ngit init\ngit remote add origin ${GITHUB_REPO}\ngit checkout -b ${branch}\n\n# create directory for old versions if doesn't exist yet\nmkdir -p ./versions\n\n# Check if we already have docs published for this version\nif [ -d versions/${PKG_VERSION} ]; then\n    echo \"⚠️ Docs already published for version ${PKG_VERSION}. 
Skipping\"\n    exit 0\nfi\n\necho \"📖 Publishing new revision\"\nrsync -ar --delete --exclude=/.git --exclude=/versions ${artifacts}/docs/ ./\nrsync -ar --delete ${artifacts}/docs/ ./versions/${PKG_VERSION}/\n\ngit add .\ngit commit --allow-empty -m \"Release ${PKG_VERSION}\"\n\n# force push because we oblitirated the history on this branch\ngit push ${dryrun} --force origin ${branch}\n\necho \"✅ All OK!\"\n"
  },
  {
    "path": "lib/publishing/docs/publish.sh",
    "content": "#!/bin/bash\nset -euo pipefail\necho ----------------------------------------\necho \"Sources:\"\nls\necho ----------------------------------------\n\n# Configure git to successfully push to the repository\naws secretsmanager get-secret-value --secret-id \"${SSH_KEY_SECRET}\" --output=text --query=SecretString > ~/.ssh/id_rsa\nchmod 0600 ~/.ssh/id_rsa\n\ngit config --global user.name \"${COMMIT_USERNAME}\"\ngit config --global user.email \"${COMMIT_EMAIL}\"\n\n# We need rsync for the publish script\necho \"Installing rsync...\"\napt-get update > /dev/null && apt-get install -y rsync\n\n/bin/bash $SCRIPT_DIR/publish-docs.sh\n/bin/bash $SCRIPT_DIR/update-ssm.sh\n"
  },
  {
    "path": "lib/publishing/docs/update-ssm.sh",
    "content": "#!/bin/bash\n# Write the current version and timestamp to SSM, if the current version is new\nset -eu\n\nif [[ \"${SSM_PREFIX:-}\" != \"\" ]]; then\n  if [[ \"${FOR_REAL:-}\" == \"true\" ]]; then\n    dry_aws=\"aws\"\n  else\n    dry_aws=\"echo aws\"\n  fi\n\n  build_manifest=\"${BUILD_MANIFEST:-\"./build.json\"}\"\n  version=\"$(node -p \"require('${build_manifest}').version\")\"\n\n  cur_version=$(aws ssm get-parameter --name \"$SSM_PREFIX/version\" --output text --query 'Parameter.Value' || echo '-missing-')\n\n  if [[ \"$cur_version\" != \"$version\" ]]; then\n    echo \"📖 Writing version and timestamp to $SSM_PREFIX/{version,timestamp}\"\n    $dry_aws ssm put-parameter --name \"$SSM_PREFIX/version\" --type \"String\" --value \"$version\" --overwrite\n    $dry_aws ssm put-parameter --name \"$SSM_PREFIX/timestamp\" --type \"String\" --value \"$(date +%s)\" --overwrite\n  else\n    echo \"⚠️ Version already up-to-date.\"\n  fi\nfi\n"
  },
  {
    "path": "lib/publishing/github/create-release.ts",
    "content": "import { createReadStream, existsSync, promises as fs } from 'fs';\nimport path from 'path';\nimport parseChangelog from 'changelog-parser';\nimport { Octokit } from 'octokit';\n\nif (!process.env.GITHUB_TOKEN) { throw new Error('GITHUB_TOKEN is required'); }\nif (!process.env.GITHUB_REPO) { throw new Error('GITHUB_REPO is required'); }\nif (!process.env.GITHUB_OWNER) { throw new Error('GITHUB_OWNER is required'); }\n\nconst build_manifest = process.env.BUILD_MANIFEST || './build.json';\nconst changelog_file = process.env.CHANGELOG || './CHANGELOG.md';\nconst release_notes_file = process.env.RELEASE_NOTES || './RELEASE_NOTES.md';\n\nconst client = new Octokit({ auth: process.env.GITHUB_TOKEN });\nconst owner = process.env.GITHUB_OWNER;\nconst repo = process.env.GITHUB_REPO;\n\nasync function read_release_notes() {\n  if (!existsSync(release_notes_file)) {\n    return undefined;\n  }\n  return fs.readFile(release_notes_file, { encoding: 'utf8' });\n}\n\nasync function read_changelog(version: string) {\n  if (!existsSync(changelog_file)) {\n    return undefined;\n  }\n\n  const changelog = await parseChangelog(changelog_file);\n\n  const entry = (changelog.versions || []).find((item) => item.version === version);\n  if (!entry) {\n    throw new Error(`No changelog entry found for version ${version} in ${changelog_file}`);\n  }\n\n  return entry.body;\n}\n\nasync function main() {\n  const manifest = JSON.parse(await fs.readFile(path.resolve(process.cwd(), build_manifest), 'utf-8'));\n  const tag_name = `v${manifest.version}`;\n  const commit = manifest.commit || undefined;\n\n  console.log(`Checking if release ${tag_name} already exists...`);\n  let release = (await client.rest.repos.getReleaseByTag({\n    owner, repo, tag: tag_name,\n  }).catch((cause) => {\n    if (cause.status === 404) {\n      return Promise.resolve(undefined);\n    } else {\n      return Promise.reject(cause);\n    }\n  }))?.data;\n\n  if (release != null) {\n    
console.warn(`️⚠️ Release '${tag_name}' already exists. Release notes will not be updated.`);\n  } else {\n    console.log('Reading release notes...');\n    let release_notes = await read_release_notes();\n\n    if (!release_notes) {\n      console.log('No release notes found... Reading changelog...');\n      release_notes = await read_changelog(manifest.version);\n    }\n\n    release = (await client.rest.repos.createRelease({\n      owner,\n      repo,\n      tag_name,\n      name: tag_name,\n      target_commitish: commit,\n      body: release_notes,\n    })).data;\n  }\n\n  console.log('Uploading assets...');\n  for (const assetPath of process.argv.slice(2)) {\n    const assetName = path.basename(assetPath);\n    if (release.assets.some((asset) => asset.name === assetName)) {\n      console.warn(`⚠️ Release '${tag_name}' already has an asset named '${assetName}'. Leaving it as-is.`);\n      continue;\n    }\n    console.log(`Uploading asset '${assetName}' from ${assetPath}`);\n    await client.rest.repos.uploadReleaseAsset({\n      owner,\n      repo,\n      release_id: release.id,\n      name: assetName,\n      // Note: Cheating here to send the data in streaming mode.\n      //       When doing so, we need to specify the content-length header.\n      // See: https://github.com/octokit/octokit.js/discussions/2087\n      data: createReadStream(assetPath) as unknown as string,\n      headers: {\n        'content-type': 'application/octet-stream',\n        'content-length': (await fs.stat(assetPath)).size,\n      },\n    });\n  }\n\n  console.log('✅ done');\n}\n\nmain().catch(e => {\n  console.error('❌', e);\n  process.exit(1);\n});\n"
  },
  {
    "path": "lib/publishing/github/package-lock.json",
    "content": "{\n  \"name\": \"github\",\n  \"version\": \"1.0.0\",\n  \"lockfileVersion\": 3,\n  \"requires\": true,\n  \"packages\": {\n    \"\": {\n      \"name\": \"github\",\n      \"version\": \"1.0.0\",\n      \"license\": \"ISC\",\n      \"dependencies\": {\n        \"changelog-parser\": \"^2.8.1\",\n        \"octokit\": \"^2.0.19\"\n      },\n      \"devDependencies\": {\n        \"@types/changelog-parser\": \"^2.8.1\",\n        \"@types/node\": \"^14\",\n        \"typescript\": \"~4.9.4\"\n      }\n    },\n    \"node_modules/@octokit/app\": {\n      \"version\": \"13.1.5\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/app/-/app-13.1.5.tgz\",\n      \"integrity\": \"sha512-6qTa24S+gdQUU66SCVfqTkyt2jAr9/ZeyPqJhnNI9PZ8Wum4lQy3bPS+voGlxABNOlzRKnxbSdYKoraMr3MqBA==\",\n      \"dependencies\": {\n        \"@octokit/auth-app\": \"^4.0.13\",\n        \"@octokit/auth-unauthenticated\": \"^3.0.0\",\n        \"@octokit/core\": \"^4.0.0\",\n        \"@octokit/oauth-app\": \"^4.0.7\",\n        \"@octokit/plugin-paginate-rest\": \"^6.0.0\",\n        \"@octokit/types\": \"^9.0.0\",\n        \"@octokit/webhooks\": \"^10.0.0\"\n      },\n      \"engines\": {\n        \"node\": \">= 14\"\n      }\n    },\n    \"node_modules/@octokit/auth-app\": {\n      \"version\": \"4.0.13\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/auth-app/-/auth-app-4.0.13.tgz\",\n      \"integrity\": \"sha512-NBQkmR/Zsc+8fWcVIFrwDgNXS7f4XDrkd9LHdi9DPQw1NdGHLviLzRO2ZBwTtepnwHXW5VTrVU9eFGijMUqllg==\",\n      \"dependencies\": {\n        \"@octokit/auth-oauth-app\": \"^5.0.0\",\n        \"@octokit/auth-oauth-user\": \"^2.0.0\",\n        \"@octokit/request\": \"^6.0.0\",\n        \"@octokit/request-error\": \"^3.0.0\",\n        \"@octokit/types\": \"^9.0.0\",\n        \"deprecation\": \"^2.3.1\",\n        \"lru-cache\": \"^9.0.0\",\n        \"universal-github-app-jwt\": \"^1.1.1\",\n        \"universal-user-agent\": \"^6.0.0\"\n      },\n      \"engines\": {\n        
\"node\": \">= 14\"\n      }\n    },\n    \"node_modules/@octokit/auth-oauth-app\": {\n      \"version\": \"5.0.5\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/auth-oauth-app/-/auth-oauth-app-5.0.5.tgz\",\n      \"integrity\": \"sha512-UPX1su6XpseaeLVCi78s9droxpGtBWIgz9XhXAx9VXabksoF0MyI5vaa1zo1njyYt6VaAjFisC2A2Wchcu2WmQ==\",\n      \"dependencies\": {\n        \"@octokit/auth-oauth-device\": \"^4.0.0\",\n        \"@octokit/auth-oauth-user\": \"^2.0.0\",\n        \"@octokit/request\": \"^6.0.0\",\n        \"@octokit/types\": \"^9.0.0\",\n        \"@types/btoa-lite\": \"^1.0.0\",\n        \"btoa-lite\": \"^1.0.0\",\n        \"universal-user-agent\": \"^6.0.0\"\n      },\n      \"engines\": {\n        \"node\": \">= 14\"\n      }\n    },\n    \"node_modules/@octokit/auth-oauth-device\": {\n      \"version\": \"4.0.4\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/auth-oauth-device/-/auth-oauth-device-4.0.4.tgz\",\n      \"integrity\": \"sha512-Xl85BZYfqCMv+Uvz33nVVUjE7I/PVySNaK6dRRqlkvYcArSr9vRcZC9KVjXYObGRTCN6mISeYdakAZvWEN4+Jw==\",\n      \"dependencies\": {\n        \"@octokit/oauth-methods\": \"^2.0.0\",\n        \"@octokit/request\": \"^6.0.0\",\n        \"@octokit/types\": \"^9.0.0\",\n        \"universal-user-agent\": \"^6.0.0\"\n      },\n      \"engines\": {\n        \"node\": \">= 14\"\n      }\n    },\n    \"node_modules/@octokit/auth-oauth-user\": {\n      \"version\": \"2.1.1\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/auth-oauth-user/-/auth-oauth-user-2.1.1.tgz\",\n      \"integrity\": \"sha512-JgqnNNPf9CaWLxWm9uh2WgxcaVYhxBR09NVIPTiMU2dVZ3FObOHs3njBiLNw+zq84k+rEdm5Y7AsiASrZ84Apg==\",\n      \"dependencies\": {\n        \"@octokit/auth-oauth-device\": \"^4.0.0\",\n        \"@octokit/oauth-methods\": \"^2.0.0\",\n        \"@octokit/request\": \"^6.0.0\",\n        \"@octokit/types\": \"^9.0.0\",\n        \"btoa-lite\": \"^1.0.0\",\n        \"universal-user-agent\": \"^6.0.0\"\n      },\n      \"engines\": 
{\n        \"node\": \">= 14\"\n      }\n    },\n    \"node_modules/@octokit/auth-token\": {\n      \"version\": \"3.0.4\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/auth-token/-/auth-token-3.0.4.tgz\",\n      \"integrity\": \"sha512-TWFX7cZF2LXoCvdmJWY7XVPi74aSY0+FfBZNSXEXFkMpjcqsQwDSYVv5FhRFaI0V1ECnwbz4j59T/G+rXNWaIQ==\",\n      \"engines\": {\n        \"node\": \">= 14\"\n      }\n    },\n    \"node_modules/@octokit/auth-unauthenticated\": {\n      \"version\": \"3.0.5\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/auth-unauthenticated/-/auth-unauthenticated-3.0.5.tgz\",\n      \"integrity\": \"sha512-yH2GPFcjrTvDWPwJWWCh0tPPtTL5SMgivgKPA+6v/XmYN6hGQkAto8JtZibSKOpf8ipmeYhLNWQ2UgW0GYILCw==\",\n      \"dependencies\": {\n        \"@octokit/request-error\": \"^3.0.0\",\n        \"@octokit/types\": \"^9.0.0\"\n      },\n      \"engines\": {\n        \"node\": \">= 14\"\n      }\n    },\n    \"node_modules/@octokit/core\": {\n      \"version\": \"4.2.1\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/core/-/core-4.2.1.tgz\",\n      \"integrity\": \"sha512-tEDxFx8E38zF3gT7sSMDrT1tGumDgsw5yPG6BBh/X+5ClIQfMH/Yqocxz1PnHx6CHyF6pxmovUTOfZAUvQ0Lvw==\",\n      \"dependencies\": {\n        \"@octokit/auth-token\": \"^3.0.0\",\n        \"@octokit/graphql\": \"^5.0.0\",\n        \"@octokit/request\": \"^6.0.0\",\n        \"@octokit/request-error\": \"^3.0.0\",\n        \"@octokit/types\": \"^9.0.0\",\n        \"before-after-hook\": \"^2.2.0\",\n        \"universal-user-agent\": \"^6.0.0\"\n      },\n      \"engines\": {\n        \"node\": \">= 14\"\n      }\n    },\n    \"node_modules/@octokit/endpoint\": {\n      \"version\": \"7.0.5\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/endpoint/-/endpoint-7.0.5.tgz\",\n      \"integrity\": \"sha512-LG4o4HMY1Xoaec87IqQ41TQ+glvIeTKqfjkCEmt5AIwDZJwQeVZFIEYXrYY6yLwK+pAScb9Gj4q+Nz2qSw1roA==\",\n      \"dependencies\": {\n        \"@octokit/types\": \"^9.0.0\",\n        
\"is-plain-object\": \"^5.0.0\",\n        \"universal-user-agent\": \"^6.0.0\"\n      },\n      \"engines\": {\n        \"node\": \">= 14\"\n      }\n    },\n    \"node_modules/@octokit/graphql\": {\n      \"version\": \"5.0.6\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/graphql/-/graphql-5.0.6.tgz\",\n      \"integrity\": \"sha512-Fxyxdy/JH0MnIB5h+UQ3yCoh1FG4kWXfFKkpWqjZHw/p+Kc8Y44Hu/kCgNBT6nU1shNumEchmW/sUO1JuQnPcw==\",\n      \"dependencies\": {\n        \"@octokit/request\": \"^6.0.0\",\n        \"@octokit/types\": \"^9.0.0\",\n        \"universal-user-agent\": \"^6.0.0\"\n      },\n      \"engines\": {\n        \"node\": \">= 14\"\n      }\n    },\n    \"node_modules/@octokit/oauth-app\": {\n      \"version\": \"4.2.2\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/oauth-app/-/oauth-app-4.2.2.tgz\",\n      \"integrity\": \"sha512-/jsPd43Yu2UXJ4XGq9KyOjPj5kNWQ5pfVzeDEfIVE8ENchyIPS+/IY2a8b0+OQSAsBKBLTHVp9m51RfGHmPZlw==\",\n      \"dependencies\": {\n        \"@octokit/auth-oauth-app\": \"^5.0.0\",\n        \"@octokit/auth-oauth-user\": \"^2.0.0\",\n        \"@octokit/auth-unauthenticated\": \"^3.0.0\",\n        \"@octokit/core\": \"^4.0.0\",\n        \"@octokit/oauth-authorization-url\": \"^5.0.0\",\n        \"@octokit/oauth-methods\": \"^2.0.0\",\n        \"@types/aws-lambda\": \"^8.10.83\",\n        \"fromentries\": \"^1.3.1\",\n        \"universal-user-agent\": \"^6.0.0\"\n      },\n      \"engines\": {\n        \"node\": \">= 14\"\n      }\n    },\n    \"node_modules/@octokit/oauth-authorization-url\": {\n      \"version\": \"5.0.0\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/oauth-authorization-url/-/oauth-authorization-url-5.0.0.tgz\",\n      \"integrity\": \"sha512-y1WhN+ERDZTh0qZ4SR+zotgsQUE1ysKnvBt1hvDRB2WRzYtVKQjn97HEPzoehh66Fj9LwNdlZh+p6TJatT0zzg==\",\n      \"engines\": {\n        \"node\": \">= 14\"\n      }\n    },\n    \"node_modules/@octokit/oauth-methods\": {\n      \"version\": \"2.0.5\",\n      
\"resolved\": \"https://registry.npmjs.org/@octokit/oauth-methods/-/oauth-methods-2.0.5.tgz\",\n      \"integrity\": \"sha512-yQP6B5gE3axNxuM3U9KqWs/ErAQ+WLPaPgC/7EjsZsQibkf8sjdAfF8/y/EJW+Dd05XQvadX4WhQZPMnO1SE1A==\",\n      \"dependencies\": {\n        \"@octokit/oauth-authorization-url\": \"^5.0.0\",\n        \"@octokit/request\": \"^6.2.3\",\n        \"@octokit/request-error\": \"^3.0.3\",\n        \"@octokit/types\": \"^9.0.0\",\n        \"btoa-lite\": \"^1.0.0\"\n      },\n      \"engines\": {\n        \"node\": \">= 14\"\n      }\n    },\n    \"node_modules/@octokit/openapi-types\": {\n      \"version\": \"17.2.0\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-17.2.0.tgz\",\n      \"integrity\": \"sha512-MazrFNx4plbLsGl+LFesMo96eIXkFgEtaKbnNpdh4aQ0VM10aoylFsTYP1AEjkeoRNZiiPe3T6Gl2Hr8dJWdlQ==\"\n    },\n    \"node_modules/@octokit/plugin-paginate-rest\": {\n      \"version\": \"6.1.2\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-6.1.2.tgz\",\n      \"integrity\": \"sha512-qhrmtQeHU/IivxucOV1bbI/xZyC/iOBhclokv7Sut5vnejAIAEXVcGQeRpQlU39E0WwK9lNvJHphHri/DB6lbQ==\",\n      \"dependencies\": {\n        \"@octokit/tsconfig\": \"^1.0.2\",\n        \"@octokit/types\": \"^9.2.3\"\n      },\n      \"engines\": {\n        \"node\": \">= 14\"\n      },\n      \"peerDependencies\": {\n        \"@octokit/core\": \">=4\"\n      }\n    },\n    \"node_modules/@octokit/plugin-rest-endpoint-methods\": {\n      \"version\": \"7.1.2\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-7.1.2.tgz\",\n      \"integrity\": \"sha512-R0oJ7j6f/AdqPLtB9qRXLO+wjI9pctUn8Ka8UGfGaFCcCv3Otx14CshQ89K4E88pmyYZS8p0rNTiprML/81jig==\",\n      \"dependencies\": {\n        \"@octokit/types\": \"^9.2.3\",\n        \"deprecation\": \"^2.3.1\"\n      },\n      \"engines\": {\n        \"node\": \">= 14\"\n      },\n      
\"peerDependencies\": {\n        \"@octokit/core\": \">=3\"\n      }\n    },\n    \"node_modules/@octokit/plugin-retry\": {\n      \"version\": \"4.1.6\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-4.1.6.tgz\",\n      \"integrity\": \"sha512-obkYzIgEC75r8+9Pnfiiqy3y/x1bc3QLE5B7qvv9wi9Kj0R5tGQFC6QMBg1154WQ9lAVypuQDGyp3hNpp15gQQ==\",\n      \"dependencies\": {\n        \"@octokit/types\": \"^9.0.0\",\n        \"bottleneck\": \"^2.15.3\"\n      },\n      \"engines\": {\n        \"node\": \">= 14\"\n      },\n      \"peerDependencies\": {\n        \"@octokit/core\": \">=3\"\n      }\n    },\n    \"node_modules/@octokit/plugin-throttling\": {\n      \"version\": \"5.2.3\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/plugin-throttling/-/plugin-throttling-5.2.3.tgz\",\n      \"integrity\": \"sha512-C9CFg9mrf6cugneKiaI841iG8DOv6P5XXkjmiNNut+swePxQ7RWEdAZRp5rJoE1hjsIqiYcKa/ZkOQ+ujPI39Q==\",\n      \"dependencies\": {\n        \"@octokit/types\": \"^9.0.0\",\n        \"bottleneck\": \"^2.15.3\"\n      },\n      \"engines\": {\n        \"node\": \">= 14\"\n      },\n      \"peerDependencies\": {\n        \"@octokit/core\": \"^4.0.0\"\n      }\n    },\n    \"node_modules/@octokit/request\": {\n      \"version\": \"6.2.5\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/request/-/request-6.2.5.tgz\",\n      \"integrity\": \"sha512-z83E8UIlPNaJUsXpjD8E0V5o/5f+vJJNbNcBwVZsX3/vC650U41cOkTLjq4PKk9BYonQGOnx7N17gvLyNjgGcQ==\",\n      \"dependencies\": {\n        \"@octokit/endpoint\": \"^7.0.0\",\n        \"@octokit/request-error\": \"^3.0.0\",\n        \"@octokit/types\": \"^9.0.0\",\n        \"is-plain-object\": \"^5.0.0\",\n        \"node-fetch\": \"^2.6.7\",\n        \"universal-user-agent\": \"^6.0.0\"\n      },\n      \"engines\": {\n        \"node\": \">= 14\"\n      }\n    },\n    \"node_modules/@octokit/request-error\": {\n      \"version\": \"3.0.3\",\n      \"resolved\": 
\"https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.3.tgz\",\n      \"integrity\": \"sha512-crqw3V5Iy2uOU5Np+8M/YexTlT8zxCfI+qu+LxUB7SZpje4Qmx3mub5DfEKSO8Ylyk0aogi6TYdf6kxzh2BguQ==\",\n      \"dependencies\": {\n        \"@octokit/types\": \"^9.0.0\",\n        \"deprecation\": \"^2.0.0\",\n        \"once\": \"^1.4.0\"\n      },\n      \"engines\": {\n        \"node\": \">= 14\"\n      }\n    },\n    \"node_modules/@octokit/tsconfig\": {\n      \"version\": \"1.0.2\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/tsconfig/-/tsconfig-1.0.2.tgz\",\n      \"integrity\": \"sha512-I0vDR0rdtP8p2lGMzvsJzbhdOWy405HcGovrspJ8RRibHnyRgggUSNO5AIox5LmqiwmatHKYsvj6VGFHkqS7lA==\"\n    },\n    \"node_modules/@octokit/types\": {\n      \"version\": \"9.2.3\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/types/-/types-9.2.3.tgz\",\n      \"integrity\": \"sha512-MMeLdHyFIALioycq+LFcA71v0S2xpQUX2cw6pPbHQjaibcHYwLnmK/kMZaWuGfGfjBJZ3wRUq+dOaWsvrPJVvA==\",\n      \"dependencies\": {\n        \"@octokit/openapi-types\": \"^17.2.0\"\n      }\n    },\n    \"node_modules/@octokit/webhooks\": {\n      \"version\": \"10.9.1\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/webhooks/-/webhooks-10.9.1.tgz\",\n      \"integrity\": \"sha512-5NXU4VfsNOo2VSU/SrLrpPH2Z1ZVDOWFcET4EpnEBX1uh/v8Uz65UVuHIRx5TZiXhnWyRE9AO1PXHa+M/iWwZA==\",\n      \"dependencies\": {\n        \"@octokit/request-error\": \"^3.0.0\",\n        \"@octokit/webhooks-methods\": \"^3.0.0\",\n        \"@octokit/webhooks-types\": \"6.11.0\",\n        \"aggregate-error\": \"^3.1.0\"\n      },\n      \"engines\": {\n        \"node\": \">= 14\"\n      }\n    },\n    \"node_modules/@octokit/webhooks-methods\": {\n      \"version\": \"3.0.3\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-3.0.3.tgz\",\n      \"integrity\": \"sha512-2vM+DCNTJ5vL62O5LagMru6XnYhV4fJslK+5YUkTa6rWlW2S+Tqs1lF9Wr9OGqHfVwpBj3TeztWfVON/eUoW1Q==\",\n      
\"engines\": {\n        \"node\": \">= 14\"\n      }\n    },\n    \"node_modules/@octokit/webhooks-types\": {\n      \"version\": \"6.11.0\",\n      \"resolved\": \"https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-6.11.0.tgz\",\n      \"integrity\": \"sha512-AanzbulOHljrku1NGfafxdpTCfw2ENaWzH01N2vqQM+cUFbk868Cgh0xylz0JIM9BoKbfI++bdD6EYX0Q/UTEw==\"\n    },\n    \"node_modules/@types/aws-lambda\": {\n      \"version\": \"8.10.116\",\n      \"resolved\": \"https://registry.npmjs.org/@types/aws-lambda/-/aws-lambda-8.10.116.tgz\",\n      \"integrity\": \"sha512-LSvIyxYCsIMOiBnb5D6HTf7JXLCh3KPiZWL6Pkn1MqV/v5OoP42GDqn5H4wHKGGKN0mJB+4y1r0oat1dLBAkuA==\"\n    },\n    \"node_modules/@types/btoa-lite\": {\n      \"version\": \"1.0.0\",\n      \"resolved\": \"https://registry.npmjs.org/@types/btoa-lite/-/btoa-lite-1.0.0.tgz\",\n      \"integrity\": \"sha512-wJsiX1tosQ+J5+bY5LrSahHxr2wT+uME5UDwdN1kg4frt40euqA+wzECkmq4t5QbveHiJepfdThgQrPw6KiSlg==\"\n    },\n    \"node_modules/@types/changelog-parser\": {\n      \"version\": \"2.8.1\",\n      \"resolved\": \"https://registry.npmjs.org/@types/changelog-parser/-/changelog-parser-2.8.1.tgz\",\n      \"integrity\": \"sha512-Wul8tHLumoC7mauxXzPbtBLC2KuY5NZ2w6BlRYnOM53oZz5mz7oUGsYL4l8bKzZzIX7y2vO7Y4/2K8qICJjckw==\",\n      \"dev\": true\n    },\n    \"node_modules/@types/jsonwebtoken\": {\n      \"version\": \"9.0.2\",\n      \"resolved\": \"https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz\",\n      \"integrity\": \"sha512-drE6uz7QBKq1fYqqoFKTDRdFCPHd5TCub75BM+D+cMx7NU9hUz7SESLfC2fSCXVFMO5Yj8sOWHuGqPgjc+fz0Q==\",\n      \"dependencies\": {\n        \"@types/node\": \"*\"\n      }\n    },\n    \"node_modules/@types/node\": {\n      \"version\": \"14.18.20\",\n      \"license\": \"MIT\"\n    },\n    \"node_modules/aggregate-error\": {\n      \"version\": \"3.1.0\",\n      \"resolved\": \"https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz\",\n      \"integrity\": 
\"sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==\",\n      \"dependencies\": {\n        \"clean-stack\": \"^2.0.0\",\n        \"indent-string\": \"^4.0.0\"\n      },\n      \"engines\": {\n        \"node\": \">=8\"\n      }\n    },\n    \"node_modules/before-after-hook\": {\n      \"version\": \"2.2.3\",\n      \"resolved\": \"https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz\",\n      \"integrity\": \"sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==\"\n    },\n    \"node_modules/bottleneck\": {\n      \"version\": \"2.19.5\",\n      \"resolved\": \"https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz\",\n      \"integrity\": \"sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==\"\n    },\n    \"node_modules/btoa-lite\": {\n      \"version\": \"1.0.0\",\n      \"resolved\": \"https://registry.npmjs.org/btoa-lite/-/btoa-lite-1.0.0.tgz\",\n      \"integrity\": \"sha512-gvW7InbIyF8AicrqWoptdW08pUxuhq8BEgowNajy9RhiE86fmGAGl+bLKo6oB8QP0CkqHLowfN0oJdKC/J6LbA==\"\n    },\n    \"node_modules/buffer-equal-constant-time\": {\n      \"version\": \"1.0.1\",\n      \"resolved\": \"https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz\",\n      \"integrity\": \"sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==\"\n    },\n    \"node_modules/changelog-parser\": {\n      \"version\": \"2.8.1\",\n      \"license\": \"ISC\",\n      \"dependencies\": {\n        \"line-reader\": \"^0.2.4\",\n        \"remove-markdown\": \"^0.2.2\"\n      },\n      \"bin\": {\n        \"changelog-parser\": \"bin/cli.js\"\n      }\n    },\n    \"node_modules/clean-stack\": {\n      \"version\": \"2.2.0\",\n      \"resolved\": \"https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz\",\n      \"integrity\": 
\"sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==\",\n      \"engines\": {\n        \"node\": \">=6\"\n      }\n    },\n    \"node_modules/deprecation\": {\n      \"version\": \"2.3.1\",\n      \"resolved\": \"https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz\",\n      \"integrity\": \"sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==\"\n    },\n    \"node_modules/ecdsa-sig-formatter\": {\n      \"version\": \"1.0.11\",\n      \"resolved\": \"https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz\",\n      \"integrity\": \"sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==\",\n      \"dependencies\": {\n        \"safe-buffer\": \"^5.0.1\"\n      }\n    },\n    \"node_modules/fromentries\": {\n      \"version\": \"1.3.2\",\n      \"resolved\": \"https://registry.npmjs.org/fromentries/-/fromentries-1.3.2.tgz\",\n      \"integrity\": \"sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg==\",\n      \"funding\": [\n        {\n          \"type\": \"github\",\n          \"url\": \"https://github.com/sponsors/feross\"\n        },\n        {\n          \"type\": \"patreon\",\n          \"url\": \"https://www.patreon.com/feross\"\n        },\n        {\n          \"type\": \"consulting\",\n          \"url\": \"https://feross.org/support\"\n        }\n      ]\n    },\n    \"node_modules/indent-string\": {\n      \"version\": \"4.0.0\",\n      \"resolved\": \"https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz\",\n      \"integrity\": \"sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==\",\n      \"engines\": {\n        \"node\": \">=8\"\n      }\n    },\n    \"node_modules/is-plain-object\": {\n      \"version\": \"5.0.0\",\n      \"resolved\": 
\"https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz\",\n      \"integrity\": \"sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==\",\n      \"engines\": {\n        \"node\": \">=0.10.0\"\n      }\n    },\n    \"node_modules/jsonwebtoken\": {\n      \"version\": \"9.0.0\",\n      \"resolved\": \"https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.0.tgz\",\n      \"integrity\": \"sha512-tuGfYXxkQGDPnLJ7SibiQgVgeDgfbPq2k2ICcbgqW8WxWLBAxKQM/ZCu/IT8SOSwmaYl4dpTFCW5xZv7YbbWUw==\",\n      \"dependencies\": {\n        \"jws\": \"^3.2.2\",\n        \"lodash\": \"^4.17.21\",\n        \"ms\": \"^2.1.1\",\n        \"semver\": \"^7.3.8\"\n      },\n      \"engines\": {\n        \"node\": \">=12\",\n        \"npm\": \">=6\"\n      }\n    },\n    \"node_modules/jwa\": {\n      \"version\": \"1.4.1\",\n      \"resolved\": \"https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz\",\n      \"integrity\": \"sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==\",\n      \"dependencies\": {\n        \"buffer-equal-constant-time\": \"1.0.1\",\n        \"ecdsa-sig-formatter\": \"1.0.11\",\n        \"safe-buffer\": \"^5.0.1\"\n      }\n    },\n    \"node_modules/jws\": {\n      \"version\": \"3.2.2\",\n      \"resolved\": \"https://registry.npmjs.org/jws/-/jws-3.2.2.tgz\",\n      \"integrity\": \"sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==\",\n      \"dependencies\": {\n        \"jwa\": \"^1.4.1\",\n        \"safe-buffer\": \"^5.0.1\"\n      }\n    },\n    \"node_modules/line-reader\": {\n      \"version\": \"0.2.4\"\n    },\n    \"node_modules/lodash\": {\n      \"version\": \"4.17.21\",\n      \"resolved\": \"https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz\",\n      \"integrity\": \"sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==\"\n    },\n    \"node_modules/lru-cache\": {\n 
     \"version\": \"9.1.2\",\n      \"resolved\": \"https://registry.npmjs.org/lru-cache/-/lru-cache-9.1.2.tgz\",\n      \"integrity\": \"sha512-ERJq3FOzJTxBbFjZ7iDs+NiK4VI9Wz+RdrrAB8dio1oV+YvdPzUEE4QNiT2VD51DkIbCYRUUzCRkssXCHqSnKQ==\",\n      \"engines\": {\n        \"node\": \"14 || >=16.14\"\n      }\n    },\n    \"node_modules/ms\": {\n      \"version\": \"2.1.3\",\n      \"resolved\": \"https://registry.npmjs.org/ms/-/ms-2.1.3.tgz\",\n      \"integrity\": \"sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==\"\n    },\n    \"node_modules/node-fetch\": {\n      \"version\": \"2.6.11\",\n      \"resolved\": \"https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.11.tgz\",\n      \"integrity\": \"sha512-4I6pdBY1EthSqDmJkiNk3JIT8cswwR9nfeW/cPdUagJYEQG7R95WRH74wpz7ma8Gh/9dI9FP+OU+0E4FvtA55w==\",\n      \"dependencies\": {\n        \"whatwg-url\": \"^5.0.0\"\n      },\n      \"engines\": {\n        \"node\": \"4.x || >=6.0.0\"\n      },\n      \"peerDependencies\": {\n        \"encoding\": \"^0.1.0\"\n      },\n      \"peerDependenciesMeta\": {\n        \"encoding\": {\n          \"optional\": true\n        }\n      }\n    },\n    \"node_modules/octokit\": {\n      \"version\": \"2.0.19\",\n      \"resolved\": \"https://registry.npmjs.org/octokit/-/octokit-2.0.19.tgz\",\n      \"integrity\": \"sha512-hSloK4MK78QGbAuBrtIir0bsxMoRVZE5CkwKSbSRH9lqv2hx9EwhCxtPqEF+BtHqLXkXdfUaGkJMyMBotYno+A==\",\n      \"dependencies\": {\n        \"@octokit/app\": \"^13.1.5\",\n        \"@octokit/core\": \"^4.2.1\",\n        \"@octokit/oauth-app\": \"^4.2.1\",\n        \"@octokit/plugin-paginate-rest\": \"^6.1.0\",\n        \"@octokit/plugin-rest-endpoint-methods\": \"^7.1.1\",\n        \"@octokit/plugin-retry\": \"^4.1.3\",\n        \"@octokit/plugin-throttling\": \"^5.2.2\",\n        \"@octokit/types\": \"^9.2.2\"\n      },\n      \"engines\": {\n        \"node\": \">= 14\"\n      }\n    },\n    \"node_modules/once\": {\n      \"version\": 
\"1.4.0\",\n      \"resolved\": \"https://registry.npmjs.org/once/-/once-1.4.0.tgz\",\n      \"integrity\": \"sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==\",\n      \"dependencies\": {\n        \"wrappy\": \"1\"\n      }\n    },\n    \"node_modules/remove-markdown\": {\n      \"version\": \"0.2.2\",\n      \"license\": \"MIT\"\n    },\n    \"node_modules/safe-buffer\": {\n      \"version\": \"5.2.1\",\n      \"resolved\": \"https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz\",\n      \"integrity\": \"sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==\",\n      \"funding\": [\n        {\n          \"type\": \"github\",\n          \"url\": \"https://github.com/sponsors/feross\"\n        },\n        {\n          \"type\": \"patreon\",\n          \"url\": \"https://www.patreon.com/feross\"\n        },\n        {\n          \"type\": \"consulting\",\n          \"url\": \"https://feross.org/support\"\n        }\n      ]\n    },\n    \"node_modules/semver\": {\n      \"version\": \"7.5.4\",\n      \"resolved\": \"https://registry.npmjs.org/semver/-/semver-7.5.4.tgz\",\n      \"integrity\": \"sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==\",\n      \"dependencies\": {\n        \"lru-cache\": \"^6.0.0\"\n      },\n      \"bin\": {\n        \"semver\": \"bin/semver.js\"\n      },\n      \"engines\": {\n        \"node\": \">=10\"\n      }\n    },\n    \"node_modules/semver/node_modules/lru-cache\": {\n      \"version\": \"6.0.0\",\n      \"resolved\": \"https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz\",\n      \"integrity\": \"sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==\",\n      \"dependencies\": {\n        \"yallist\": \"^4.0.0\"\n      },\n      \"engines\": {\n        \"node\": \">=10\"\n      }\n    },\n    \"node_modules/tr46\": {\n      \"version\": 
\"0.0.3\",\n      \"resolved\": \"https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz\",\n      \"integrity\": \"sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==\"\n    },\n    \"node_modules/typescript\": {\n      \"version\": \"4.9.5\",\n      \"resolved\": \"https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz\",\n      \"integrity\": \"sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==\",\n      \"dev\": true,\n      \"bin\": {\n        \"tsc\": \"bin/tsc\",\n        \"tsserver\": \"bin/tsserver\"\n      },\n      \"engines\": {\n        \"node\": \">=4.2.0\"\n      }\n    },\n    \"node_modules/universal-github-app-jwt\": {\n      \"version\": \"1.1.1\",\n      \"resolved\": \"https://registry.npmjs.org/universal-github-app-jwt/-/universal-github-app-jwt-1.1.1.tgz\",\n      \"integrity\": \"sha512-G33RTLrIBMFmlDV4u4CBF7dh71eWwykck4XgaxaIVeZKOYZRAAxvcGMRFTUclVY6xoUPQvO4Ne5wKGxYm/Yy9w==\",\n      \"dependencies\": {\n        \"@types/jsonwebtoken\": \"^9.0.0\",\n        \"jsonwebtoken\": \"^9.0.0\"\n      }\n    },\n    \"node_modules/universal-user-agent\": {\n      \"version\": \"6.0.0\",\n      \"resolved\": \"https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz\",\n      \"integrity\": \"sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==\"\n    },\n    \"node_modules/webidl-conversions\": {\n      \"version\": \"3.0.1\",\n      \"resolved\": \"https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz\",\n      \"integrity\": \"sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==\"\n    },\n    \"node_modules/whatwg-url\": {\n      \"version\": \"5.0.0\",\n      \"resolved\": \"https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz\",\n      \"integrity\": 
\"sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==\",\n      \"dependencies\": {\n        \"tr46\": \"~0.0.3\",\n        \"webidl-conversions\": \"^3.0.0\"\n      }\n    },\n    \"node_modules/wrappy\": {\n      \"version\": \"1.0.2\",\n      \"resolved\": \"https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz\",\n      \"integrity\": \"sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==\"\n    },\n    \"node_modules/yallist\": {\n      \"version\": \"4.0.0\",\n      \"resolved\": \"https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz\",\n      \"integrity\": \"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==\"\n    }\n  }\n}\n"
  },
  {
    "path": "lib/publishing/github/package.json",
    "content": "{\n  \"name\": \"github\",\n  \"version\": \"1.0.0\",\n  \"description\": \"\",\n  \"main\": \"parse-changelog.js\",\n  \"scripts\": {\n    \"test\": \"echo \\\"Error: no test specified\\\" && exit 1\"\n  },\n  \"keywords\": [],\n  \"author\": \"\",\n  \"license\": \"ISC\",\n  \"dependencies\": {\n    \"changelog-parser\": \"^2.8.1\",\n    \"octokit\": \"^2.0.19\"\n  },\n  \"devDependencies\": {\n    \"@types/changelog-parser\": \"^2.8.1\",\n    \"@types/node\": \"^14\",\n    \"typescript\": \"~4.9.4\"\n  }\n}\n"
  },
  {
    "path": "lib/publishing/github/publish.sh",
    "content": "#!/bin/bash\nset -euo pipefail\nscriptdir=\"$(cd $(dirname $0) && pwd)\"\nworkdir=\"$(mktemp -d)\"\n\nheading() {\n    echo\n    echo \"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\"\n    echo \"$@\"\n    echo \"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\"\n}\n\nread_json_field() {\n    node -e \"process.stdout.write(require('./$1').$2)\"\n}\n\n# prepare_artifacts_in_current_dir TRY_TO_SIGN\nprepare_artifacts_in_current_dir() {\n    echo \"dir:\" $(pwd)\n    local build_manifest=\"${BUILD_MANIFEST:-\"./build.json\"}\"\n\n    if [ ! -f \"${build_manifest}\" ]; then\n        echo \"${build_manifest} file not found. should include the fields: 'name', 'version' and 'commit' (did you set BUILD_MANIFEST?)\" >&2\n        exit 1\n    fi\n\n    local version=\"$(read_json_field \"${build_manifest}\" version)\"\n    local name=\"$(read_json_field \"${build_manifest}\" name)\"\n\n    # --------------------------------------------------------------------------------------------------\n    echo \"name: ${name}\"\n    echo \"version: ${version}\"\n\n    # --------------------------------------------------------------------------------------------------\n    local archive=\"${workdir}/${name}-${version}.zip\"\n    echo \"Preparing .zip archive: ${archive}\"\n\n    [[ ! 
-f ${archive} ]] || {\n        echo \"File already created by a different artifact: $archive\" >&2\n        echo \"(Did you remember to create a different ${build_manifest} for every artifact?)\" >&2\n        exit 1\n    }\n    zip -y -r ${archive} .\n\n    # --------------------------------------------------------------------------------------------------\n    if $1; then\n        echo \"Signing .zip archive\"\n        chmod +x ${scriptdir}/with-signing-key.sh\n        chmod +x ${scriptdir}/sign-files.sh\n        ${scriptdir}/with-signing-key.sh ${scriptdir}/sign-files.sh ${archive}\n    fi\n}\n\n# --------------------------------------------------------------------------------------------------\n\nheading \"Primary Source\"\nprepare_artifacts_in_current_dir true\n\nif [[ \"${SECONDARY_SOURCE_NAMES:-}\" != \"\" ]]; then\n    for source_name in ${SECONDARY_SOURCE_NAMES}; do\n        heading \"Additional Source: $source_name\"\n        source_dir_var=CODEBUILD_SRC_DIR_${source_name}\n        (cd ${!source_dir_var} && prepare_artifacts_in_current_dir ${SIGN_ADDITIONAL_ARTIFACTS:-false})\n    done\nfi\n\n\n# --------------------------------------------------------------------------------------------------\n# install npm deps\nNODE_ENV=production\n(cd ${scriptdir} && npm ci)\n(cd ${scriptdir} && npm ls)\n\nheading \"Creating release\"\nls ${workdir}\n\nif ${FOR_REAL:-false}; then\n    node ${scriptdir}/create-release.js ${workdir}/*\nelse\n    echo \"===========================================\"\n    echo \"            🏜️ DRY-RUN MODE 🏜️\"\n    echo\n    echo \"Skipping the actual publishing step.\"\n    echo\n    echo \"Set FOR_REAL=true to do it!\"\n    echo \"===========================================\"\nfi\n\n/bin/bash ${scriptdir}/update-ssm.sh"
  },
  {
    "path": "lib/publishing/github/sign-files.sh",
    "content": "#!/bin/bash\nset -euo pipefail\n\nif [[ \"${1:-}\" == \"\" ]]; then\n    echo \"Usage: sign-files.sh FILE [FILE...]\" >&2\n    echo \"\">&2\n    echo \"Creates detached signature as FILE.sig.\" >&2\n    exit 1\nelse\n    if [ ! -f ${1} ]; then\n        echo \"Asked to sign ${1}, but no such file exists.\"\n        exit 1\n    fi\nfi\n\nif [[ \"${KEY_AVAILABLE:-}\" == \"\" ]]; then\n    echo \"Run this script using with-signing-key.sh\" >&2\n    exit 1\nfi\n\nif ! $KEY_AVAILABLE; then\n    echo \"No key available, not signing anything.\" >&2\n    exit 0  # Note: NOT an error\nfi\n\nwhile [[ \"${1:-}\" != \"\" ]]; do\n    echo \"Signing $1...\" >&2\n    echo $KEY_PASSPHRASE | gpg \\\n        ${GPG_PASSPHRASE_FROM_STDIN} \\\n        --local-user $KEY_ID \\\n        --batch --yes --no-tty \\\n        --output $1.sig \\\n        --detach-sign $1\n    shift\ndone\n\necho \"Done!\" >&2"
  },
  {
    "path": "lib/publishing/github/tsconfig.json",
    "content": "{\n  \"compilerOptions\": {\n    \"alwaysStrict\": true,\n    \"esModuleInterop\": true,\n    \"experimentalDecorators\": true,\n    \"inlineSourceMap\": true,\n    \"inlineSources\": false,\n    \"incremental\": true,\n    \"composite\": true,\n    \"lib\": [\n      \"es2019\",\n      \"dom\"\n    ],\n    \"module\": \"CommonJS\",\n    \"noEmitOnError\": false,\n    \"noFallthroughCasesInSwitch\": true,\n    \"noImplicitAny\": true,\n    \"noImplicitReturns\": true,\n    \"noImplicitThis\": true,\n    \"noUnusedLocals\": true,\n    \"noUnusedParameters\": true,\n    \"resolveJsonModule\": true,\n    \"skipLibCheck\": true,\n    \"strict\": true,\n    \"strictNullChecks\": true,\n    \"strictPropertyInitialization\": true,\n    \"stripInternal\": true,\n    \"target\": \"ES2019\"\n  },\n  \"include\": [\n    \"**/*.ts\"\n  ],\n}\n"
  },
  {
    "path": "lib/publishing/github/update-ssm.sh",
    "content": "#!/bin/bash\n# Write the current version and timestamp to SSM, if the current version is new\nset -eu\n\nif [[ \"${SSM_PREFIX:-}\" != \"\" ]]; then\n  if [[ \"${FOR_REAL:-}\" == \"true\" ]]; then\n    dry_aws=\"aws\"\n  else\n    dry_aws=\"echo aws\"\n  fi\n\n  build_manifest=\"${BUILD_MANIFEST:-\"./build.json\"}\"\n  version=\"$(node -p \"require('${build_manifest}').version\")\"\n\n  cur_version=$(aws ssm get-parameter --name \"$SSM_PREFIX/version\" --output text --query 'Parameter.Value' || echo '-missing-')\n\n  if [[ \"$cur_version\" != \"$version\" ]]; then\n    echo \"📖 Writing version and timestamp to $SSM_PREFIX/{version,timestamp}\"\n    $dry_aws ssm put-parameter --name \"$SSM_PREFIX/version\" --type \"String\" --value \"$version\" --overwrite\n    $dry_aws ssm put-parameter --name \"$SSM_PREFIX/timestamp\" --type \"String\" --value \"$(date +%s)\" --overwrite\n  else\n    echo \"⚠️ Version already up-to-date.\"\n  fi\nfi\n"
  },
  {
    "path": "lib/publishing/github/with-signing-key.sh",
    "content": "#!/bin/bash\n# Run another command with the signing key for the current scope,\n# if set.\n#\n# Upon running the subcommand, $KEY_AVAILABLE will be set to either\n# 'true' or 'false'. If $KEY_AVAILABLE is 'true', the following\n# variables will be set as well:\n#\n#    $KEY_ID\n#    $KEY_PASSPHRASE\n#    $GPG_PASSPHRASE_FROM_STDIN\n#\n# The environment variable KEY_PASSPHRASE will be set to\n# the key's passphrase, to pass in like so:\n#\n#    echo $KEY_PASSPHRASE | gpg ${GPG_PASSPHRASE_FROM_STDIN} \\\n#        ...other gpg arguments...\nset -euo pipefail\n\nif [[ \"${1:-}\" == \"\" ]]; then\n    echo \"Usage: with-signing-key.sh CMD [ARG...]\" >&2\n    echo \"\">&2\n    echo \"Run another command with a preloaded GPG keyring.\" >&2\n    exit 1\nfi\n\nif [[ \"${SIGNING_KEY_ARN:-}\" == \"\" ]]; then\n    echo \"SIGNING_KEY_ARN not set, running without a key\" >&2\n    export KEY_AVAILABLE=false\nelse\n    tmpdir=$(mktemp -d)\n    trap \"find $tmpdir -type f -exec rm {} \\\\; && rm -rf $tmpdir\" EXIT\n\n    # Use secrets manager to obtain the key and passphrase into a JSON file\n    echo \"Retrieving key $SIGNING_KEY_ARN...\" >&2\n    aws secretsmanager get-secret-value --secret-id \"$SIGNING_KEY_ARN\" --output text --query SecretString > $tmpdir/secret.txt\n\n    value-from-secret() {\n        node -e \"console.log(JSON.parse(require('fs').readFileSync('$tmpdir/secret.txt', { encoding: 'utf-8' })).$1)\"\n    }\n\n    export KEY_PASSPHRASE=$(value-from-secret Passphrase)\n\n    # GnuPG will occasionally bail out with \"gpg: <whatever> failed: Inappropriate ioctl for device\", the following attempts to fix\n    export GPG_TTY=$(tty)\n    export GNUPGHOME=$tmpdir\n\n    echo \"Importing key...\" >&2\n    gpg --allow-secret-key-import \\\n        --batch --yes --no-tty \\\n        --import <(value-from-secret PrivateKey)\n\n    export KEY_ID=$(gpg --list-keys --with-colons | grep pub | cut -d: -f5)\n\n    # Prepare environment variables with flags to 
GPG\n    #        --passphrase-fd 0 \\\n    #        ${EXTRA_GPG_OPTS} \\\n    export GPG_PASSPHRASE_FROM_STDIN=\"--passphrase-fd 0 --pinentry-mode loopback\"\n\n    export KEY_AVAILABLE=true\nfi\n\n# Execute remaining commands\necho \"Running: $@\" >&2\n\"$@\"\n"
  },
  {
    "path": "lib/publishing/golang/publish.sh",
    "content": "#!/bin/bash\nset -euo pipefail\necho ----------------------------------------\necho \"Sources:\"\nls\necho ----------------------------------------\n\n# Prepare the GitHub token\ntoken=\"$(aws secretsmanager get-secret-value --secret-id ${GITHUB_TOKEN_SECRET} --output=text --query=SecretString)\"\nexport GITHUB_TOKEN=\"${token}\"\n\nif [ ! -d \"go\" ]; then\n  echo \"Skipping go publishing. No 'go' directory in artifact.\"\n  exit 0\nfi\n\nnpx -p jsii-release jsii-release-golang go/\n\n/bin/bash $SCRIPT_DIR/update-ssm.sh\n"
  },
  {
    "path": "lib/publishing/golang/update-ssm.sh",
    "content": "#!/bin/bash\n# Write the current version and timestamp to SSM, if the current version is new\nset -eu\n\nif [[ \"${SSM_PREFIX:-}\" != \"\" ]]; then\n  if [[ \"${FOR_REAL:-}\" == \"true\" ]]; then\n    dry_aws=\"aws\"\n  else\n    dry_aws=\"echo aws\"\n  fi\n\n  build_manifest=\"${BUILD_MANIFEST:-\"./build.json\"}\"\n  version=\"$(node -p \"require('${build_manifest}').version\")\"\n\n  cur_version=$(aws ssm get-parameter --name \"$SSM_PREFIX/version\" --output text --query 'Parameter.Value' || echo '-missing-')\n\n  if [[ \"$cur_version\" != \"$version\" ]]; then\n    echo \"📖 Writing version and timestamp to $SSM_PREFIX/{version,timestamp}\"\n    $dry_aws ssm put-parameter --name \"$SSM_PREFIX/version\" --type \"String\" --value \"$version\" --overwrite\n    $dry_aws ssm put-parameter --name \"$SSM_PREFIX/timestamp\" --type \"String\" --value \"$(date +%s)\" --overwrite\n  else\n    echo \"⚠️ Version already up-to-date.\"\n  fi\nfi\n"
  },
  {
    "path": "lib/publishing/maven/publish.sh",
    "content": "#!/bin/bash\nset -euo pipefail\necho ----------------------------------------\necho \"Sources:\"\nls\necho ----------------------------------------\n\nif [[ ! -d ./java ]]; then\n    echo \"❌ No JARS to publish: 'java/' directory is missing.\"\n    exit 1\nfi\n\nif [[ \"${FOR_REAL:-}\" == \"true\" ]]; then\n    echo \"Publishing to Maven\"\n    unset MAVEN_DRYRUN\nelse\n    echo \"===========================================\"\n    echo \"            🏜️ DRY-RUN MODE 🏜️\"\n    echo\n    echo \"Set FOR_REAL=true to do actual publishing!\"\n    echo \"===========================================\"\n    export MAVEN_DRYRUN='true'\nfi\n\necho \"Getting credentials...\"\ncredentials=$(aws secretsmanager get-secret-value --secret-id ${MAVEN_LOGIN_SECRET} --output=text --query=SecretString)\n\nexport MAVEN_USERNAME=$(node -e \"console.log(${credentials}.username);\")\nexport MAVEN_PASSWORD=$(node -e \"console.log(${credentials}.password);\")\n\nchmod +x $SCRIPT_DIR/with-signing-key.sh\n$SCRIPT_DIR/with-signing-key.sh npx -p publib@latest publib-maven ./java\n\n/bin/bash $SCRIPT_DIR/update-ssm.sh\n"
  },
  {
    "path": "lib/publishing/maven/update-ssm.sh",
    "content": "#!/bin/bash\n# Write the current version and timestamp to SSM, if the current version is new\nset -eu\n\nif [[ \"${SSM_PREFIX:-}\" != \"\" ]]; then\n  if [[ \"${FOR_REAL:-}\" == \"true\" ]]; then\n    dry_aws=\"aws\"\n  else\n    dry_aws=\"echo aws\"\n  fi\n\n  build_manifest=\"${BUILD_MANIFEST:-\"./build.json\"}\"\n  version=\"$(node -p \"require('${build_manifest}').version\")\"\n\n  cur_version=$(aws ssm get-parameter --name \"$SSM_PREFIX/version\" --output text --query 'Parameter.Value' || echo '-missing-')\n\n  if [[ \"$cur_version\" != \"$version\" ]]; then\n    echo \"📖 Writing version and timestamp to $SSM_PREFIX/{version,timestamp}\"\n    $dry_aws ssm put-parameter --name \"$SSM_PREFIX/version\" --type \"String\" --value \"$version\" --overwrite\n    $dry_aws ssm put-parameter --name \"$SSM_PREFIX/timestamp\" --type \"String\" --value \"$(date +%s)\" --overwrite\n  else\n    echo \"⚠️ Version already up-to-date.\"\n  fi\nfi\n"
  },
  {
    "path": "lib/publishing/maven/with-signing-key.sh",
    "content": "#!/bin/bash\n# Run another command with the signing key for the current scope,\n# if set.\n#\n# Upon running the subcommand, $KEY_AVAILABLE will be set to either\n# 'true' or 'false'. If $KEY_AVAILABLE is 'true', the following\n# variables will be set as well:\n#\n#    $MAVEN_GPG_PRIVATE_KEY\n#    $MAVEN_GPG_PRIVATE_KEY_PASSPHRASE\n#\n# These will be used by `publib-maven`.\n#\n# See <https://github.com/cdklabs/publib?tab=readme-ov-file#maven>.\nset -euo pipefail\n\nif [[ \"${1:-}\" == \"\" ]]; then\n    echo \"Usage: with-signing-key.sh CMD [ARG...]\" >&2\n    echo \"\">&2\n    echo \"Run another command with a preloaded GPG keyring.\" >&2\n    exit 1\nfi\n\nif [[ \"${SIGNING_KEY_ARN:-}\" == \"\" ]]; then\n    echo \"SIGNING_KEY_ARN not set, running without a key\" >&2\n    export KEY_AVAILABLE=false\nelse\n    tmpdir=$(mktemp -d)\n    trap \"find $tmpdir -type f -exec rm {} \\\\; && rm -rf $tmpdir\" EXIT\n\n    # Use secrets manager to obtain the key and passphrase into a JSON file\n    echo \"Retrieving key $SIGNING_KEY_ARN...\" >&2\n    aws secretsmanager get-secret-value --secret-id \"$SIGNING_KEY_ARN\" --output text --query SecretString > $tmpdir/secret.txt\n\n    value-from-secret() {\n        node -e \"console.log(JSON.parse(require('fs').readFileSync('$tmpdir/secret.txt', { encoding: 'utf-8' })).$1)\"\n    }\n\n    export KEY_AVAILABLE=true\n    export MAVEN_GPG_PRIVATE_KEY=$(value-from-secret PrivateKey)\n    export MAVEN_GPG_PRIVATE_KEY_PASSPHRASE=$(value-from-secret Passphrase)\nfi\n\n# Execute remaining commands\necho \"Running: $@\" >&2\n\"$@\"\n"
  },
  {
    "path": "lib/publishing/npm/publish-npm.sh",
    "content": "#!/bin/bash\nset -euo pipefail\n\n###\n# Usage: ./publish-npm.sh\n#\n# Publishes the content of a release bundle (current directory) to NPM.\n###\n\nif [[ \"${FOR_REAL:-}\" == \"true\" ]]; then\n    dry_npm=\"npm\"\nelse\n    echo \"=================================================\"\n    echo \"            🏜️ DRY-RUN MODE 🏜️\"\n    echo \"\"\n    echo \"Supply FOR_REAL=true as an environment variable to do actual publishing!\" >&2\n    echo \"=================================================\"\n    dry_npm=\"echo npm\"\nfi\n\n#######\n# NPM #\n#######\n\nDISTTAG=${DISTTAG:-\"\"}\nif [ -n \"${DISTTAG}\" ]; then\n    DISTTAG=\"--tag=${DISTTAG}\"\nfi\n\nACCESS=\"${ACCESS:-\"public\"}\"\n\necho \"📦 Publishing to NPM\"\n\nTOKENS=$(npm token list 2>&1 || echo '')\nif echo ${TOKENS} | grep 'EAUTHUNKNOWN' > /dev/null; then\n    echo \"🔑 Can't list tokens - apparently missing authentication info\"\n    npm login\nfi\n\n# Get a list of tarballs to publish, in order:\n# 1. Print the 'js/npm-publish-order.txt' file if it exists;\n# 2. List all tarballs in the js/ directory (strip leading './')\n# 3. Retain only unique lines from both of these (with awk magic: https://stackoverflow.com/a/11532197).\n#\n# Result is that the js/npm-publish-order.txt files are published in indicated\n# order, and all remaining tarballs not in that file afterwards.\nlist_of_tarballs=$(cd ${PWD}/js && { \\\n    [[ ! -f npm-publish-order.txt ]] || cat npm-publish-order.txt && \\\n    find . 
-iname '*.tgz' | sed 's/^\\.\\///'; } | awk '!x[$0]++')\n\nfound=false\nfor filename in $list_of_tarballs; do\n    TGZ=${PWD}/js/${filename}\n    found=true\n\n    # extract module name and version from the tarball (via package/package.json)\n    packageInfo=\"$(tar -zxOf $TGZ package/package.json)\"\n    mod=\"$(node -e \"console.log(${packageInfo}.name);\")\"\n    ver=\"$(node -e \"console.log(${packageInfo}.version);\")\"\n\n    echo \"-------------------------------------------------------------------------------------------------\"\n    echo \"Publishing to npm: ${mod}@${ver} ${DISTTAG} from $TGZ\"\n\n    # check that the package is not already published using \"npm view\"\n    # returns an empty string if the package exists, but version doesn't\n    npm_view=$(npm view ${mod}@${ver} 2> /dev/null || true)\n    if [ -z \"${npm_view}\" ]; then\n        $dry_npm publish $TGZ --access=${ACCESS} ${DISTTAG} --loglevel=silly\n    else\n        echo \"⚠️ Package ${mod}@${ver} already published. Skipping.\"\n    fi\ndone\n\nif ! $found; then\n    echo \"❌ No js/**/*.tgz files. Nothing to publish.\"\n    exit 1\nfi\n\n\necho \"✅ All OK!\"\n"
  },
  {
    "path": "lib/publishing/npm/publish.sh",
    "content": "#!/bin/bash\nset -euo pipefail\necho ----------------------------------------\necho \"Sources:\"\nls\necho ----------------------------------------\n\n# Prepare the NPM publishing token\nsecret=$(aws secretsmanager get-secret-value --secret-id $NPM_TOKEN_SECRET --output=text --query=SecretString)\ntoken=$(node -e \"console.log(${secret}.token);\")\n\nexport NPM_TOKEN=$token\n\n# Creating an .npmrc that references an envvar is what you're supposed to do.\n# https://docs.npmjs.com/private-modules/ci-server-config\necho '//registry.npmjs.org/:_authToken=${NPM_TOKEN}' > ~/.npmrc\n\n# Call publishing script\n/bin/bash $SCRIPT_DIR/publish-npm.sh\n/bin/bash $SCRIPT_DIR/update-ssm.sh\n"
  },
  {
    "path": "lib/publishing/npm/update-ssm.sh",
    "content": "#!/bin/bash\n# Write the current version and timestamp to SSM, if the current version is new\nset -eu\n\nif [[ \"${SSM_PREFIX:-}\" != \"\" ]]; then\n  if [[ \"${FOR_REAL:-}\" == \"true\" ]]; then\n    dry_aws=\"aws\"\n  else\n    dry_aws=\"echo aws\"\n  fi\n\n  build_manifest=\"${BUILD_MANIFEST:-\"./build.json\"}\"\n  version=\"$(node -p \"require('${build_manifest}').version\")\"\n\n  cur_version=$(aws ssm get-parameter --name \"$SSM_PREFIX/version\" --output text --query 'Parameter.Value' || echo '-missing-')\n\n  if [[ \"$cur_version\" != \"$version\" ]]; then\n    echo \"📖 Writing version and timestamp to $SSM_PREFIX/{version,timestamp}\"\n    $dry_aws ssm put-parameter --name \"$SSM_PREFIX/version\" --type \"String\" --value \"$version\" --overwrite\n    $dry_aws ssm put-parameter --name \"$SSM_PREFIX/timestamp\" --type \"String\" --value \"$(date +%s)\" --overwrite\n  else\n    echo \"⚠️ Version already up-to-date.\"\n  fi\nfi\n"
  },
  {
    "path": "lib/publishing/nuget/publish.sh",
    "content": "#!/bin/bash\nset -euo pipefail\n\necho \"Installing required CLI tools: jq, openssl...\"\nif command -v yum &>/dev/null; then\n    yum install -y jq openssl\nelif command -v apt-get &>/dev/null; then\n    apt-get update\n    apt-get install -y jq openssl\nelse\n    echo \"!!! Neither an apt nor yum distribution - could not install jq and openssl, things might break!\"\nfi\n\nif [[ \"${FOR_REAL:-}\" == \"true\" ]]; then\n    dotnet=dotnet\nelse\n    echo \"===========================================\"\n    echo \"            🏜️ DRY-RUN MODE 🏜️\"\n    echo\n    echo \"Set FOR_REAL=true to do actual publishing!\"\n    echo \"===========================================\"\n    dotnet=\"echo dotnet\"\nfi\n\nif [ -n \"${CODE_SIGNING_SECRET_ID:-}\" ]; then\n    declare -a CLEANUP=()\n    function cleanup() {\n        for ((i = 0; i < ${#CLEANUP[@]}; i++ ))\n        do\n            eval \"${CLEANUP[$i]}\"\n        done\n    }\n    trap cleanup 'EXIT'\n\n    echo \"Preparing code-signing certificate...\"\n    cert=$(mktemp -d)\n    CLEANUP+=(\"echo '🚮 Cleaning code-signing certificate'\" \"rm -fr ${cert}\")\n\n    # Prepare the PEM encoded certificate for sign.sh to use\n    echo \"Reading certificate from SSM parameter: ${CODE_SIGNING_PARAMETER_NAME}\"\n    signcode_spc=\"${cert}/certificate.spc\"\n    CERTIFICATE_LOCATION=$(aws ssm get-parameter --name \"${CODE_SIGNING_PARAMETER_NAME}\" | jq -r '.Parameter.Value')\n    aws s3 cp \"${CERTIFICATE_LOCATION}\" \"${signcode_spc}.pem\"\n    openssl crl2pkcs7 -nocrl -certfile \"${signcode_spc}.pem\" -outform DER -out \"${signcode_spc}\"\n    echo \"Successfully converted certificate from PEM to DER (.spc)\"\n\n    # Prepare the PEM encoded private key for sign.sh to use\n    echo \"Reading signing key from secret ID: ${CODE_SIGNING_SECRET_ID}\"\n    signcode_pvk=\"${cert}/certificate.pvk\"\n    aws secretsmanager get-secret-value --secret-id \"${CODE_SIGNING_SECRET_ID}\" | jq -r '.SecretString' > 
\"${signcode_pvk}.pem\"\n    openssl rsa -in \"${signcode_pvk}.pem\" -outform PVK -pvk-none -out \"${signcode_pvk}\"\n    echo \"Successfully converted signing key from PEM to PVK\"\n\n    # Set the timestamp server\n    signcode_tss=\"${CODE_SIGNING_TIMESTAMP_SERVER:-http://timestamp.digicert.com}\"\nfi\n\necho \"Publishing NuGet packages...\"\n\n(\n    # Assume a role, just for the purposes of retrieving the secret and nothing else.\n    # Run in a subshell so the changed environment variables in here don't interfere with the ones\n    # of the parent shell.\n    if [ -n \"${NUGET_ROLE_ARN:-}\" ]; then\n        ROLE=$(aws sts assume-role --region \"${NUGET_SECRET_REGION:-}\" --role-arn \"${NUGET_ROLE_ARN:-}\" --role-session-name \"buildable_nuget_publish\")\n        export AWS_ACCESS_KEY_ID=$(echo $ROLE | jq -r .Credentials.AccessKeyId)\n        export AWS_SECRET_ACCESS_KEY=$(echo $ROLE | jq -r .Credentials.SecretAccessKey)\n        export AWS_SESSION_TOKEN=$(echo $ROLE | jq -r .Credentials.SessionToken)\n    fi\n    aws secretsmanager get-secret-value --region \"${NUGET_SECRET_REGION:-}\" --secret-id \"${NUGET_SECRET_ID:-}\" | jq -r .SecretString | jq -r .NugetApiKey > /tmp/key.txt\n)\nNUGET_API_KEY=$(cat /tmp/key.txt)\n\nNUGET_SOURCE=\"https://api.nuget.org/v3/index.json\"\nNUGET_SYMBOL_SOURCE=\"https://nuget.smbsrc.net/\"\n\nlog=$(mktemp -d)/log.txt\n\nfound=false\nfor NUGET_PACKAGE_PATH in $(find dotnet -name '*.nupkg' -not -iname '*.symbols.nupkg'); do\n    found=true\n    if [ -n \"${CODE_SIGNING_SECRET_ID:-}\" ]; then\n        /bin/bash $SCRIPT_DIR/sign.sh \"${NUGET_PACKAGE_PATH}\" \"${signcode_spc}\" \"${signcode_pvk}\" \"${signcode_tss}\"\n        if [ $? 
-ne 0 ]; then\n            echo \"❌ Code Signing failed\"\n            exit 1\n        fi\n    fi\n    echo \"📦  Publishing ${NUGET_PACKAGE_PATH} to NuGet\"\n    (\n        cd $(dirname $NUGET_PACKAGE_PATH)\n        NUGET_PACKAGE_NAME=$(basename $NUGET_PACKAGE_PATH)\n        NUGET_PACKAGE_BASE=${NUGET_PACKAGE_NAME%.nupkg}\n\n        if [ -f \"${NUGET_PACKAGE_BASE}.symbols.nupkg\" ]; then\n            # Legacy mode - there's a .symbols.nupkg file that can't go to the NuGet symbols server\n            $dotnet nuget push $NUGET_PACKAGE_NAME -k $NUGET_API_KEY -s $NUGET_SOURCE -ss $NUGET_SYMBOL_SOURCE --force-english-output --skip-duplicate | tee ${log}\n        else\n            [ -f \"${NUGET_PACKAGE_BASE}.snupkg\" ] || echo \"⚠️ No symbols package was found!\"\n            # The .snupkg will be published at the same time as the .nupkg if both are in the current folder (which is the case)\n            $dotnet nuget push $NUGET_PACKAGE_NAME -k $NUGET_API_KEY -s $NUGET_SOURCE --force-english-output --skip-duplicate | tee ${log}\n        fi\n    )\n\n    # If push failed, check if this was caused because we are trying to publish\n    # the same version again, which is not an error by searching for a magic string in the log\n    # ugly, yes!\n    if [ ${PIPESTATUS[0]} -ne 0 ]; then\n        if cat ${log} | grep -q \"already exists and cannot be modified\"; then\n            echo \"⚠️ Artifact already published. Skipping\"\n        else\n            echo \"❌ Release failed\"\n            exit 1\n        fi\n    fi\ndone\n\nif ! ${found}; then\n    echo \"❌ No nupkg files found under the dotnet/ directory. Nothing to publish\"\n    exit 1\nfi\n\n/bin/bash $SCRIPT_DIR/update-ssm.sh\n\necho \"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\"\necho \"✅ All Done!\"\n"
  },
  {
    "path": "lib/publishing/nuget/sign.sh",
    "content": "#!/bin/bash\nset -euo pipefail\n\nif [ $# -ne 4 ]\nthen\n  echo \"Usage: $0 <nuget-package.nupkg> <certificate.spc> <privatekey.pvk> <timestamp-url>\"\n  exit -1\nfi\nNUGET_PACKAGE=$(cd $(dirname $1) && echo $PWD)/$(basename $1)\nSOFTWARE_PUBLISHER_CERTIFICATE=$2\nPRIVATE_KEY=$3\nTIMESTAMP_URL=$4\n\necho \"🔑 Applying authenticode signatures to assemblies in ${NUGET_PACKAGE}\"\nfor FILE in $(unzip -Z1 ${NUGET_PACKAGE} '*.dll')\ndo\n  echo \"📄 Assemby: ${FILE}\"\n  TMP=$(mktemp -d)\n  # Extract the DLL from the ZIP file\n  unzip -q ${NUGET_PACKAGE} -d ${TMP} ${FILE}\n  # Need to set appropriate permissions, otherwise the file has none.\n  chmod u+rw ${TMP}/${FILE}\n  # Sign the DLL\n  signcode  -a    sha256                                                        \\\n            -spc  ${SOFTWARE_PUBLISHER_CERTIFICATE}                             \\\n            -v    ${PRIVATE_KEY}                                                \\\n            -t    ${TIMESTAMP_URL}                                              \\\n            ${TMP}/${FILE}\n  # Replace the DLL in the NuGet package\n  (\n    cd ${TMP} # Need to step in so the TMP prefix isn't mirrored in the ZIP -_-\n    zip -qfr ${NUGET_PACKAGE} ${FILE}\n  )\n  # Clean up temporary directory\n  rm -fr ${TMP}\ndone\necho \"🔐 All Done!\"\n"
  },
  {
    "path": "lib/publishing/nuget/update-ssm.sh",
    "content": "#!/bin/bash\n# Write the current version and timestamp to SSM, if the current version is new\nset -eu\n\nif [[ \"${SSM_PREFIX:-}\" != \"\" ]]; then\n  if [[ \"${FOR_REAL:-}\" == \"true\" ]]; then\n    dry_aws=\"aws\"\n  else\n    dry_aws=\"echo aws\"\n  fi\n\n  build_manifest=\"${BUILD_MANIFEST:-\"./build.json\"}\"\n  version=\"$(node -p \"require('${build_manifest}').version\")\"\n\n  cur_version=$(aws ssm get-parameter --name \"$SSM_PREFIX/version\" --output text --query 'Parameter.Value' || echo '-missing-')\n\n  if [[ \"$cur_version\" != \"$version\" ]]; then\n    echo \"📖 Writing version and timestamp to $SSM_PREFIX/{version,timestamp}\"\n    $dry_aws ssm put-parameter --name \"$SSM_PREFIX/version\" --type \"String\" --value \"$version\" --overwrite\n    $dry_aws ssm put-parameter --name \"$SSM_PREFIX/timestamp\" --type \"String\" --value \"$(date +%s)\" --overwrite\n  else\n    echo \"⚠️ Version already up-to-date.\"\n  fi\nfi\n"
  },
  {
    "path": "lib/publishing/pypi/publish.sh",
    "content": "#!/bin/bash\nset -euo pipefail\n\n# load login credentials from secrets manager\ncredentials=$(aws secretsmanager get-secret-value --secret-id ${PYPI_CREDENTIALS_SECRET_ID} --output=text --query=SecretString)\nexport TWINE_USERNAME=$(python -c \"import json; print(json.loads('''${credentials}''')['username'])\")\nexport TWINE_PASSWORD=$(python -c \"import json; print(json.loads('''${credentials}''')['password'])\")\n\n# make sure we use the latest pip\n# see https://cryptography.io/en/latest/faq.html#installing-cryptography-fails-with-error-can-not-find-rust-compiler\npip install --upgrade pip\n\npip install twine\n\nif [[ \"${FOR_REAL:-}\" == \"true\" ]]; then\n  twine upload --skip-existing python/**\nelse\n  echo \"===========================================\"\n  echo \"            🏜️ DRY-RUN MODE 🏜️\"\n  echo\n  echo \"Skipping the actual publishing step.\"\n  echo\n  echo \"Set FOR_REAL=true to do it!\"\n  echo \"===========================================\"\nfi\n\n/bin/bash $SCRIPT_DIR/update-ssm.sh"
  },
  {
    "path": "lib/publishing/pypi/update-ssm.sh",
    "content": "#!/bin/bash\n# Write the current version and timestamp to SSM, if the current version is new\nset -eu\n\nif [[ \"${SSM_PREFIX:-}\" != \"\" ]]; then\n  if [[ \"${FOR_REAL:-}\" == \"true\" ]]; then\n    dry_aws=\"aws\"\n  else\n    dry_aws=\"echo aws\"\n  fi\n\n  build_manifest=\"${BUILD_MANIFEST:-\"./build.json\"}\"\n  version=\"$(node -p \"require('${build_manifest}').version\")\"\n\n  cur_version=$(aws ssm get-parameter --name \"$SSM_PREFIX/version\" --output text --query 'Parameter.Value' || echo '-missing-')\n\n  if [[ \"$cur_version\" != \"$version\" ]]; then\n    echo \"📖 Writing version and timestamp to $SSM_PREFIX/{version,timestamp}\"\n    $dry_aws ssm put-parameter --name \"$SSM_PREFIX/version\" --type \"String\" --value \"$version\" --overwrite\n    $dry_aws ssm put-parameter --name \"$SSM_PREFIX/timestamp\" --type \"String\" --value \"$(date +%s)\" --overwrite\n  else\n    echo \"⚠️ Version already up-to-date.\"\n  fi\nfi\n"
  },
  {
    "path": "lib/publishing/s3/publish.sh",
    "content": "#!/bin/bash\nset -euo pipefail\necho ----------------------------------------\necho \"Sources:\"\nls\necho ----------------------------------------\n\n# Sync to S3 publicly readable\nargs=\"\"\nif ${PUBLIC:-false}; then\n  args=\"--acl public-read\"\nfi\n\nidempotency_token=\"\"\n\n# See if there's a file with publishing commands\nif [[ -f s3-publishing.json ]]; then\n    echo \"Found publishing instructions\"\n    cat s3-publishing.json\n\n    idempotency_token=$(node -pe \"require('./s3-publishing.json')['idempotency-token'] || ''\")\n\n    # We don't want to upload this file\n    args=\"$args --exclude s3-publishing.json\"\nfi\n\nif [[ \"${idempotency_token:-}\" != \"\" ]]; then\n    echo \"Idempotency token: $idempotency_token\"\n\n    # Must use 's3 cp' to try and read exact filename. 's3 ls' would match prefixes as well.\n    if aws s3 cp $BUCKET_URL/$idempotency_token - > /dev/null 2>&1; then\n        echo \"Token found, stopping.\"\n        exit 0\n    else\n        echo \"Idempotency token not found, continuing.\"\n    fi\nfi\n\n# Do the copy\necho \"Starting the upload to $BUCKET_URL\"\necho \"(Args: $args)\"\n\nif $FOR_REAL; then\n    aws s3 cp --recursive . $BUCKET_URL $args\n\n    if [[ \"${idempotency_token:-}\" != \"\" ]]; then\n        echo \"Writing idempotency token...\"\n        echo 1 | aws s3 cp - $BUCKET_URL/$idempotency_token\n    fi\nelse\n    echo \"===========================================\"\n    echo \"            🏜️ DRY-RUN MODE 🏜️\"\n    echo\n    echo \"Skipping the actual publishing step.\"\n    echo\n    echo \"Set FOR_REAL=true to do it!\"\n    echo \"===========================================\"\nfi\n\n/bin/bash $SCRIPT_DIR/update-ssm.sh\n"
  },
  {
    "path": "lib/publishing/s3/update-ssm.sh",
    "content": "#!/bin/bash\n# Write the current version and timestamp to SSM, if the current version is new\nset -eu\n\nif [[ \"${SSM_PREFIX:-}\" != \"\" ]]; then\n  if [[ \"${FOR_REAL:-}\" == \"true\" ]]; then\n    dry_aws=\"aws\"\n  else\n    dry_aws=\"echo aws\"\n  fi\n\n  build_manifest=\"${BUILD_MANIFEST:-\"./build.json\"}\"\n  version=\"$(node -p \"require('${build_manifest}').version\")\"\n\n  cur_version=$(aws ssm get-parameter --name \"$SSM_PREFIX/version\" --output text --query 'Parameter.Value' || echo '-missing-')\n\n  if [[ \"$cur_version\" != \"$version\" ]]; then\n    echo \"📖 Writing version and timestamp to $SSM_PREFIX/{version,timestamp}\"\n    $dry_aws ssm put-parameter --name \"$SSM_PREFIX/version\" --type \"String\" --value \"$version\" --overwrite\n    $dry_aws ssm put-parameter --name \"$SSM_PREFIX/timestamp\" --type \"String\" --value \"$(date +%s)\" --overwrite\n  else\n    echo \"⚠️ Version already up-to-date.\"\n  fi\nfi\n"
  },
  {
    "path": "lib/publishing.ts",
    "content": "import * as path from 'path';\nimport {\n  Stack,\n  aws_codebuild as cbuild,\n  aws_codepipeline as cpipeline,\n  aws_codepipeline_actions as cpipeline_actions,\n  aws_iam as iam,\n  aws_s3 as s3,\n} from 'aws-cdk-lib';\nimport { Construct } from 'constructs';\nimport { ICodeSigningCertificate } from './code-signing';\nimport { DEFAULT_SUPERCHAIN_IMAGE } from './constants';\nimport { OpenPGPKeyPair } from './open-pgp-key-pair';\nimport * as permissions from './permissions';\nimport { AddToPipelineOptions, IPublisher } from './pipeline';\nimport { WritableGitHubRepo } from './repo';\nimport { LinuxPlatform, Shellable } from './shellable';\nimport { noUndefined } from './util';\n\n/**\n * Type of access permissions to request from npmjs.\n */\nexport enum NpmAccess {\n  /**\n   * No access restriction. Note that unscoped packages must always be public.\n   */\n  PUBLIC = 'public',\n\n  /**\n   * Limit access to whitelisted npmjs users.\n   */\n  RESTRICTED = 'restricted',\n}\n\nexport interface PublishToMavenProjectProps {\n  /**\n   * The signing key itself\n   */\n  signingKey: OpenPGPKeyPair;\n\n  /**\n   * The ID of the sonatype staging profile (e.g. 
\"68a05363083174\").\n   */\n  stagingProfileId: string;\n\n  /**\n   * Identifier of the secret that contains the Maven login\n   */\n  mavenLoginSecret: permissions.ExternalSecret;\n\n  /**\n   * If true (default) performs a dry-run only instead of actually publishing.\n   * @default true\n   */\n  dryRun?: boolean;\n\n  /**\n   * The Maven publishing endpoint to be used.\n   *\n   * @default \"https://oss.sonatype.org\"\n   */\n  mavenEndpoint?: string;\n\n  /**\n   * The server ID\n   *\n   * The only sensible value here is `central-ossrh`, which will use the new\n   * publishing endpoint that is mandatory starting June 30th.\n   *\n   * Any other value can `central-ossrh` will cause the underlying publishing\n   * library `publib` to assume publishing to a custom Nexus server, but this\n   * action currently doesn't have a way of specifying that Nexus server's\n   * endpoint.\n   *\n   * @default - Use legacy OSSRH server\n   */\n  serverId?: string;\n\n  /**\n   * The build image to do the publishing in\n   *\n   * Needs to have Maven preinstalled.\n   *\n   * @default Latest superchain\n   */\n  readonly buildImage?: cbuild.IBuildImage;\n\n  /**\n   * The prefix under which to record the fact that the publish step executed\n   *\n   * This will write `<prefix>/version` and `<prefix>/timestamp` variables\n   *\n   * @default - no SSM parameters\n   */\n  ssmPrefix?: string;\n\n  /**\n   * Description for the CodeBuild project\n   *\n   * @default - No description\n   */\n  description?: string;\n}\n\n/**\n * CodeBuild project that will publish all packages in a release bundle to Maven\n */\nexport class PublishToMavenProject extends Construct implements IPublisher {\n  public readonly role: iam.IRole;\n  public readonly project: cbuild.Project;\n\n  constructor(parent: Construct, id: string, props: PublishToMavenProjectProps) {\n    super(parent, id);\n\n    const forReal = props.dryRun === undefined ? 
'false' : (!props.dryRun).toString();\n\n    // When using `serverId`, we shouldn't try to guess a MAVEN_ENDPOINT; but keep the old\n    // behavior for backwards compatibility.\n    const mavenEndpointDefault = props.serverId === undefined ? 'https://oss.sonatype.org' : undefined;\n\n    const shellable = new Shellable(this, 'Default', {\n      description: props.description,\n      platform: new LinuxPlatform(props.buildImage ?? cbuild.LinuxBuildImage.fromDockerRegistry(DEFAULT_SUPERCHAIN_IMAGE)),\n      scriptDirectory: path.join(__dirname, 'publishing', 'maven'),\n      entrypoint: 'publish.sh',\n      environment: noUndefined({\n        STAGING_PROFILE_ID: props.stagingProfileId,\n        SIGNING_KEY_ARN: props.signingKey.credential.secretArn,\n        FOR_REAL: forReal,\n        MAVEN_LOGIN_SECRET: props.mavenLoginSecret.secretArn,\n        MAVEN_ENDPOINT: props.mavenEndpoint ?? mavenEndpointDefault,\n        MAVEN_SERVER_ID: props.serverId,\n        SSM_PREFIX: props.ssmPrefix,\n      }),\n    });\n\n    if (shellable.role) {\n      permissions.grantSecretRead(props.mavenLoginSecret, shellable.role);\n      props.signingKey.grantRead(shellable.role);\n    }\n    grantSsmPrefix(shellable.role, props.ssmPrefix);\n\n    this.role = shellable.role;\n    this.project = shellable.project;\n  }\n\n  public addToPipeline(stage: cpipeline.IStage, id: string, options: AddToPipelineOptions): void {\n    stage.addAction(new cpipeline_actions.CodeBuildAction({\n      actionName: id,\n      input: options.inputArtifact || new cpipeline.Artifact(),\n      runOrder: options.runOrder,\n      project: this.project,\n    }));\n  }\n}\n\nexport interface PublishToNpmProjectProps {\n  /**\n   * Identifier of the secret that contains the NPM token\n   */\n  npmTokenSecret: permissions.ExternalSecret;\n\n  /**\n   * If `true` (default) will only perform a dry-run but will not actually publish.\n   * @default true\n   */\n  dryRun?: boolean;\n\n  /**\n   * npm dist-tag to use when 
publishing artifacts.\n   *\n   * @default - npm default behavior (\"latest\" unless dist tag is specified in package.json)\n   */\n  distTag?: string;\n\n  /**\n   * npm --access public|restricted\n   *\n   * See https://docs.npmjs.com/cli-commands/publish#:~:text=Tells%20the\n   *\n   * Tells the registry whether this package should be published as public or restricted.\n   * Only applies to scoped packages, which default to restricted.\n   * If you don’t have a paid account, you must publish with --access public to publish scoped packages.\n   *\n   * @default NpmAccess.PUBLIC\n   */\n  access?: NpmAccess;\n\n  /**\n   * The prefix under which to record the fact that the publish step executed\n   *\n   * This will write `<prefix>/version` and `<prefix>/timestamp` variables\n   *\n   * @default - no SSM parameters\n   */\n  ssmPrefix?: string;\n\n  /**\n   * Description for the CodeBuild project\n   *\n   * @default - No description\n   */\n  description?: string;\n}\n\n/**\n * CodeBuild project that will publish all packages in a release bundle to NPM\n */\nexport class PublishToNpmProject extends Construct implements IPublisher {\n  public readonly role?: iam.IRole;\n  public readonly project: cbuild.Project;\n\n  constructor(parent: Construct, id: string, props: PublishToNpmProjectProps) {\n    super(parent, id);\n\n    const forReal = props.dryRun === undefined ? 'false' : (!props.dryRun).toString();\n\n    const access = props.access ?? 
NpmAccess.PUBLIC;\n\n    const shellable = new Shellable(this, 'Default', {\n      description: props.description,\n      platform: new LinuxPlatform(cbuild.LinuxBuildImage.STANDARD_7_0),\n      scriptDirectory: path.join(__dirname, 'publishing', 'npm'),\n      entrypoint: 'publish.sh',\n      environment: noUndefined({\n        FOR_REAL: forReal,\n        NPM_TOKEN_SECRET: props.npmTokenSecret.secretArn,\n        DISTTAG: props.distTag || '',\n        ACCESS: access,\n        SSM_PREFIX: props.ssmPrefix,\n      }),\n    });\n\n    if (shellable.role) {\n      permissions.grantSecretRead(props.npmTokenSecret, shellable.role);\n    }\n\n    grantSsmPrefix(shellable.role, props.ssmPrefix);\n\n    this.role = shellable.role;\n    this.project = shellable.project;\n  }\n\n  public addToPipeline(stage: cpipeline.IStage, id: string, options: AddToPipelineOptions): void {\n    stage.addAction(new cpipeline_actions.CodeBuildAction({\n      actionName: id,\n      input: options.inputArtifact || new cpipeline.Artifact(),\n      runOrder: options.runOrder,\n      project: this.project,\n    }));\n  }\n}\n\nexport interface PublishToNuGetProjectProps {\n  /**\n   * The SecretsManager secret which stores the Nuget API key.\n   */\n  nugetApiKeySecret: permissions.ExternalSecret;\n\n  /**\n   * If `true` (default) will only perform a dry-run but will not actually publish.\n   * @default true\n   */\n  dryRun?: boolean;\n\n  /**\n   * A code signing certificate to use to sign assemblies.\n   * @default No signing\n   */\n  codeSign?: ICodeSigningCertificate;\n\n  /**\n   * The build image to do the publishing in\n   *\n   * Needs to have NuGet preinstalled.\n   *\n   * @default Latest superchain\n   */\n  readonly buildImage?: cbuild.IBuildImage;\n\n  /**\n   * The prefix under which to record the fact that the publish step executed\n   *\n   * This will write `<prefix>/version` and `<prefix>/timestamp` variables\n   *\n   * @default - no SSM parameters\n   */\n  ssmPrefix?: 
string;\n\n  /**\n   * Description for the CodeBuild project\n   *\n   * @default - No description\n   */\n  description?: string;\n}\n\n/**\n * CodeBuild project that will publish all packages in a release bundle to NuGet\n */\nexport class PublishToNuGetProject extends Construct implements IPublisher {\n  public readonly role: iam.IRole;\n  public readonly project: cbuild.Project;\n\n  constructor(parent: Construct, id: string, props: PublishToNuGetProjectProps) {\n    super(parent, id);\n\n    const environment: { [key: string]: string } = {};\n\n    environment.FOR_REAL = props.dryRun === undefined ? 'false' : (!props.dryRun).toString();\n\n    if (props.nugetApiKeySecret.assumeRoleArn) {\n      environment.NUGET_ROLE_ARN = props.nugetApiKeySecret.assumeRoleArn;\n    }\n\n    if (props.nugetApiKeySecret.region) {\n      environment.NUGET_SECRET_REGION = props.nugetApiKeySecret.region;\n    } else {\n      environment.NUGET_SECRET_REGION = Stack.of(this).region;\n    }\n\n    environment.NUGET_SECRET_ID = props.nugetApiKeySecret.secretArn;\n    if (props.ssmPrefix) {\n      environment.SSM_PREFIX = props.ssmPrefix;\n    }\n\n    const shellable = new Shellable(this, 'Default', {\n      description: props.description,\n      platform: new LinuxPlatform(props.buildImage ?? 
cbuild.LinuxBuildImage.fromDockerRegistry(DEFAULT_SUPERCHAIN_IMAGE)),\n      scriptDirectory: path.join(__dirname, 'publishing', 'nuget'),\n      entrypoint: 'publish.sh',\n      environment,\n    });\n\n    if (props.codeSign) {\n      environment.CODE_SIGNING_SECRET_ID = props.codeSign.credential.secretArn;\n      environment.CODE_SIGNING_PARAMETER_NAME = props.codeSign.principal.parameterName;\n    }\n\n    if (shellable.role) {\n      if (props.nugetApiKeySecret.assumeRoleArn) {\n        permissions.grantAssumeRole(props.nugetApiKeySecret.assumeRoleArn, shellable.role);\n      } else {\n        permissions.grantSecretRead(props.nugetApiKeySecret, shellable.role);\n      }\n\n      if (props.codeSign) {\n        props.codeSign.grantDecrypt(shellable.role);\n      }\n    }\n\n    grantSsmPrefix(shellable.role, props.ssmPrefix);\n\n    this.role = shellable.role;\n    this.project = shellable.project;\n  }\n\n  public addToPipeline(stage: cpipeline.IStage, id: string, options: AddToPipelineOptions): void {\n    stage.addAction(new cpipeline_actions.CodeBuildAction({\n      actionName: id,\n      input: options.inputArtifact || new cpipeline.Artifact(),\n      runOrder: options.runOrder,\n      project: this.project,\n    }));\n  }\n}\n\nexport interface PublishDocsToGitHubProjectProps {\n  /**\n   * The repository to publish to\n   */\n  githubRepo: WritableGitHubRepo;\n\n  /**\n   * If `true` (default) will only perform a dry-run but will not actually publish.\n   * @default true\n   */\n  dryRun?: boolean;\n\n  /**\n   * The name of the build manifest JSON file (must include \"name\" and \"version\" fields).\n   * Relative to the artifacts root.\n   * @default \"./build.json\"\n   */\n  buildManifestFileName?: string;\n\n  /**\n   * GitHub Pages branch to push to.\n   * @default gh-pages\n   */\n  branch?: string;\n\n  /**\n   * The prefix under which to record the fact that the publish step executed\n   *\n   * This will write `<prefix>/version` and 
`<prefix>/timestamp` variables\n   *\n   * @default - no SSM parameters\n   */\n  ssmPrefix?: string;\n\n  /**\n   * Description for the CodeBuild project\n   *\n   * @default - No description\n   */\n  description?: string;\n}\n\n/**\n * CodeBuild project that will publish all packages in a release bundle to NuGet\n */\nexport class PublishDocsToGitHubProject extends Construct implements IPublisher {\n  public readonly role: iam.IRole;\n  public readonly project: cbuild.Project;\n\n  constructor(parent: Construct, id: string, props: PublishDocsToGitHubProjectProps) {\n    super(parent, id);\n\n    const forReal = props.dryRun === undefined ? 'false' : (!props.dryRun).toString();\n\n    const shellable = new Shellable(this, 'Default', {\n      description: props.description,\n      platform: new LinuxPlatform(cbuild.LinuxBuildImage.STANDARD_7_0),\n      scriptDirectory: path.join(__dirname, 'publishing', 'docs'),\n      entrypoint: 'publish.sh',\n      environment: noUndefined({\n        // Must be SSH because we use an SSH key to authenticate\n        GITHUB_REPO: props.githubRepo.repositoryUrlSsh,\n        GITHUB_PAGES_BRANCH: props.branch || 'gh-pages',\n        SSH_KEY_SECRET: props.githubRepo.sshKeySecret.secretArn,\n        FOR_REAL: forReal,\n        COMMIT_USERNAME: props.githubRepo.commitUsername,\n        COMMIT_EMAIL: props.githubRepo.commitEmail,\n        BUILD_MANIFEST: props.buildManifestFileName || './build.json',\n        SSM_PREFIX: props.ssmPrefix,\n      }),\n    });\n\n    if (shellable.role) {\n      permissions.grantSecretRead(props.githubRepo.sshKeySecret, shellable.role);\n    }\n\n    grantSsmPrefix(shellable.role, props.ssmPrefix);\n\n    this.role = shellable.role;\n    this.project = shellable.project;\n  }\n\n  public addToPipeline(stage: cpipeline.IStage, id: string, options: AddToPipelineOptions): void {\n    stage.addAction(new cpipeline_actions.CodeBuildAction({\n      actionName: id,\n      input: options.inputArtifact || new 
cpipeline.Artifact(),\n      runOrder: options.runOrder,\n      project: this.project,\n    }));\n  }\n}\n\nexport interface PublishToGitHubProps {\n  /**\n   * If `true` (default) will only perform a dry-run but will not actually publish.\n   * @default true\n   */\n  dryRun?: boolean;\n\n  /**\n   * The repository to create a release in.\n   */\n  githubRepo: WritableGitHubRepo;\n\n  /**\n   * The signing key to use to create a GPG signature of the artifact.\n   */\n  signingKey: OpenPGPKeyPair;\n\n  /**\n   * The name of the build manifest JSON file (must include \"name\" and \"version\" fields).\n   * Relative to the artifacts root.\n   * @default \"./build.json\"\n   */\n  buildManifestFileName?: string;\n\n  /**\n   * The name of the changelog markdown file, used to create release notes.\n   * Relative to the artifacts root.\n   * @default \"./CHANGELOG.md\"\n   */\n  changelogFileName?: string;\n\n  /**\n   * The name of the release notes file, containing the completed release notes\n   * for the current release.\n   * Relative to the artifacts root.\n   * NOTE - If this value is set and points to a valid file, the file in its entirety\n   * will be read and used for the release notes. 
The value of `changelogFileName` will\n   * be ignored.\n   * @default \"./RELEASE_NOTES.md\"\n   */\n  releaseNotesFileName?: string;\n\n  /**\n   * Additional input artifacts to publish binaries from to GitHub release\n   */\n  additionalInputArtifacts?: cpipeline.Artifact[];\n\n  /**\n   * Whether to sign the additional artifacts\n   *\n   * @default true\n   */\n  signAdditionalArtifacts?: boolean;\n\n  /**\n   * The prefix under which to record the fact that the publish step executed\n   *\n   * This will write `<prefix>/version` and `<prefix>/timestamp` variables\n   *\n   * @default - no SSM parameters\n   */\n  ssmPrefix?: string;\n\n  /**\n   * Description for the CodeBuild project\n   *\n   * @default - No description\n   */\n  description?: string;\n}\n\nexport class PublishToGitHub extends Construct implements IPublisher {\n  public readonly role: iam.IRole;\n  public readonly project: cbuild.Project;\n  private readonly additionalInputArtifacts?: cpipeline.Artifact[];\n\n  constructor(parent: Construct, id: string, props: PublishToGitHubProps) {\n    super(parent, id);\n\n    const forReal = props.dryRun === undefined ? 
'false' : (!props.dryRun).toString();\n    this.additionalInputArtifacts = props.additionalInputArtifacts;\n\n    // The release notes, if set and a valid file, overrides any usages of the changelog.\n    if (props.changelogFileName && props.releaseNotesFileName) {\n      throw new Error('both `releaseNotesFileName` and `changelogFileName` cannot be specified; use one or the other');\n    }\n\n    const shellable = new Shellable(this, 'Default', {\n      description: props.description,\n      platform: new LinuxPlatform(cbuild.LinuxBuildImage.STANDARD_7_0),\n      scriptDirectory: path.join(__dirname, 'publishing', 'github'),\n      entrypoint: 'publish.sh',\n      environment: noUndefined({\n        BUILD_MANIFEST: props.buildManifestFileName || './build.json',\n        CHANGELOG: props.changelogFileName || './CHANGELOG.md',\n        RELEASE_NOTES: props.releaseNotesFileName || './RELEASE_NOTES.md',\n        SIGNING_KEY_ARN: props.signingKey.credential.secretArn,\n        GITHUB_OWNER: props.githubRepo.owner,\n        GITHUB_REPO: props.githubRepo.repo,\n        FOR_REAL: forReal,\n        // Transmit the names of the secondary sources to the shell script (for easier iteration)\n        SECONDARY_SOURCE_NAMES: props.additionalInputArtifacts ? props.additionalInputArtifacts.map(a => a.artifactName).join(' ') : undefined,\n        SIGN_ADDITIONAL_ARTIFACTS: props.additionalInputArtifacts && props.signAdditionalArtifacts !== false ? 
'true' : undefined,\n        SSM_PREFIX: props.ssmPrefix,\n      }),\n      environmentSecrets: {\n        GITHUB_TOKEN: props.githubRepo.tokenSecretArn,\n      },\n    });\n\n    // allow script to read the signing key\n    if (shellable.role) {\n      props.signingKey.grantRead(shellable.role);\n    }\n\n    grantSsmPrefix(shellable.role, props.ssmPrefix);\n    this.role = shellable.role;\n    this.project = shellable.project;\n  }\n\n  public addToPipeline(stage: cpipeline.IStage, id: string, options: AddToPipelineOptions): void {\n    stage.addAction(new cpipeline_actions.CodeBuildAction({\n      actionName: id,\n      input: options.inputArtifact || new cpipeline.Artifact(),\n      extraInputs: this.additionalInputArtifacts,\n      runOrder: options.runOrder,\n      project: this.project,\n    }));\n  }\n}\n\nexport interface PublishToS3Props {\n  bucket: s3.IBucket;\n\n  /**\n   * Make files publicly readable\n   *\n   * @default false\n   */\n  public?: boolean;\n\n  /**\n   * If `true` (default) will only perform a dry-run but will not actually publish.\n   * @default true\n   */\n  dryRun?: boolean;\n\n  /**\n   * Description for the CodeBuild project\n   *\n   * @default - No description\n   */\n  description?: string;\n}\n\nexport class PublishToS3 extends Construct implements IPublisher {\n  public readonly role?: iam.IRole;\n  public readonly project: cbuild.Project;\n\n  constructor(scope: Construct, id: string, props: PublishToS3Props) {\n    super(scope, id);\n\n    const forReal = props.dryRun === undefined ? 'false' : (!props.dryRun).toString();\n\n    const shellable = new Shellable(this, 'Default', {\n      description: props.description,\n      platform: new LinuxPlatform(cbuild.LinuxBuildImage.STANDARD_7_0),\n      scriptDirectory: path.join(__dirname, 'publishing', 's3'),\n      entrypoint: 'publish.sh',\n      environment: noUndefined({\n        BUCKET_URL: `s3://${props.bucket.bucketName}`,\n        CHANGELOG: props.public ? 
'true' : 'false',\n        FOR_REAL: forReal,\n      }),\n    });\n\n    // Allow script to write to bucket\n    if (shellable.role) {\n      props.bucket.grantReadWrite(shellable.role);\n    }\n\n    this.role = shellable.role;\n    this.project = shellable.project;\n  }\n\n  public addToPipeline(stage: cpipeline.IStage, id: string, options: AddToPipelineOptions): void {\n    stage.addAction(new cpipeline_actions.CodeBuildAction({\n      actionName: id,\n      input: options.inputArtifact || new cpipeline.Artifact(),\n      runOrder: options.runOrder,\n      project: this.project,\n    }));\n  }\n}\n\nexport interface PublishToPyPiProps {\n  /**\n   * Identifier of the secret that contains the PyPI credentials under\n   * \"username\" and \"password\" keys.\n   */\n  loginSecret: permissions.ExternalSecret;\n\n  /**\n   * If `true` (default) will only perform a dry-run but will not actually publish.\n   * @default true\n   */\n  dryRun?: boolean;\n\n  /**\n   * The prefix under which to record the fact that the publish step executed\n   *\n   * This will write `<prefix>/version` and `<prefix>/timestamp` variables\n   *\n   * @default - no SSM parameters\n   */\n  ssmPrefix?: string;\n\n  /**\n   * Description for the CodeBuild project\n   *\n   * @default - No description\n   */\n  description?: string;\n}\n\nexport class PublishToPyPi extends Construct {\n\n  public readonly project: cbuild.Project;\n  public readonly role: iam.IRole;\n\n  constructor(scope: Construct, id: string, props: PublishToPyPiProps) {\n    super(scope, id);\n\n    const forReal = props.dryRun === undefined ? 
'false' : (!props.dryRun).toString();\n\n    const shellable = new Shellable(this, 'Default', {\n      description: props.description,\n      platform: new LinuxPlatform(cbuild.LinuxBuildImage.STANDARD_7_0),\n      scriptDirectory: path.join(__dirname, 'publishing', 'pypi'),\n      entrypoint: 'publish.sh',\n      environment: noUndefined({\n        FOR_REAL: forReal,\n        PYPI_CREDENTIALS_SECRET_ID: props.loginSecret.secretArn,\n        SSM_PREFIX: props.ssmPrefix,\n      }),\n    });\n\n    if (shellable.role) {\n      permissions.grantSecretRead(props.loginSecret, shellable.role);\n    }\n\n    grantSsmPrefix(shellable.role, props.ssmPrefix);\n\n    this.role = shellable.role;\n    this.project = shellable.project;\n  }\n\n  public addToPipeline(stage: cpipeline.IStage, id: string, options: AddToPipelineOptions): void {\n    stage.addAction(new cpipeline_actions.CodeBuildAction({\n      actionName: id,\n      input: options.inputArtifact || new cpipeline.Artifact(),\n      runOrder: options.runOrder,\n      project: this.project,\n    }));\n  }\n}\n\n/**\n * Props for Go publishing.\n */\nexport interface PublishToGolangProps {\n  /**\n   * Identifier of the secret that contains the GitHub personal access token\n   * used to push the go code to the github repository defined by it's name.\n   *\n   * @see https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token\n   */\n  readonly githubTokenSecret: permissions.ExternalSecret;\n\n  /**\n   * Username to perform the commit with.\n   */\n  readonly gitUserName: string;\n\n  /**\n   * Email to perform the commit with.\n   */\n  readonly gitUserEmail: string;\n\n  /**\n   * Set to \"true\" for a dry run.\n   * @default false\n   */\n  readonly dryRun?: boolean;\n\n  /**\n   * Module version.\n   *\n   * @default - Defaults to the value in the 'version' file of the module\n   * directory. 
Fails if it doesn't exist.\n   */\n  readonly version?: string;\n\n  /**\n   * Branch to push to.\n   *\n   * @default \"main\"\n   */\n  readonly gitBranch?: string;\n\n  /**\n   * The commit message.\n   *\n   * @default \"chore(release): $VERSION\"\n   */\n  readonly gitCommitMessage?: string;\n\n  /**\n   * The prefix under which to record the fact that the publish step executed\n   *\n   * This will write `<prefix>/version` and `<prefix>/timestamp` variables\n   *\n   * @default - no SSM parameters\n   */\n  ssmPrefix?: string;\n\n  /**\n   * Description for the CodeBuild project\n   *\n   * @default - No description\n   */\n  description?: string;\n}\n\n/**\n * Pushes a directory of golang modules to a GitHub repository.\n */\nexport class PublishToGolang extends Construct {\n  public readonly project: cbuild.Project;\n  public readonly role: iam.IRole;\n\n  constructor(scope: Construct, id: string, props: PublishToGolangProps) {\n    super(scope, id);\n\n    const dryRun = props.dryRun ?? false;\n\n    const shellable = new Shellable(this, 'Default', {\n      description: props.description,\n      platform: new LinuxPlatform(cbuild.LinuxBuildImage.STANDARD_7_0),\n      scriptDirectory: path.join(__dirname, 'publishing', 'golang'),\n      entrypoint: 'publish.sh',\n      environment: noUndefined({\n        DRYRUN: dryRun ? 
'true' : undefined,\n        GITHUB_TOKEN_SECRET: props.githubTokenSecret.secretArn,\n        VERSION: props.version,\n        GIT_BRANCH: props.gitBranch,\n        GIT_USER_NAME: props.gitUserName,\n        GIT_USER_EMAIL: props.gitUserEmail,\n        GIT_COMMIT_MESSAGE: props.gitCommitMessage,\n        SSM_PREFIX: props.ssmPrefix,\n      }),\n    });\n\n    if (shellable.role) {\n      permissions.grantSecretRead(props.githubTokenSecret, shellable.role);\n    }\n\n    grantSsmPrefix(shellable.role, props.ssmPrefix);\n\n    this.role = shellable.role;\n    this.project = shellable.project;\n  }\n\n  public addToPipeline(stage: cpipeline.IStage, id: string, options: AddToPipelineOptions): void {\n    stage.addAction(new cpipeline_actions.CodeBuildAction({\n      actionName: id,\n      input: options.inputArtifact || new cpipeline.Artifact(),\n      runOrder: options.runOrder,\n      project: this.project,\n    }));\n  }\n}\n\nfunction grantSsmPrefix(role: iam.IRole, ssmPrefix?: string) {\n  if (ssmPrefix) {\n    if (!ssmPrefix.startsWith('/')) {\n      throw new Error(`SSM prefix should start with '/', got: ${ssmPrefix}`);\n    }\n    if (ssmPrefix.endsWith('/')) {\n      throw new Error(`SSM prefix must not end with '/', got: ${ssmPrefix}`);\n    }\n\n    role?.addToPrincipalPolicy(new iam.PolicyStatement({\n      actions: ['ssm:PutParameter', 'ssm:GetParameter'],\n      resources: [Stack.of(role).formatArn({\n        service: 'ssm',\n        resource: 'parameter',\n        resourceName: `${ssmPrefix.slice(1)}/*`,\n      })],\n    }));\n  }\n}"
  },
  {
    "path": "lib/pull-request/bump.ts",
    "content": "import { Construct } from 'constructs';\nimport { AutoPullRequest, AutoPullRequestOptions } from './pr';\nimport { WritableGitHubRepo } from '../repo';\n\n/**\n * Properties for configuring the head branch of the bump PR.\n */\nexport interface AutoBumpHead {\n\n  /**\n   * The name of branch. Will be created if it doesn't exist.\n   *\n   * $VERSION will be substituted by the current version (obtained by executing `versionCommand`).\n   *\n   * @default 'bump/$VERSION'\n   */\n  readonly name?: string;\n\n  /**\n   * @see 'source' property in AutoPullRequest.Head\n   */\n  readonly source?: string;\n}\n\n/**\n * Options for configuring an Auto Bump project.\n */\nexport interface AutoBumpProps extends AutoPullRequestOptions {\n  /**\n   * The repository to create a PR in.\n   */\n  repo: WritableGitHubRepo;\n\n  /**\n   * The command to execute in order to bump the repo.\n   *\n   * The bump command is responsible to bump any version metadata, update\n   * CHANGELOG and commit this to the repository.\n   *\n   * @default '/bin/bash ./bump.sh'\n   */\n  bumpCommand?: string;\n\n  /**\n   * The command to determine the current version.\n   *\n   * This is the value that will be used to evaluate $VERSION.\n   *\n   * @default 'git describe' (the latest git tag will be used to determine the current version)\n   */\n  versionCommand?: string;\n\n  /**\n   * Title of the PR.\n   *\n   * $VERSION will be substituted by the current version (obtained by executing `versionCommand`).\n   *\n   * @default' chore(release): $VERSION'\n   */\n  title?: string;\n\n  /**\n   * Body of the PR.\n   *\n   * @default 'See [CHANGELOG](https://github.com/${props.repo.owner}/${props.repo.repo}/blob/${head}/CHANGELOG.md)'\n   * (Link to the CHANGELOG file of the head branch)\n   */\n  body?: string;\n\n  /**\n   * The head branch of the PR.\n   *\n   * $VERSION will be substituted by the current version (obtained by executing `versionCommand`).\n   *\n   * @default - Wil 
be created from master and named 'bump/$VERSION'\n   */\n  head?: AutoBumpHead;\n\n  /**\n   * Description string for the CodeBuild project\n   *\n   * @default - A default description\n   */\n  readonly projectDescription?: string;\n}\n\nexport class AutoBump extends Construct {\n\n  /**\n   * The underlying AutoPullRequest construct.\n   */\n  public readonly pr: AutoPullRequest;\n\n  constructor(parent: Construct, id: string, props: AutoBumpProps) {\n    super(parent, id);\n\n    const branchName = props.head?.name ?? 'bump/$VERSION';\n    const baseBranch = props.base?.name ?? 'master';\n    const bumpCommand = props.bumpCommand ?? '/bin/sh ./bump.sh';\n    const versionCommand = props.versionCommand ?? 'git describe';\n    const title = props.title ?? 'chore(release): $VERSION';\n    const body = props.body ?? `See [CHANGELOG](https://github.com/${props.repo.owner}/${props.repo.repo}/blob/${branchName}/CHANGELOG.md)`;\n\n    this.pr = new AutoPullRequest(this, 'AutoPullRequest', {\n      ...props,\n      head: {\n        name: branchName,\n        source: props.head?.source,\n      },\n      title,\n      body,\n      commands: [bumpCommand],\n      exports: {\n        ...props.exports,\n        VERSION: versionCommand,\n      },\n      // check if base is already released\n      condition: `git describe --exact-match ${baseBranch}`,\n      projectDescription: props.projectDescription ?? `Release ${props.repo.owner}/${props.repo.repo}, branch ${baseBranch}`,\n    });\n  }\n}\n"
  },
  {
    "path": "lib/pull-request/index.ts",
    "content": "export * from './bump';\nexport * from './merge-back';\nexport * from './pr';\n"
  },
  {
    "path": "lib/pull-request/merge-back.ts",
    "content": "import { Construct } from 'constructs';\nimport * as pr from './pr';\nimport { WritableGitHubRepo } from '../repo';\n\n/**\n * Properties for configuring the head branch of the bump PR.\n * (The branch the PR will be merged from)\n */\nexport interface AutoMergeBackHead {\n\n  /**\n   * The name of branch. Will be created if it doesn't exist.\n   * $VERSION will be substituted by the current version (obtained by executing `versionCommand`).\n   *\n   * @default 'merge-back/$VERSION'\n   */\n  readonly name?: string;\n\n  /**\n   * @see 'source' property in AutoPullRequest.Head\n   */\n  readonly source?: string;\n}\n\nexport interface MergeBackStage {\n\n  /**\n   * Which stage should the merge back be part of. (Created if missing)\n   *\n   * @default 'MergeBack'\n   */\n  readonly name?: string;\n\n  /**\n   * The name of the stage that the merge back stage should go after of. (Must exist)\n   */\n  readonly after: string;\n}\n\nexport interface AutoMergeBackOptions extends pr.AutoPullRequestOptions {\n  /**\n   * The command to determine the current version.\n   *\n   * @default 'git describe'\n   */\n  versionCommand?: string;\n\n  /**\n   * Title of the PR.\n   *\n   * $VERSION will be substituted by the current version (obtained by executing `versionCommand`).\n   *\n   * @default 'chore(release): merge back $VERSION'\n   */\n  title?: string;\n\n  /**\n   * Body of the PR.\n   *\n   * @default 'See [CHANGELOG](https://github.com/${props.repo.owner}/${props.repo.repo}/blob/${head}/CHANGELOG.md)'\n   * (Link to the CHANGELOG file of the head branch)\n   */\n  body?: string;\n\n  /**\n   * Head branch of the PR.\n   *\n   * $VERSION will be substituted by the current version (obtained by executing `versionCommand`).\n   *\n   * @default - Will be created from release and named 'merge-back/$VERSION'\n   */\n  head?: AutoMergeBackHead;\n\n  /**\n   * The exit code of this command determines whether or not to proceed with the\n   * PR creation. 
If configured, this command is the first one to run, and if it fails, all\n   * other commands will be skipped.\n   *\n   * This command is the first to execute, and should not assume any pre-existing state.\n   *\n   * @default - no condition\n   */\n  condition?: string;\n\n  /**\n   * Description for the CodeBuild project\n   *\n   * @default - No description\n   */\n  projectDescription?: string;\n}\n\nexport interface AutoMergeBackPipelineOptions extends AutoMergeBackOptions {\n  /**\n   * Specify stage options to create the merge back inside a stage of the pipeline.\n   *\n   * @default - The CodeBuild project will be created independent of any stage.\n   */\n  readonly stage?: MergeBackStage;\n}\n\nexport interface AutoMergeBackProps extends AutoMergeBackOptions {\n  /**\n   * The repository to bump.\n   */\n  repo: WritableGitHubRepo;\n}\n\nexport class AutoMergeBack extends Construct {\n\n  /**\n   * The underlying AutoPullRequest construct.\n   */\n  public readonly pr: pr.AutoPullRequest;\n\n  constructor(parent: Construct, id: string, props: AutoMergeBackProps) {\n    super(parent, id);\n\n    const versionCommand = props.versionCommand ?? 'git describe';\n    const headName = props.head?.name ?? 'merge-back/$VERSION';\n    const title = props.title ?? 'chore(merge-back): $VERSION';\n    const body = props.body ?? `See [CHANGELOG](https://github.com/${props.repo.owner}/${props.repo.repo}/blob/${headName}/CHANGELOG.md)`;\n\n    this.pr = new pr.AutoPullRequest(this, 'AutoMergeBack', {\n      ...props,\n      body,\n      title,\n      head: {\n        name: headName,\n        source: props.head?.source,\n      },\n      exports: {\n        ...props.exports,\n        VERSION: versionCommand,\n      },\n    });\n  }\n}\n"
  },
  {
    "path": "lib/pull-request/pr.ts",
    "content": "import {\n  Duration,\n  aws_cloudwatch as cloudwatch,\n  aws_codebuild as cbuild,\n  aws_events as events,\n  aws_events_targets as events_targets,\n  aws_iam as iam,\n} from 'aws-cdk-lib';\nimport { Construct } from 'constructs';\nimport { BuildEnvironmentProps, createBuildEnvironment } from '../build-env';\nimport * as permissions from '../permissions';\nimport { WritableGitHubRepo } from '../repo';\n\n/**\n * Properties for creating a Pull Request Job.\n */\nexport interface AutoPullRequestOptions {\n  /**\n   * The base branch of the PR.\n   *\n   * @default 'master'\n   */\n  base?: Base;\n\n  /**\n   * True if you only want to push the head branch without creating a PR.\n   * Useful when used along with 'commits' to execute a commit-and-push automatically.\n   *\n   * // TODO: Consider moving this functionality to a separate construct.\n   *\n   * @default false\n   */\n  readonly pushOnly?: boolean;\n\n  /**\n   * Title of the PR.\n   *\n   * @default `Merge ${head} to ${base}`\n   */\n  title?: string;\n\n  /**\n   * Body the PR. Note that the body is updated post PR creation,\n   * this means you can use the $PR_NUMBER env variable to refer to the PR itself.\n   *\n   * @default - no body.\n   */\n  body?: string;\n\n  /**\n   * Labels applied to the PR.\n   *\n   * @default - no labels.\n   */\n  labels?: string[];\n\n  /**\n   * Build environment for the CodeBuild job.\n   *\n   * @default - default configuration.\n   */\n  build?: BuildEnvironmentProps;\n\n  /**\n   * Git clone depth.\n   *\n   * @default 0 (clones the entire repository revisions)\n   */\n  cloneDepth?: number;\n\n  /**\n   * Key value pairs of variables to export. 
These variables will be available for dynamic evaluation in any\n   * subsequent command.\n   *\n   * Key - Variable name (e.g VERSION)\n   * Value - Command that evaluates to the value of the variable (e.g 'git describe')\n   *\n   * Example:\n   *\n   * Configure an export in the form of:\n   *\n   * { 'VERSION': 'git describe' }\n   *\n   * Use the $VERSION variable in the PR title: 'chore(release): $VERSION'\n   *\n   * Note that these exports are executed after the `commands` execution,\n   * so they have access to the artifacts said commands produce (e.g version bump).\n   *\n   * @default - no exports\n   */\n  exports?: { [key: string]: string };\n\n  /**\n   * The schedule to produce an automatic PR.\n   *\n   * The expression can be one of:\n   *\n   *  - cron expression, such as \"cron(0 12 * * ? *)\" will trigger every day at 12pm UTC\n   *  - rate expression, such as \"rate(1 day)\" will trigger every 24 hours from the time of deployment\n   *\n   * @see https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html\n   *\n   * @default - no schedule, should be triggered manually.\n   */\n  scheduleExpression?: string;\n\n}\n\nexport interface AutoPullRequestProps extends AutoPullRequestOptions {\n  /**\n   * The repository to create a PR in.\n   */\n  repo: WritableGitHubRepo;\n\n  /**\n   * A set of commands to run against the head branch.\n   * Useful for things like version bumps or any auto-generated commits.\n   *\n   * Note that you cannot use export keys in these commands (See `exports` property)\n   *\n   * @default - no commands.\n   */\n  commands?: string[];\n\n  /**\n   * The head branch of the PR.\n   */\n  head: Head;\n\n  /**\n   * The exit code of this command determines whether or not to proceed with the\n   * PR creation. 
If configured, this command is the first one to run, and if it fails, all\n   * other commands will be skipped.\n   *\n   * This command is the first to execute, and should not assume any pre-existing state.\n   *\n   * @default - no condition\n   */\n  condition?: string;\n\n  /**\n   * If any PR labeled with the given labels is still open, no new PR will be created\n   *\n   * @default - don't look at open PRs\n   */\n  readonly skipIfOpenPrsWithLabels?: string[];\n\n  /**\n   * Description string for the CodeBuild project\n   *\n   * @default - No description\n   */\n  readonly projectDescription?: string;\n}\n\n/**\n * Properties for configuring the base branch of the PR.\n */\nexport interface Base {\n\n  /**\n   * Branch name.\n   *\n   * This branch must exist.\n   *\n   * @default 'master'\n   */\n  readonly name?: string;\n}\n\n/**\n * Properties for configuring the head branch of the PR.\n */\nexport interface Head {\n\n  /**\n   * Branch name.\n   *\n   * This branch will be created if it doesn't exist.\n   */\n  readonly name: string;\n\n  /**\n   * The source sha of the branch.\n   *\n   * If the given branch already exists, this sha will be auto-merged onto it. 
Note that in such a case,\n   * the PR creation might fail in case there are merge conflicts.\n   *\n   * If the given branch doesn't exist, the newly created branch will be based of this hash.\n   *\n   * Note that dynamic exports are not allowed for this property.\n   *\n   * @default - the base branch of the pr.\n   */\n  readonly source?: string;\n}\n\n/**\n * Creates a CodeBuild job that, when triggered, opens a GitHub Pull Request.\n */\nexport class AutoPullRequest extends Construct {\n\n  /**\n   * CloudWatch alarm that will be triggered if the job fails.\n   */\n  public readonly alarm: cloudwatch.Alarm;\n\n  /**\n   * The CodeBuild project this construct creates.\n   */\n  public readonly project: cbuild.IProject;\n\n  private readonly props: AutoPullRequestProps;\n\n  private readonly baseBranch: string;\n  private readonly headSource: string;\n  private readonly exports: { [key: string]: string };\n\n  constructor(parent: Construct, id: string, props: AutoPullRequestProps) {\n    super(parent, id);\n\n    this.props = props;\n\n    this.baseBranch = props.base?.name ?? 'master';\n    this.headSource = props.head.source ?? this.baseBranch;\n    this.exports = props.exports ?? {};\n\n    for (const ex of Object.keys(this.exports)) {\n      if (this.headSource.includes(`\\${${ex}}`) || this.headSource.includes(`\\$${ex}`)) {\n        throw new Error(`head source (${this.headSource}) cannot contain dynamic exports: ${ex}`);\n      }\n    }\n\n    const sshKeySecret = props.repo.sshKeySecret;\n    const commitEmail = props.repo.commitEmail;\n    const commitUsername = props.repo.commitUsername;\n    const cloneDepth = props.cloneDepth === undefined ? 
0 : props.cloneDepth;\n\n    const needsGitHubTokenSecret = !this.props.pushOnly || !!this.props.skipIfOpenPrsWithLabels;\n\n    let commands: string[] = [\n\n      ...this.configureSshAccess(),\n\n      // when the job is triggered as a CodePipeline action, the working directory\n      // is populated with the output artifact of the CodeCommitSourceAction, which doesn't include\n      // the .git directory in the zipped s3 archive. (Yeah, fun stuff).\n      // see https://itnext.io/how-to-access-git-metadata-in-codebuild-when-using-codepipeline-codecommit-ceacf2c5c1dc\n      ...this.cloneIfNeeded(),\n    ];\n\n    if (this.props.condition) {\n      // there's no way to stop a BuildSpec execution halfway through without throwing an error. Believe me, I\n      // checked the code. Instead we define a variable that we will switch all other lines on/off.\n      commands.push(`${this.props.condition} ` +\n      '&& { echo \\'Skip condition is met, skipping...\\' && export SKIP=true; } ' +\n      '|| { echo \\'Skip condition is not met, continuing...\\' && export SKIP=false; }');\n    }\n\n    // read the token\n    if (needsGitHubTokenSecret) {\n      commands.push(`export GITHUB_TOKEN=$(aws secretsmanager get-secret-value --secret-id \"${this.props.repo.tokenSecretArn}\" --output=text --query=SecretString)`);\n    }\n\n    if (this.props.skipIfOpenPrsWithLabels) {\n      commands.push(...this.skipIfOpenPrs(this.props.skipIfOpenPrsWithLabels));\n    }\n\n    commands.push(\n      ...this.createHead(),\n      ...this.pushHead(),\n    );\n\n    if (!this.props.pushOnly) {\n      commands.push(...this.createPullRequest());\n    }\n\n    // toggle all commands according to the SKIP variable.\n    commands = commands.map((command: string) => `$SKIP || { ${command} ; }`);\n\n    // intially all commands are enabled.\n    commands.unshift('export SKIP=false');\n\n    this.project = new cbuild.Project(this, 'PullRequest', {\n      source: props.repo.createBuildSource(this, 
false, { cloneDepth }),\n      description: props.projectDescription,\n      environment: createBuildEnvironment(props.build ?? {}),\n      buildSpec: cbuild.BuildSpec.fromObject({\n        version: '0.2',\n        phases: {\n          pre_build: {\n            commands: [\n              `git config --global user.email \"${commitEmail}\"`,\n              `git config --global user.name \"${commitUsername}\"`,\n            ],\n          },\n          build: { commands },\n        },\n      }),\n      ssmSessionPermissions: true,\n    });\n\n    // Always exists as the project is not a reference\n    const projectRole = this.project.role!;\n    projectRole.addManagedPolicy(iam.ManagedPolicy.fromAwsManagedPolicyName('AmazonElasticContainerRegistryPublicReadOnly'));\n    permissions.grantSecretRead(sshKeySecret, projectRole);\n    if (needsGitHubTokenSecret) {\n      permissions.grantSecretRead({ secretArn: props.repo.tokenSecretArn }, projectRole);\n    }\n\n    if (props.scheduleExpression) {\n      const schedule = events.Schedule.expression(props.scheduleExpression);\n      new events.Rule(this, 'Scheduler', {\n        description: 'Schedules an automatic Pull Request for this repository',\n        schedule,\n        targets: [new events_targets.CodeBuildProject(this.project)],\n      });\n    }\n\n    this.alarm = this.project.metricFailedBuilds({ period: Duration.seconds(300) }).createAlarm(this, 'AutoPullRequestFailedAlarm', {\n      threshold: 1,\n      evaluationPeriods: 1,\n      treatMissingData: cloudwatch.TreatMissingData.IGNORE,\n    });\n  }\n  private createHead(): string[] {\n\n    return [\n      // check if head branch exists\n      `git rev-parse --verify origin/${this.props.head.name} ` +\n\n      // checkout and merge if it does (this might fail due to merge conflicts)\n      `&& { git checkout ${this.props.head.name} && git merge ${this.headSource} && ${this.runCommands()};  } ` +\n\n      // create if it doesnt. 
we initially use 'temp' to allow using exports in the head branch name. (e.g bump/$VERSION)\n      `|| { git checkout ${this.headSource} && git checkout -b temp && ${this.runCommands()} && git branch -M ${this.props.head.name}; }`,\n\n    ];\n\n  }\n\n  private cloneIfNeeded(): string[] {\n\n    return [\n      // check if .git exist\n      'ls .git ' +\n\n      // all good\n      '&& { echo \".git directory exists\";  } ' +\n\n      // clone if it doesn't\n      `|| { echo \".git directory doesnot exist - cloning...\" && git init . && git remote add origin git@github.com:${this.props.repo.owner}/${this.props.repo.repo}.git && git fetch && git reset --hard origin/${this.baseBranch} && git branch -M ${this.baseBranch} && git clean -fqdx; }`,\n\n    ];\n\n  }\n\n  private runCommands(): string {\n\n    const userCommands = this.props.commands ?? [];\n    const exports = Object.entries(this.exports).map(entry => `export ${entry[0]}=$(${entry[1]})`);\n\n    return [\n\n      ...userCommands,\n\n      // exports should be executed immediately after the user commands (not before)\n      // because they might need access to artifacts produced by them (e.g version file).\n      ...exports,\n\n      'echo Finished running user commands',\n    ].join(' && ');\n\n  }\n\n  private configureSshAccess(): string[] {\n\n    return [\n      'aws secretsmanager get-secret-value '\n        + `--secret-id \"${this.props.repo.sshKeySecret.secretArn}\" `\n        + '--output=text --query=SecretString > ~/.ssh/id_rsa',\n      'mkdir -p ~/.ssh',\n      'chmod 0600 ~/.ssh/id_rsa ~/.ssh/config',\n      'ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts',\n    ];\n\n  }\n\n  private pushHead(): string[] {\n    // We will do nothing and set `SKIP=true` if the head ref is an ancestor of the base branch (no PR could be created)\n    return [\n      `git merge-base --is-ancestor ${this.props.head.name} origin/${this.baseBranch}`\n        + ` && { echo \"Skipping: ${this.props.head.name} is an 
ancestor of origin/${this.baseBranch}\"; export SKIP=true; }`\n        + ` || { echo \"Pushing: ${this.props.head.name} is ahead of origin/${this.baseBranch}\"; export SKIP=false; }`,\n      `git remote add origin_ssh ${this.props.repo.repositoryUrlSsh}`,\n      // Need `--atomic`, otherwise `git push` might successfully push the tags but not to `main`.\n      `git push --atomic --follow-tags origin_ssh ${this.props.head.name}:${this.props.head.name}`,\n    ];\n  }\n\n  private skipIfOpenPrs(labels: string[]): string[] {\n    const filters = [\n      `repo:${this.props.repo.owner}/${this.props.repo.repo}`,\n      'is:pr',\n      'is:open',\n      ...labels.map(l => `label:${l}`),\n    ];\n\n    return [\n      `${this.githubCurlGet(`/search/issues?q=${encodeURIComponent(filters.join(' '))}`, '-o search.json')}`,\n      'node -e \\'process.exitCode = require(\"./search.json\").total_count\\''\n        + ` || { echo \"Found open PRs with label ${labels}, skipping PR.\"; export SKIP=true; }`,\n    ];\n  }\n\n\n  private createPullRequest(): string[] {\n\n    const head = this.props.head.name;\n    const base = this.baseBranch;\n\n    if (head === base) {\n      throw new Error(`Head branch (\"${base}\") is the same as the base branch (\"${head}\")`);\n    }\n\n    const props = this.props;\n    const title = props.title ?? `Merge ${head} to ${base}`;\n    const body = this.props.body ?? 
'';\n\n    const createRequest = { title, base, head };\n\n    const commands = [];\n\n    // create the PR\n    commands.push(`${this.githubCurl('/pulls', '-X POST -o pr.json', createRequest)} && export PR_NUMBER=$(node -p 'require(\"./pr.json\").number')`);\n\n    // update the body\n    commands.push(this.githubCurl('/pulls/$PR_NUMBER', '-X PATCH', { body: body }));\n\n    if (this.props.labels && this.props.labels.length > 0) {\n    // apply labels.\n      commands.push(this.githubCurl('/issues/$PR_NUMBER/labels', '-X POST', { labels: this.props.labels }));\n    }\n\n    return commands;\n\n  }\n\n  private githubCurl(uri: string, command: string, request: any): string {\n    return [\n      'curl --fail',\n      command,\n      '--header \"Authorization: token $GITHUB_TOKEN\"',\n      '--header \"Content-Type: application/json\"',\n      `-d ${JSON.stringify(JSON.stringify(request))}`,\n      `https://api.github.com/repos/${this.props.repo.owner}/${this.props.repo.repo}${uri}`,\n    ].join(' ');\n  }\n\n  private githubCurlGet(uri: string, command: string): string {\n    return [\n      'curl --fail',\n      command,\n      '--header \"Authorization: token $GITHUB_TOKEN\"',\n      '--header \"Content-Type: application/json\"',\n      `'https://api.github.com${uri}'`,\n    ].join(' ');\n  }\n\n}\n\n"
  },
  {
    "path": "lib/registry-sync/ecr-mirror.ts",
    "content": "import {\n  IAspect, Lazy, Stack, Token,\n  aws_ecr as ecr,\n  aws_codebuild as codebuild,\n  aws_events as events,\n  aws_events_targets as targets,\n  aws_iam as iam,\n  aws_s3_assets as s3Assets,\n  aws_secretsmanager as sm,\n  custom_resources as cr,\n  Annotations,\n} from 'aws-cdk-lib';\nimport { Construct, IConstruct } from 'constructs';\nimport { MirrorSource } from './mirror-source';\nimport { DEFAULT_SUPERCHAIN_IMAGE } from '../constants';\n\n/**\n * Authentication details for DockerHub.\n *\n * @see https://docs.aws.amazon.com/codebuild/latest/userguide/build-spec-ref.html#build-spec.env.secrets-manager\n */\nexport interface DockerHubCredentials {\n\n  /**\n   * The secret that contains the username and password for Dockerhub\n   */\n  readonly secret: sm.ISecret;\n\n  /**\n   * The secret key that contains the username in the specified secret.\n   */\n  readonly usernameKey: string;\n\n  /**\n   * The secret key that contains the password in the specified secret.\n   */\n  readonly passwordKey: string;\n\n  /**\n   * Version stage of the secret.\n   *\n   * @default 'AWSCURRENT'\n   */\n  readonly versionStage?: string;\n}\n\n/**\n * Properties to initialize EcrRegistrySync\n */\nexport interface EcrMirrorProps {\n  /**\n   * The list of images to keep sync'ed.\n   */\n  readonly sources: MirrorSource[];\n\n  /**\n   * Credentials to signing into Dockerhub.\n   */\n  readonly dockerHubCredentials: DockerHubCredentials;\n\n  /**\n   * The image used to run the mirror step itself.\n   *\n   * Prefer to supply the image yourself here.\n   *\n   * @default - Some superchain image that may grow outdated.\n   */\n  readonly buildImage?: codebuild.IBuildImage;\n\n  /**\n   * Sync job runs on a schedule.\n   * Throws an error if neither this nor `autoStart` are specified.\n   * @default - does not run on schedule\n   */\n  readonly schedule?: events.Schedule;\n\n  /**\n   * Start the sync job immediately after the deployment.\n   * This injects 
a custom resource that is executed as part of the deployment.\n   * Throws an error if neither this nor `schedule` are specified.\n   * @default false\n   */\n  readonly autoStart?: boolean;\n}\n\n/**\n * Synchronize images from DockerHub to an ECR registry in the AWS account.\n * This is particularly useful to workaround DockerHub's throttling on pulls and use ECR instead.\n */\nexport class EcrMirror extends Construct {\n\n  private readonly _repos: Map<string, ecr.Repository> = new Map();\n  private readonly _repoTagsSeen = new Set<string>();\n\n  public readonly project: codebuild.Project;\n\n  constructor(scope: Construct, id: string, props: EcrMirrorProps) {\n    super(scope, id);\n\n    if (!props.schedule && !props.autoStart) {\n      throw new Error('Either schedule or autoStart must be provided');\n    }\n\n    const ecrRegistry = `${Stack.of(scope).account}.dkr.ecr.${Stack.of(scope).region}.amazonaws.com`;\n    const commands: string[] = [];\n    const assets = new Array<s3Assets.Asset>();\n\n    const codeBuildSecretValue = (key: string, auth: DockerHubCredentials) => {\n      return `${props.dockerHubCredentials.secret.secretName}:${key}:${auth.versionStage ?? 'AWSCURRENT'}`;\n    };\n\n    const username = codeBuildSecretValue(props.dockerHubCredentials.usernameKey, props.dockerHubCredentials);\n    const password = codeBuildSecretValue(props.dockerHubCredentials.passwordKey, props.dockerHubCredentials);\n\n    if (!props.buildImage) {\n      Annotations.of(this).addWarningV2('aws-delivlib:EcrMirror.missingBuildImage', 'Prefer supplying an explicit build image to relying on the default superchain.');\n    }\n\n    this.project = new codebuild.Project(this, 'EcrPushImages', {\n      description: Lazy.string({ produce: () => `Synchronize ${props.sources.length} images from DockerHub to local ECR` }),\n      environment: {\n        privileged: true,\n        buildImage: props.buildImage ?? 
codebuild.LinuxBuildImage.fromDockerRegistry(DEFAULT_SUPERCHAIN_IMAGE),\n      },\n      environmentVariables: {\n        // DockerHub credentials to avoid throttling\n        DOCKERHUB_USERNAME: { value: username, type: codebuild.BuildEnvironmentVariableType.SECRETS_MANAGER },\n        DOCKERHUB_PASSWORD: { value: password, type: codebuild.BuildEnvironmentVariableType.SECRETS_MANAGER },\n      },\n      buildSpec: codebuild.BuildSpec.fromObject(Lazy.any({\n        produce: () => {\n          return {\n            version: '0.2',\n            phases: {\n              build: {\n                commands: [\n\n                  // start the docker daemon\n                  'nohup /usr/bin/dockerd --host=unix:///var/run/docker.sock --host=tcp://127.0.0.1:2375 --storage-driver=overlay2&',\n                  'timeout 15 sh -c \"until docker info; do echo .; sleep 1; done\"',\n\n                  // login to dockerhub so we won't get throttled\n                  'docker login -u ${DOCKERHUB_USERNAME} -p ${DOCKERHUB_PASSWORD}',\n\n                  // login to ecr so we can push to it\n                  `aws ecr get-login-password | docker login --username AWS --password-stdin ${ecrRegistry}`,\n\n                  // login to ecr-public so we can pull from it with improved rate limits\n                  'aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws',\n\n                  ...commands,\n                ],\n              },\n            },\n          };\n        },\n      })),\n      ssmSessionPermissions: true,\n    });\n\n    // Ensure the runner has PULL access to ECR-Public.\n    this.project.role!.addManagedPolicy(iam.ManagedPolicy.fromAwsManagedPolicyName('AmazonElasticContainerRegistryPublicReadOnly'));\n\n    // Give the project access to the Docker Hub credentials\n    // Required for access to private images and to avoid throttling of unauthorized requests\n    
props.dockerHubCredentials.secret.grantRead(this.project);\n\n    for (const image of props.sources) {\n      const result = image.bind({\n        scope: this,\n        ecrRegistry,\n        syncJob: this.project,\n      });\n      commands.push(...result.commands);\n\n      const repoTag = `${result.repositoryName}:${result.tag}`;\n      if (this._repoTagsSeen.has(repoTag)) {\n        throw new Error(`Mirror source with repository name [${result.repositoryName}] and tag [${result.tag}] already exists.`);\n      }\n      this._repoTagsSeen.add(repoTag);\n\n      this.createMirrorRepo(result.repositoryName);\n\n      const ecrImageUri = `${ecrRegistry}/${result.repositoryName}:${result.tag}`;\n      commands.push(`docker push ${ecrImageUri}`);\n\n      // clean after each push so that we don't fillup disk space\n      // possibly failing the next pull.\n      commands.push('docker image prune --all --force');\n    }\n\n    // CodeBuild needs to read the secret to resolve environment variables\n    props.dockerHubCredentials.secret.grantRead(this.project);\n\n    ecr.AuthorizationToken.grantRead(this.project);\n    this._repos.forEach((r, _) => r.grantPullPush(this.project));\n\n    // this project needs to download the assets so it can build them\n    assets.forEach(a => a.grantRead(this.project));\n\n    if (props.autoStart) {\n      new cr.AwsCustomResource(this, 'BuildExecution', {\n        installLatestAwsSdk: false,\n        policy: cr.AwsCustomResourcePolicy.fromSdkCalls({ resources: [this.project.projectArn] }),\n        onUpdate: {\n          action: 'startBuild',\n          service: 'CodeBuild',\n          parameters: {\n            projectName: this.project.projectName,\n            // to tigger the build on every update\n            idempotencyToken: `${Date.now()}`,\n          },\n          physicalResourceId: cr.PhysicalResourceId.of('EcrRegistryExecution'),\n\n          // need since the default reponse if greater than the 4k limit for custom 
resources.\n          outputPaths: ['build.id'],\n        },\n      });\n    }\n\n    if (props.schedule) {\n      new events.Rule(this, 'ScheduledTrigger', {\n        description: 'Trigger ECR mirror job',\n        schedule: props.schedule,\n        targets: [new targets.CodeBuildProject(this.project)],\n      });\n    }\n  }\n\n  private createMirrorRepo(ecrRepositoryName: string) {\n    if (this._repos.get(ecrRepositoryName)) {\n      return;\n    }\n\n    const repository = new ecr.Repository(this, `Repo${ecrRepositoryName}`, {\n      repositoryName: ecrRepositoryName,\n    });\n    this._repos.set(ecrRepositoryName, repository);\n  }\n\n  /**\n   * Get the target ECR repository for the given repository name and tag.\n   * @param repositoryName The ECR repository with this name\n   * @param tag the tag for the repository, defaults to 'latest'\n   */\n  public ecrRepository(repositoryName: string): ecr.IRepository | undefined {\n    return this._repos.get(repositoryName);\n  }\n};\n\n/**\n * An aspect that walks through the construct tree and replaces CodeBuild jobs with Docker images\n * with ECR equivalents found in the EcrMirror.\n */\nexport class EcrMirrorAspect implements IAspect {\n  constructor(private readonly mirror: EcrMirror) {}\n\n  public visit(construct: IConstruct) {\n    if (construct instanceof codebuild.Project) {\n      const cfnproject = construct.node.defaultChild as codebuild.CfnProject;\n      if (!Token.isUnresolved(cfnproject.environment)) {\n        const env = cfnproject.environment as codebuild.CfnProject.EnvironmentProperty;\n        const imageName = env.image.split(':')[0];\n        const tag = env.image.split(':')[1];\n        const replacement = this.mirror.ecrRepository(imageName);\n        if (replacement) {\n          cfnproject.environment = {\n            ...env,\n            image: codebuild.LinuxBuildImage.fromEcrRepository(replacement, tag).imageId,\n          };\n          replacement.grantPull(construct);\n          
ecr.AuthorizationToken.grantRead(construct);\n        }\n      }\n    }\n  }\n}\n"
  },
  {
    "path": "lib/registry-sync/index.ts",
    "content": "export * from './ecr-mirror';\nexport * from './mirror-source';"
  },
  {
    "path": "lib/registry-sync/mirror-source.ts",
    "content": "import * as path from 'node:path';\nimport {\n  aws_codebuild as codebuild,\n  aws_s3_assets as s3Assets,\n} from 'aws-cdk-lib';\nimport { Construct } from 'constructs';\n\nexport interface MirrorSourceBindOptions {\n  /**\n   * The target ECR registry\n   */\n  readonly ecrRegistry: string;\n  /**\n   * The scope to attach any constructs that may also be needed.\n   */\n  readonly scope: Construct;\n\n  /**\n   * The CodeBuild project that will run the synchronization between DockerHub and ECR.\n   * @default - either no sync job is present or it's not defined yet.\n   */\n  readonly syncJob?: codebuild.IProject;\n}\n\nexport interface MirrorSourceConfig {\n  /**\n   * The commands to run to retrieve the docker image.\n   * e.g. ['docker pull <image-id>']\n   */\n  readonly commands: string[];\n\n  /**\n   * The name of the target ECR repository.\n   */\n  readonly repositoryName: string;\n\n  /**\n   * The tag to be use for the target ECR image.\n   */\n  readonly tag: string;\n}\n\n/** Additional options when configuring a Mirror Source from a local directory */\nexport interface MirrorSourceDirectoryOptions {\n  /**\n   * Tag of the built image.\n   * @default 'latest'\n   */\n  readonly tag?: string;\n\n  /**\n   * Build args to pass to the `docker build` command.\n   *\n   * @default - no build args are passed\n   */\n  readonly buildArgs?: { [key: string]: string };\n}\n\n/**\n * Source of the image.\n */\nexport abstract class MirrorSource {\n\n  /**\n   * Configure an image from DockerHub.\n   *\n   * @param image e.g jsii/superchain\n   * @param tag optional, defaults to 'latest'\n   *\n   * @deprecated This method's name inaccurately expresses that the image comes\n   * from DockerHub, when any publicly-accessible repository can be used. 
Prefer\n   * using `fromImageName(string, string?)` instead, which is more aptly named.\n   */\n  public static fromDockerHub(image: string, tag: string = 'latest'): MirrorSource {\n    return this.fromPublicImage(image, tag);\n  }\n\n  /**\n   * Configure an image from DockerHub or a repository-qualified image name.\n   *\n   * @param image e.g public.ecr.aws/jsii/superchain\n   * @param tag optional, defaults to 'latest'\n   * @param ecrRepositoryName the name of the ECR Repository to use (e.g: jsii/superchain)\n   */\n  public static fromPublicImage(image: string, tag: string = 'latest', ecrRepositoryName: string = image.includes('/') ? image : `library/${image}`): MirrorSource {\n    class DockerHubMirrorSource extends MirrorSource {\n      constructor() {\n        if (image.includes(':')) {\n          throw new Error('image must not include tag');\n        }\n        // simulates DockerHub by prefixing library/ to official images\n        const repositoryName = image.includes('/') ? 
image : `library/${image}`;\n        super(repositoryName, tag, undefined, ecrRepositoryName);\n      }\n\n      public bind(options: MirrorSourceBindOptions): MirrorSourceConfig {\n        const ecrImageUri = `${options.ecrRegistry}/${this.ecrRepositoryName}:${this.tag}`;\n        return {\n          commands: [\n            `docker pull ${this.repositoryName}:${this.tag}`,\n            `docker tag ${this.repositoryName}:${this.tag} ${ecrImageUri}`,\n          ],\n          repositoryName: this.ecrRepositoryName,\n          tag: this.tag,\n        };\n      }\n    }\n\n    return new DockerHubMirrorSource();\n  }\n\n  /**\n   * DEPRECATED\n   * @deprecated use fromDir()\n   */\n  public static fromDirectory(directory: string, repositoryName: string, tag?: string): MirrorSource {\n    return this.fromDir(directory, repositoryName, { tag });\n  }\n\n  /**\n   * Configure an image from a local directory.\n   *\n   * @param directory Path to directory containing the Dockerfile.\n   * @param repositoryName Repository name of the built image.\n   * @param options additional configuration options\n   */\n  public static fromDir(directory: string, repositoryName: string, opts: MirrorSourceDirectoryOptions = {}): MirrorSource {\n    class DirectoryMirrorSource extends MirrorSource {\n      constructor() {\n        super(repositoryName, opts.tag ?? 'latest', directory);\n      }\n\n      public bind(options: MirrorSourceBindOptions): MirrorSourceConfig {\n        const asset = new s3Assets.Asset(options.scope, `BuildContext${this.directory}${JSON.stringify(opts.buildArgs ?? 
{})}`, {\n          path: this.directory!,\n\n          // Need to give an explicit displayName, because the directory might\n          // be absolute, and the directories and args are liable to change on\n          // every pipeline run.\n          displayName: `EcrMirror directory ${path.basename(directory)}`,\n        });\n        if (options.syncJob) {\n          asset.grantRead(options.syncJob);\n        }\n        const ecrImageUri = `${options.ecrRegistry}/${this.ecrRepositoryName}:${this.tag}`;\n        const cmdFlags = [];\n        cmdFlags.push('--pull');\n        cmdFlags.push('-t', ecrImageUri);\n\n        if (opts.buildArgs) {\n          Object.entries(opts.buildArgs).forEach(([k, v]) => cmdFlags.push('--build-arg', `${k}=${v}`));\n        }\n\n        const zipFile = `${this.repositoryName}.zip`;\n        const tmpDir = this.repositoryName;\n\n        return {\n          commands: [\n            `rm -rf ${zipFile} ${tmpDir}`,\n            `aws s3 cp ${asset.s3ObjectUrl} ${zipFile}`,\n            `unzip ${zipFile} -d ${tmpDir}`,\n            `docker build ${cmdFlags.join(' ')} ${tmpDir}`,\n          ],\n          repositoryName: this.ecrRepositoryName,\n          tag: this.tag,\n        };\n      }\n    }\n    return new DirectoryMirrorSource();\n  }\n\n  private constructor(\n    protected readonly repositoryName: string,\n    protected readonly tag: string,\n    protected readonly directory?: string,\n    protected readonly ecrRepositoryName = repositoryName,\n  ) {\n  }\n\n  /**\n   * Bind the source with the EcrMirror construct.\n   */\n  public abstract bind(options: MirrorSourceBindOptions): MirrorSourceConfig;\n}\n"
  },
  {
    "path": "lib/release-email.sh",
    "content": "#!/bin/bash\nset -euo pipefail\n\nfiles=\"$(find . -type f | cut -d'/' -f2-)\"\n\necho \"<html>\"\necho \"<body>\"\necho \"<h3>Release Artifacts</h3>\"\n\nfor file in $files; do\n    s3url=\"s3://${RELEASE_BUCKET}${RELEASE_KEY_PREFIX}/${file}\"\n    presigned=\"$(aws s3 presign --expires $EXPIRES --region $REGION $s3url)\"\n    echo \"<li>\"\n    echo \"  <a href=\"$presigned\">\"\n    echo \"    $file\"\n    echo \"  </a>\"\n    echo \"</li>\"\ndone\n\necho \"</body>\"\necho \"</html>\"\n"
  },
  {
    "path": "lib/repo.ts",
    "content": "import {\n  SecretValue, SecretsManagerSecretOptions,\n  aws_codebuild as cbuild, aws_codecommit as ccommit,\n  aws_codepipeline as cpipeline, aws_codepipeline_actions as cpipeline_actions,\n} from 'aws-cdk-lib';\nimport { Construct } from 'constructs';\nimport { ExternalSecret } from './permissions';\n\nexport interface IRepo {\n  repositoryUrlHttp: string;\n  repositoryUrlSsh: string;\n  readonly allowsBadge: boolean;\n  readonly tokenSecretArn?: string;\n  createBuildSource(parent: Construct, webhook: boolean, options?: BuildSourceOptions): cbuild.ISource;\n  createSourceStage(pipeline: cpipeline.Pipeline, branch: string): cpipeline.Artifact;\n  describe(): any;\n}\n\nexport interface BuildSourceOptions {\n  /**\n   * Single branch\n   *\n   * Cannot be specified together with `branches`.\n   *\n   * @default - All branches\n   * @deprecated Use `branches` instead.\n   */\n  branch?: string;\n\n  /**\n   * Multiple branches\n   *\n   * Cannot be specified together with `branch`.\n   *\n   * @default - All branches\n   */\n  branches?: string[];\n  cloneDepth?: number;\n}\n\nexport class CodeCommitRepo implements IRepo {\n  public readonly allowsBadge = false;\n  public readonly tokenSecretArn?: string;\n\n  constructor(private readonly repository: ccommit.IRepository) {\n\n  }\n\n  public createSourceStage(pipeline: cpipeline.Pipeline, branch: string): cpipeline.Artifact {\n    const stage = pipeline.addStage({\n      stageName: 'Source',\n    });\n    const sourceOutput = new cpipeline.Artifact('Source');\n    stage.addAction(new cpipeline_actions.CodeCommitSourceAction({\n      actionName: 'Pull',\n      repository: this.repository,\n      branch,\n      output: sourceOutput,\n    }));\n    return sourceOutput;\n  }\n\n  public get repositoryUrlHttp() {\n    return this.repository.repositoryCloneUrlHttp;\n  }\n\n  public get repositoryUrlSsh() {\n    return this.repository.repositoryCloneUrlSsh;\n  }\n\n  public createBuildSource(_: Construct, 
_webhook: boolean, options: BuildSourceOptions = { }): cbuild.ISource {\n    return cbuild.Source.codeCommit({\n      repository: this.repository,\n      cloneDepth: options.cloneDepth,\n    });\n  }\n\n  public describe(): any {\n    return this.repository.repositoryName;\n  }\n}\n\ninterface GitHubRepoProps {\n  /**\n   * Secrets Manager ARN of the OAuth token secret that allows access to your github repo.\n   */\n  tokenSecretArn: string;\n\n  /**\n   * Options for referencing a secret value from Secrets Manager\n   */\n  tokenSecretOptions?: SecretsManagerSecretOptions;\n\n  /**\n   * In the form \"account/repo\".\n   */\n  repository: string;\n}\n\nexport class GitHubRepo implements IRepo {\n  public readonly allowsBadge = true;\n  public readonly owner: string;\n  public readonly repo: string;\n  public readonly tokenSecretArn: string;\n  public readonly tokenSecretOptions?: SecretsManagerSecretOptions;\n\n  constructor(props: GitHubRepoProps) {\n    const repository = props.repository;\n    if (repository.indexOf('/') == -1) {\n      throw new Error('Repository must be of the form \"account/repo\"');\n    }\n    const [owner, repo] = repository.split('/');\n\n    this.owner = owner;\n    this.repo = repo;\n    this.tokenSecretArn = props.tokenSecretArn;\n    this.tokenSecretOptions = props.tokenSecretOptions;\n  }\n\n  public get repositoryUrlHttp() {\n    return `https://github.com/${this.owner}/${this.repo}.git`;\n  }\n\n  public get repositoryUrlSsh() {\n    return `git@github.com:${this.owner}/${this.repo}.git`;\n  }\n\n  public createSourceStage(pipeline: cpipeline.Pipeline, branch: string): cpipeline.Artifact {\n    const stage = pipeline.addStage({ stageName: 'Source' });\n\n    const sourceOutput = new cpipeline.Artifact('Source');\n    stage.addAction(new cpipeline_actions.GitHubSourceAction({\n      actionName: 'Pull',\n      branch,\n      oauthToken: SecretValue.secretsManager(this.tokenSecretArn, this.tokenSecretOptions),\n      owner: 
this.owner,\n      repo: this.repo,\n      output: sourceOutput,\n    }));\n    return sourceOutput;\n  }\n\n  public createBuildSource(_: Construct, webhook: boolean, options: BuildSourceOptions = { }): cbuild.ISource {\n    if (options.branch && options.branches) {\n      throw new Error('Specify at most one of \\'branch\\' and \\'branches\\'');\n    }\n    const branches = options.branches ?? (options.branch ? [options.branch] : []);\n\n    return cbuild.Source.gitHub({\n      owner: this.owner,\n      repo: this.repo,\n      webhook,\n      cloneDepth: options.cloneDepth,\n      reportBuildStatus: webhook,\n      webhookFilters: webhook\n        ? this.createWebhookFilters(branches)\n        : undefined,\n    });\n  }\n\n  public describe() {\n    return `${this.owner}/${this.repo}`;\n  }\n\n  private createWebhookFilters(branches: string[]) {\n    if (branches.length > 0) {\n      // Turn the list of branches into a regex\n      const branchExpr = branches.map(b => `^refs/heads/${b}$`).join('|');\n\n      return [\n        cbuild.FilterGroup.inEventOf(cbuild.EventAction.PUSH)\n          .andHeadRefIs(branchExpr),\n        cbuild.FilterGroup.inEventOf(cbuild.EventAction.PULL_REQUEST_CREATED, cbuild.EventAction.PULL_REQUEST_UPDATED)\n          .andBaseRefIs(branchExpr),\n      ];\n    }\n    return [\n      cbuild.FilterGroup.inEventOf(\n        cbuild.EventAction.PUSH,\n        cbuild.EventAction.PULL_REQUEST_CREATED,\n        cbuild.EventAction.PULL_REQUEST_UPDATED,\n      ),\n    ];\n  }\n}\n\nexport interface WritableGitHubRepoProps extends GitHubRepoProps {\n  /**\n   * SSH key associated with this repository.\n   *\n   * This is required if you wish to be able to use actions that write to the repo\n   * such as docs publishing and automatic bumps.\n   */\n  sshKeySecret: ExternalSecret;\n\n  /**\n   * The username to use for the published commits\n   */\n  commitUsername: string;\n\n  /**\n   * The email address to use for the published commits\n   */\n  
commitEmail: string;\n\n}\n\nexport class WritableGitHubRepo extends GitHubRepo {\n\n  public static isWritableGitHubRepo(repo: IRepo): repo is WritableGitHubRepo {\n    const obj = repo as any;\n\n    return 'sshKeySecret' in obj\n      && 'commitEmail' in obj\n      && 'commitUsername' in obj;\n  }\n\n  public readonly sshKeySecret: ExternalSecret;\n  public readonly commitEmail: string;\n  public readonly commitUsername: string;\n\n  constructor(props: WritableGitHubRepoProps) {\n    super(props);\n\n    this.sshKeySecret = props.sshKeySecret;\n    this.commitEmail = props.commitEmail;\n    this.commitUsername = props.commitUsername;\n  }\n}\n"
  },
  {
    "path": "lib/shellable.ts",
    "content": "import * as fs from 'fs';\nimport * as path from 'path';\nimport {\n  Duration,\n  aws_cloudwatch as cloudwatch, aws_codebuild as cbuild,\n  aws_codepipeline as cpipeline, aws_codepipeline_actions as cpipeline_actions,\n  aws_iam as iam, aws_s3_assets as assets, aws_secretsmanager, aws_ssm, IgnoreMode,\n} from 'aws-cdk-lib';\nimport { IRole } from 'aws-cdk-lib/aws-iam';\nimport { Construct } from 'constructs';\nimport { BuildSpec } from './build-spec';\nimport { renderEnvironmentVariables } from './util';\n\nconst S3_BUCKET_ENV = 'SCRIPT_S3_BUCKET';\nconst S3_KEY_ENV = 'SCRIPT_S3_KEY';\n\nexport interface ShellableOptions {\n  /**\n   * Description for the CodeBuild Project\n   */\n  readonly description?: string;\n\n  /**\n   * Source for the CodeBuild project\n   *\n   * @default no source\n   */\n  source?: cbuild.ISource;\n\n  /**\n   * What platform to use to run the scripts on\n   *\n   * @default ShellPlatform.LinuxUbuntu\n   */\n  platform?: ShellPlatform;\n\n  /**\n   * Additional environment variables to set.\n   *\n   * @default No additional environment variables\n   */\n  environment?: { [key: string]: string | undefined };\n\n  /**\n   * Environment variables with secrets manager values. The values must be complete Secret Manager ARNs.\n   *\n   * @default no additional environment variables\n   */\n  environmentSecrets?: { [key: string]: string };\n\n  /**\n   * Environment variables with SSM parameter values.\n   *\n   * @default no additional environment variables\n   */\n  environmentParameters?: { [key: string]: string };\n\n  /**\n   * The compute type to use for the build container.\n   *\n   * Note that not all combinations are available. For example,\n   * Windows images cannot be run on ComputeType.Small.\n   *\n   * @default ComputeType.Medium\n   */\n  computeType?: cbuild.ComputeType;\n\n  /**\n   * Indicates how the project builds Docker images. 
Specify true to enable\n   * running the Docker daemon inside a Docker container. This value must be\n   * set to true only if this build project will be used to build Docker\n   * images, and the specified build environment image is not one provided by\n   * AWS CodeBuild with Docker support. Otherwise, all associated builds that\n   * attempt to interact with the Docker daemon will fail.\n   *\n   * @default false\n   */\n  privileged?: boolean;\n\n  /**\n   * The name for the build project.\n   *\n   * @default a name is generated by CloudFormation.\n   */\n  buildProjectName?: string;\n\n  /**\n   * Indicates if Regional AWS STS endpoints should be used instead\n   * of the global endpoint. Specify true to use Regional AWS STS endpoints.\n   *\n   * @default false\n   */\n  useRegionalStsEndpoints?: boolean;\n\n  /**\n   * Can be used to run this build using a specific IAM role. This can be used,\n   * for example, to execute in the context of another account (e.g. to run\n   * tests in isolation).\n   */\n  assumeRole?: AssumeRole;\n\n  /**\n   * Additional buildspec (for artifacts etc.)\n   *\n   * @default No additional buildspec\n   */\n  buildSpec?: BuildSpec;\n\n  /**\n   * The timeout of the build.\n   *\n   * @default the CodeBuild default (1 hour)\n   */\n  timeout?: Duration;\n\n  /**\n   * Alarm period.\n   *\n   * @default 300 seconds (5 minutes)\n   */\n  alarmPeriod?: Duration;\n\n  /**\n   * Alarm threshold.\n   * @default 1\n   */\n  alarmThreshold?: number;\n\n  /**\n   * Alarm evaluation periods.\n   * @default 1\n   */\n  alarmEvaluationPeriods?: number;\n\n  secondaryArtifactNames?: string[];\n\n  /**\n   * Clarify whether this Shellable produces any artifacts\n   *\n   * @default true\n   */\n  readonly producesArtifacts?: boolean;\n\n  /**\n   * Namespace to use when adding as an action to the pipeline\n   *\n   * @default No namespace\n   */\n  readonly actionNamespace?: string;\n\n  /**\n   * Additional environment variables to set from 
the pipeline action\n   *\n   * @default No environment variables\n   */\n  readonly pipelineEnvironmentVars?: Record<string, string>;\n\n  /**\n   * The service role to assume while running the build\n   *\n   * @default A role will be created\n   */\n  readonly serviceRole?: IRole;\n}\n\n/**\n * Properties used to create a Shellable\n */\nexport interface ShellableProps extends ShellableOptions {\n  /**\n   * Directory with the scripts.\n   *\n   * By default the whole directory will be uploaded. Use `excludeFilePatterns` to ignore files.\n   */\n  scriptDirectory: string;\n\n  /**\n   * File paths matching the glob patterns will be excluded from the script dir.\n   */\n  excludeFilePatterns?: string[];\n\n  /**\n   * Filename of the initial script to start, relative to scriptDirectory.\n   */\n  entrypoint: string;\n\n  /**\n   * Additional arguments to pass to the entrypoint script.\n   *\n   * (NOTE: not named 'arguments' because that's a reserved identifier in JavaScript)\n   *\n   * @default No arguments\n   */\n  readonly args?: string[];\n}\n\nexport interface AssumeRole {\n  /**\n   * The Amazon Resource Name (ARN) of the role to assume.\n   */\n  roleArn: string;\n\n  /**\n   * An identifier for the assumed role session.\n   *\n   * Use  the  role  session name to uniquely identify a session when the same\n   * role is assumed by different principals or for different reasons. In\n   * cross-account scenarios, the role session name is visible to, and can be\n   * logged by the account that owns the role.  The role session name is also\n   * used in the ARN of the assumed role principal. This means that subsequent\n   * cross-account API requests using the temporary security credentials will\n   * expose the role session name to the external account in their CloudTrail\n   * logs.\n   *\n   * The regex used to validate this parameter is a string of characters\n   * consisting  of upper- and lower-case alphanumeric characters with no\n   * spaces. 
You can also include underscores or any of the following\n   * characters: =,.@-\n   */\n  sessionName: string;\n\n  /**\n   * A  unique  identifier  that  is  used by third parties when assuming roles\n   * in their customers' accounts. For each  role  that  the  third party can\n   * assume, they should instruct their customers to ensure the role's trust\n   * policy checks for the external ID that the third  party generated.  Each\n   * time the third party assumes the role, they should pass the customer's\n   * external ID. The external ID is useful in  order to  help  third  parties\n   * bind a role to the customer who created it. For more information about the\n   * external ID, see How to Use an External ID When Granting Access to Your\n   * AWS Resources to a Third Party in the IAM User Guide .\n   *\n   * This parameter must be a string of characters consisting  of upper- and\n   * lower-case alphanumeric characters with no spaces. You can also include\n   * underscores or  any  of  the  following characters: =,.@:/-\n   */\n  externalId?: string;\n\n  /**\n   * When a profile name is configured, an assumed role configuration will be created\n   * in the shared aws configuration file (~/.aws/config). 
This is in contrast to simply invoking\n   * an `sts assume-role` command that creates a session with a fixed expiry date.\n   *\n   * Using a profile will delegate credential refreshing to the SDK/CLI.\n   * This is needed to support long running sessions that need to be longer than\n   * the session duration that can be configured with a `sts assume-role`.\n   *\n   * The application code can access this profile in the `AWS_PROFILE` env variable.\n   *\n   * Only relevant if `refresh` is specified.\n   *\n   * @see https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html\n   *\n   * @default 'long-running-profile'\n   */\n  profileName?: string;\n\n  /**\n   * Specify this if you have a long running execution that needs long running sessions.\n   * This will create a profile and use it to delegate credential refreshing to the SDK/CLI\n   *\n   * @default false\n   */\n  refresh?: boolean;\n\n}\n\n/**\n * A CodeBuild project that runs arbitrary scripts.\n *\n * The scripts to be run are specified by supplying a directory.\n * All files in the directory are uploaded, then the script designated\n * as the entry point is started.\n *\n * The script is executed in the directory where the build project's\n * input is stored. 
The directory where the script files are stored\n * is in the $SCRIPT_DIR environment variable.\n *\n * Supports both Windows and Linux computes.\n */\nexport class Shellable extends Construct {\n  public readonly project: cbuild.Project;\n  public readonly role: iam.IRole;\n\n  /**\n   * CloudWatch alarm that will be triggered if this action fails.\n   */\n  public readonly alarm: cloudwatch.Alarm;\n\n  private readonly platform: ShellPlatform;\n  private readonly buildSpec: BuildSpec;\n\n  private readonly outputArtifactName?: string;\n\n  constructor(parent: Construct, id: string, private readonly props: ShellableProps) {\n    super(parent, id);\n\n    this.platform = props.platform || ShellPlatform.LinuxUbuntu;\n\n    const entrypoint = path.join(props.scriptDirectory, props.entrypoint);\n    if (!fs.existsSync(entrypoint)) {\n      throw new Error(`Cannot find test entrypoint: ${entrypoint}`);\n    }\n\n    const asset = new assets.Asset(this, 'ScriptDirectory', {\n      path: props.scriptDirectory,\n      exclude: props.excludeFilePatterns,\n      ignoreMode: IgnoreMode.GLOB,\n    });\n\n    this.outputArtifactName = (props.producesArtifacts ?? true) ? 
`Artifact_${this.node.addr}` : undefined;\n    if (this.outputArtifactName && this.outputArtifactName.length > 100) {\n      throw new Error(`Whoops, too long: ${this.outputArtifactName}`);\n    }\n\n    this.buildSpec = BuildSpec.simple({\n      install: this.platform.installCommands(),\n      preBuild: this.platform.prebuildCommands(props.assumeRole, props.useRegionalStsEndpoints),\n      build: this.platform.buildCommands(props.entrypoint, props.args),\n    }).merge(props.buildSpec || BuildSpec.empty());\n\n    const environmentSecretsAsSecretNames = this.convertEnvironmentSecretArnsToSecretNames(props.environmentSecrets);\n\n    this.project = new cbuild.Project(this, 'Resource', {\n      projectName: props.buildProjectName,\n      description: props.description,\n      source: props.source,\n      role: props.serviceRole,\n      environment: {\n        buildImage: this.platform.buildImage,\n        computeType: props.computeType || cbuild.ComputeType.MEDIUM,\n        privileged: props.privileged,\n      },\n      environmentVariables: {\n        [S3_BUCKET_ENV]: { value: asset.s3BucketName },\n        [S3_KEY_ENV]: { value: asset.s3ObjectKey },\n        ...renderEnvironmentVariables(props.environment),\n        ...renderEnvironmentVariables(environmentSecretsAsSecretNames, cbuild.BuildEnvironmentVariableType.SECRETS_MANAGER),\n        ...renderEnvironmentVariables(props.environmentParameters, cbuild.BuildEnvironmentVariableType.PARAMETER_STORE),\n      },\n      timeout: props.timeout,\n      buildSpec: cbuild.BuildSpec.fromObject(this.buildSpec.render({ primaryArtifactName: this.outputArtifactName })),\n      ssmSessionPermissions: true,\n    });\n\n    this.role = this.project.role!; // not undefined, as it's a new Project\n    this.role.addManagedPolicy(iam.ManagedPolicy.fromAwsManagedPolicyName('AmazonElasticContainerRegistryPublicReadOnly'));\n    asset.grantRead(this.role);\n\n    // Grant read access to secrets\n    
Object.entries(props.environmentSecrets ?? {}).forEach(([name, secretArn]) => {\n      const secret = aws_secretsmanager.Secret.fromSecretCompleteArn(this, `${name}Secret`, secretArn);\n      secret.grantRead(this.role);\n    });\n\n    // Grant read access to parameters\n    Object.entries(props.environmentParameters ?? {}).forEach(([name, parameterName]) => {\n      const parameter = aws_ssm.StringParameter.fromStringParameterName(this, `${name}Parameter`, parameterName);\n      parameter.grantRead(this.role);\n    });\n\n    if (props.assumeRole) {\n      this.role.addToPrincipalPolicy(new iam.PolicyStatement({\n        actions: ['sts:AssumeRole'],\n        resources: [props.assumeRole.roleArn],\n      }));\n    }\n\n    this.alarm = new cloudwatch.Alarm(this, 'Alarm', {\n      metric: this.project.metricFailedBuilds({ period: props.alarmPeriod || Duration.seconds(300) }),\n      threshold: props.alarmThreshold || 1,\n      comparisonOperator: cloudwatch.ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD,\n      evaluationPeriods: props.alarmEvaluationPeriods || 1,\n      treatMissingData: cloudwatch.TreatMissingData.IGNORE,\n    });\n  }\n\n  public addToPipeline(stage: cpipeline.IStage, name: string, inputArtifact: cpipeline.Artifact, runOrder?: number):\n  cpipeline_actions.CodeBuildAction {\n    const codeBuildAction = new cpipeline_actions.CodeBuildAction({\n      actionName: name,\n      project: this.project,\n      runOrder,\n      input: inputArtifact,\n      variablesNamespace: this.props.actionNamespace,\n      environmentVariables: this.props.pipelineEnvironmentVars\n        ? Object.fromEntries(Object.entries(this.props.pipelineEnvironmentVars)\n          .map(([k, v]) => ([k, { type: cbuild.BuildEnvironmentVariableType.PLAINTEXT, value: v }] as const)))\n        : undefined,\n      outputs: this.outputArtifactName\n        ? [this.outputArtifactName, ...this.buildSpec.additionalArtifactNames ?? 
[]].map(n => new cpipeline.Artifact(n))\n        : undefined,\n    });\n    stage.addAction(codeBuildAction);\n    return codeBuildAction;\n  }\n\n  /**\n   * The contract of `environmentSecrets` is that the values are complete Secret ARNs;\n   * however, the CodeBuild construct expects secret names as the inputs for environment variables.\n   * This method converts the environment secrets from ARNs to names.\n   */\n  private convertEnvironmentSecretArnsToSecretNames(environmentSecrets?: { [key: string]: string }) {\n    if (!environmentSecrets) {\n      return undefined;\n    }\n\n    const out: { [key: string]: string } = { };\n    Object.entries(environmentSecrets ?? {}).forEach(([name, secretArn]) => {\n      const secret = aws_secretsmanager.Secret.fromSecretCompleteArn(this, `${name}SecretFromArn`, secretArn);\n      out[name] = secret.secretName;\n    });\n    return out;\n  }\n}\n\n/**\n * Platform archetype\n */\nexport enum PlatformType {\n  Linux = 'Linux',\n  Windows = 'Windows',\n}\n\n/**\n * The platform type to run the scripts on\n */\nexport abstract class ShellPlatform {\n  /**\n   * Return a default Ubuntu Linux platform\n   */\n  public static get LinuxUbuntu(): ShellPlatform {\n    // Cannot be static member because of initialization order\n    return new LinuxPlatform(cbuild.LinuxBuildImage.STANDARD_7_0);\n  }\n\n  /**\n   * Return a default Windows platform\n   */\n  public static get Windows(): ShellPlatform {\n    // Cannot be static member because of initialization order\n    return new WindowsPlatform(cbuild.WindowsBuildImage.WIN_SERVER_CORE_2019_BASE);\n  }\n\n  constructor(public readonly buildImage: cbuild.IBuildImage) {\n  }\n\n  /**\n   * Return commands to prepare the host for the shellable.\n   */\n  public abstract installCommands(): string[] | undefined;\n\n  /**\n   * Return commands to download the script bundle\n   */\n  public abstract prebuildCommands(assumeRole?: AssumeRole, useRegionalStsEndpoints?: boolean): string[];\n\n  
/**\n   * Return commands to start the entrypoint script\n   */\n  public abstract buildCommands(entrypoint: string, args?: string[]): string[];\n\n  /**\n   * Type of platform\n   */\n  public abstract get platformType(): PlatformType;\n}\n\n/**\n * A Linux Platform\n */\nexport class LinuxPlatform extends ShellPlatform {\n  public readonly platformType = PlatformType.Linux;\n\n  public installCommands(): string[] | undefined {\n    return [\n      'command -v yarn > /dev/null || npm install --global yarn',\n    ];\n  }\n\n  public prebuildCommands(assumeRole?: AssumeRole, useRegionalStsEndpoints?: boolean): string[] {\n    const lines = new Array<string>();\n    // Better echo the location here; if this fails, the error message only contains\n    // the unexpanded variables by default. It might fail if you're running an old\n    // definition of the CodeBuild project--the permissions will have been changed\n    // to only allow downloading the very latest version.\n    lines.push(`echo \"Downloading scripts from s3://\\${${S3_BUCKET_ENV}}/\\${${S3_KEY_ENV}}\"`);\n    lines.push(`aws s3 cp s3://\\${${S3_BUCKET_ENV}}/\\${${S3_KEY_ENV}} /tmp`);\n    lines.push('mkdir -p /tmp/scriptdir');\n    lines.push(`unzip /tmp/$(basename \\$${S3_KEY_ENV}) -d /tmp/scriptdir`);\n\n    if (assumeRole) {\n\n      if (assumeRole.refresh) {\n\n        const awsHome = '~/.aws';\n\n        const profileName = assumeRole.profileName ?? 
'long-running-profile';\n\n        lines.push(`mkdir -p ${awsHome}`);\n        lines.push(`touch ${awsHome}/credentials`);\n        lines.push(`config=${awsHome}/config`);\n        lines.push(`echo [profile ${profileName}]>> $\\{config\\}`);\n        lines.push('echo credential_source = EcsContainer >> $\\{config\\}');\n        lines.push(`echo role_session_name = ${assumeRole.sessionName} >> $\\{config\\}`);\n        lines.push(`echo role_arn = ${assumeRole.roleArn} >> $config`);\n\n        if (assumeRole.externalId) {\n          lines.push(`echo external_id = ${assumeRole.externalId} >> $config`);\n        }\n\n        // let the application code know which role is being used.\n        lines.push(`export AWS_PROFILE=${profileName}`);\n\n        // force the AWS SDK for JavaScript to actually load the config file (do automatically so users don't forget)\n        lines.push('export AWS_SDK_LOAD_CONFIG=1');\n\n      } else {\n\n        const externalId = assumeRole.externalId ? `--external-id \"${assumeRole.externalId}\"` : '';\n        const StsEndpoints = useRegionalStsEndpoints ? 'regional' : 'legacy';\n\n        lines.push('creds=$(mktemp -d)/creds.json');\n        lines.push(`AWS_STS_REGIONAL_ENDPOINTS=${StsEndpoints} aws sts assume-role --role-arn \"${assumeRole.roleArn}\" --role-session-name \"${assumeRole.sessionName}\" ${externalId} > $creds`);\n        lines.push('export AWS_ACCESS_KEY_ID=\"$(cat ${creds} | grep \"AccessKeyId\" | cut -d\\'\"\\' -f 4)\"');\n        lines.push('export AWS_SECRET_ACCESS_KEY=\"$(cat ${creds} | grep \"SecretAccessKey\" | cut -d\\'\"\\' -f 4)\"');\n        lines.push('export AWS_SESSION_TOKEN=\"$(cat ${creds} | grep \"SessionToken\" | cut -d\\'\"\\' -f 4)\"');\n      }\n    }\n\n    return lines;\n  }\n\n  public buildCommands(entrypoint: string, args?: string[]): string[] {\n    return [\n      'export SCRIPT_DIR=/tmp/scriptdir',\n      `echo \"Running ${entrypoint}\"`,\n      `/bin/bash /tmp/scriptdir/${entrypoint} ${(args ?? 
[]).join(' ')}`.trimRight(),\n    ];\n  }\n}\n\n/**\n * Options for WindowsPlatform\n */\nexport interface WindowsPlatformOptions {\n  /**\n   * Whether to upgrade Node.js using Chocolatey during the install phase.\n   *\n   * @default true\n   */\n  readonly upgradeNodeWithChocolatey?: boolean;\n}\n\n/**\n * A Windows Platform\n */\nexport class WindowsPlatform extends ShellPlatform {\n  public readonly platformType = PlatformType.Windows;\n  private readonly upgradeNodeWithChocolatey: boolean;\n\n  constructor(buildImage: cbuild.IBuildImage, options: WindowsPlatformOptions = {}) {\n    super(buildImage);\n    this.upgradeNodeWithChocolatey = options.upgradeNodeWithChocolatey ?? true;\n  }\n\n  public installCommands(): string[] | undefined {\n    if (!this.upgradeNodeWithChocolatey) {\n      return undefined;\n    }\n\n    return [\n      // Update the image's nodejs to the latest LTS release.\n      'Import-Module \"C:\\\\ProgramData\\\\chocolatey\\\\helpers\\\\chocolateyProfile.psm1\"',\n      'C:\\\\ProgramData\\\\chocolatey\\\\bin\\\\choco.exe upgrade nodejs-lts -y',\n    ];\n  }\n\n  public prebuildCommands(assumeRole?: AssumeRole, _useRegionalStsEndpoints?: boolean): string[] {\n    if (assumeRole) {\n      throw new Error('assumeRole is not supported on Windows: https://github.com/cdklabs/aws-delivlib/issues/57');\n    }\n\n    return [\n      // Would love to do downloading here and executing in the next step,\n      // but I don't know how to propagate the value of $TEMPDIR.\n      //\n      // Punting for someone who knows PowerShell well enough.\n    ];\n  }\n\n  public buildCommands(entrypoint: string, args?: string[]): string[] {\n    return [\n      'Set-Variable -Name TEMPDIR -Value (New-TemporaryFile).DirectoryName',\n      `aws s3 cp s3://$env:${S3_BUCKET_ENV}/$env:${S3_KEY_ENV} $TEMPDIR\\\\scripts.zip`,\n      'New-Item -ItemType Directory -Path $TEMPDIR\\\\scriptdir',\n      'Expand-Archive -Path $TEMPDIR/scripts.zip -DestinationPath 
$TEMPDIR\\\\scriptdir',\n      '$env:SCRIPT_DIR = \"$TEMPDIR\\\\scriptdir\"',\n      `& $TEMPDIR\\\\scriptdir\\\\${entrypoint} ${(args ?? []).join(' ')}`.trimRight(),\n    ];\n  }\n}\n"
  },
  {
    "path": "lib/signing/nuget/sign.sh",
    "content": "#!/bin/bash\nset -euo pipefail\n\necho \"Installing required CLI tools: jq\"\nif command -v yum &>/dev/null; then\n    yum install -y jq\nelif command -v apt-get &>/dev/null; then\n    apt-get update\n    apt-get install -y jq\nelse\n    echo \"!!! Neither an apt nor yum distribution - could not install jq, things might break!\"\nfi\n\nif [ -n \"${ACCESS_ROLE_ARN:-}\" ]; then\n  ROLE=$(aws sts assume-role --role-arn \"${ACCESS_ROLE_ARN:-}\" --role-session-name \"signer_access\")\n  export AWS_ACCESS_KEY_ID=$(echo $ROLE | jq -r .Credentials.AccessKeyId)\n  export AWS_SECRET_ACCESS_KEY=$(echo $ROLE | jq -r .Credentials.SecretAccessKey)\n  export AWS_SESSION_TOKEN=$(echo $ROLE | jq -r .Credentials.SessionToken)\nfi\n\nfound=false\nfor nuget_package_path in $(find dotnet -name *.nupkg -not -iname *.symbols.nupkg); do\n  found=true\n  nuget_package=$(cd $(dirname ${nuget_package_path}) && echo $PWD)/$(basename ${nuget_package_path})\n  echo \"🔑 Applying authenticode signatures to assemblies in ${nuget_package}\"\n  for file in $(unzip -Z1 ${nuget_package} '*.dll'); do\n    echo \"📄 Assembly: ${file}\"\n    tmp=$(mktemp -d)\n    # extract the dll from the zip file\n    unzip -q ${nuget_package} -d ${tmp} ${file}\n    # need to set appropriate permissions, otherwise the file has none\n    chmod u+rw ${tmp}/${file}\n    # upload dll to signer bucket\n    version_id=$(aws s3api put-object \\\n      --bucket ${SIGNING_BUCKET_NAME:-} \\\n      --key unsigned/${file} \\\n      --body ${tmp}/${file} | jq -r '.VersionId')\n    # invoke signer lambda\n    aws lambda invoke \\\n      --function-name ${SIGNING_LAMBDA_ARN:-} \\\n      --invocation-type RequestResponse \\\n      --cli-binary-format raw-in-base64-out \\\n      --payload '{ \"artifactKey\": \"'\"unsigned/${file}\"'\", \"artifactVersion\": \"'\"${version_id}\"'\", \"profileName\": \"'\"${SIGNER_PROFILE_NAME:-}\"'\", \"profileOwner\": \"'\"${SIGNER_PROFILE_OWNER:-}\"'\" }' \\\n      ${tmp}/response.json 
>/dev/null\n    signed_artifact_key=$(cat ${tmp}/response.json | jq -r '.signedArtifactKey')\n    # download signed dll from signer bucket\n    aws s3api get-object \\\n      --bucket ${SIGNING_BUCKET_NAME:-} \\\n      --key ${signed_artifact_key} \\\n      ${tmp}/${file} >/dev/null\n    # replace the dll in the nuget package\n    (\n      cd ${tmp}\n      zip -qfr ${nuget_package} ${file}\n    )\n    # clean up temporary directory\n    rm -rf ${tmp}\n  done\n  echo \"🔐 All Done!\"\ndone\n\nif ! ${found}; then\n  echo \"❌ No nupkg files found under the dotnet/ directory. Nothing to sign\"\n  exit 1\nfi\n"
  },
  {
    "path": "lib/signing-key.ts",
    "content": "import { aws_iam as iam, aws_kms as kms } from 'aws-cdk-lib';\nimport { Construct } from 'constructs';\nimport { OpenPGPKeyPair } from './open-pgp-key-pair';\n\n\n/**\n * Construction properties for a SigningKey\n */\nexport interface SigningKeyProps {\n  /**\n   * The AWS Secrets Manager secret name to use for this key.\n   *\n   * The secret will be named \"<scope>/SigningKey\".\n   *\n   * @default A unique secret name will be automatically generated\n   */\n  secretName?: string;\n\n  /**\n   * Name to put on key\n   */\n  identity: string;\n\n  /**\n   * Email address to put on key\n   */\n  email: string;\n}\n\n/**\n * A combination of a Secrets Manager secret and a unique KMS key per secret\n *\n * The KMS key is there to control access to the secret, as the secret\n * itself doesn't support resource policies yet.\n *\n * @deprecated Use the OpenPGPKeyPair class instead.\n */\nexport class OpenPgpKey extends Construct {\n  public readonly scope: string;\n\n  private readonly key: kms.IKey;\n  private readonly secret: OpenPGPKeyPair;\n\n  constructor(parent: Construct, name: string, props: SigningKeyProps) {\n    super(parent, name);\n\n    this.scope = props.secretName || this.node.addr;\n    const secretName = `${this.scope}/SigningKey`;\n\n    this.key = new kms.Key(this, 'Key', {\n      description: `Encryption key for PGP secret ${secretName}`,\n    });\n\n    // The key has an alias for descriptive purposes, but the alias is not used\n    this.key.addAlias(`alias/${secretName}Key`);\n\n    this.secret = new OpenPGPKeyPair(this, 'Secret', {\n      identity: props.identity,\n      email: props.email,\n      keySizeBits: 4096,\n      expiry: '4y',\n      secretName,\n      pubKeyParameterName: `/${secretName}.pub`,\n      encryptionKey: this.key,\n      version: 1,\n    });\n  }\n\n  public grantRead(identity: iam.IPrincipal) {\n    return this.secret.grantRead(identity);\n  }\n}\n"
  },
  {
    "path": "lib/signing.ts",
    "content": "import * as path from 'path';\nimport { IBuildImage, LinuxBuildImage, Project } from 'aws-cdk-lib/aws-codebuild';\nimport { Artifact, IStage } from 'aws-cdk-lib/aws-codepipeline';\nimport { CodeBuildAction } from 'aws-cdk-lib/aws-codepipeline-actions';\nimport { IRole } from 'aws-cdk-lib/aws-iam';\nimport { IFunction } from 'aws-cdk-lib/aws-lambda';\nimport { IBucket } from 'aws-cdk-lib/aws-s3';\nimport { Construct, IConstruct } from 'constructs';\nimport { BuildSpec } from './build-spec';\nimport { DEFAULT_SUPERCHAIN_IMAGE } from './constants';\nimport { AddToPipelineOptions } from './pipeline';\nimport { LinuxPlatform, Shellable } from './shellable';\n\nexport interface ISigner extends IConstruct {\n  addToPipeline(stage: IStage, id: string, options: AddToPipelineOptions): Artifact;\n}\n\nexport interface AddSigningOptions {\n  /**\n   * The input artifact to use\n   *\n   * @default Build output artifact\n   */\n  readonly inputArtifact?: Artifact;\n\n  /**\n   * Stage name to add signing job to\n   *\n   * @default \"Sign\"\n   */\n  readonly stageName?: string;\n}\n\nexport interface SignNuGetWithSignerProps {\n  /**\n   * An S3 bucket used to store signed and unsigned DLL files\n   */\n  readonly signingBucket: IBucket;\n\n  /**\n   * A Lambda function used to perform signing operations with AWS Signer\n   */\n  readonly signingLambda: IFunction;\n\n  /**\n   * A role used provide access to the signing bucket and signing lambda\n   */\n  readonly accessRole: IRole;\n\n  /**\n   * The name of the signer profile to use for signing\n   *\n   * @default no signing profile name\n   */\n  readonly signerProfileName?: string;\n\n  /**\n   * The owner of the signer profile to use for signing\n   *\n   * @default no signing profile owner\n   */\n  readonly signerProfileOwner?: string;\n\n  /*\n   * The service role that will be used to allow CodeBuild to perform operations\n   * on your behalf.\n   *\n   * @default A role will be created\n   */\n  
readonly serviceRole?: IRole;\n\n  /**\n   * The build image to do the signing in\n   *\n   * Needs to have NuGet preinstalled.\n   *\n   * @default Latest superchain\n   */\n  readonly buildImage?: IBuildImage;\n}\n\nexport class SignNuGetWithSigner extends Construct implements ISigner {\n  public readonly role: IRole;\n  public readonly project: Project;\n\n  public constructor(scope: Construct, id: string, props: SignNuGetWithSignerProps) {\n    super(scope, id);\n\n    const environment: { [key: string]: string } = {\n      SIGNING_BUCKET_NAME: props.signingBucket.bucketName,\n      SIGNING_LAMBDA_ARN: props.signingLambda.functionArn,\n      ACCESS_ROLE_ARN: props.accessRole.roleArn,\n    };\n\n    if (props.signerProfileName) {\n      environment.SIGNER_PROFILE_NAME = props.signerProfileName;\n    }\n\n    if (props.signerProfileOwner) {\n      environment.SIGNER_PROFILE_OWNER = props.signerProfileOwner;\n    }\n\n    const shellable = new Shellable(this, 'Default', {\n      platform: new LinuxPlatform(props.buildImage ?? LinuxBuildImage.fromDockerRegistry(DEFAULT_SUPERCHAIN_IMAGE)),\n      scriptDirectory: path.join(__dirname, 'signing', 'nuget'),\n      entrypoint: 'sign.sh',\n      serviceRole: props.serviceRole,\n      buildSpec: BuildSpec.literal({\n        version: '0.2',\n        artifacts: {\n          files: ['**/*'],\n          ['base-directory']: '.',\n        },\n      }),\n      environment,\n    });\n\n    this.role = shellable.role;\n    this.project = shellable.project;\n  }\n\n  public addToPipeline(stage: IStage, id: string, options: AddToPipelineOptions) {\n    const signingInput = options.inputArtifact || new Artifact();\n    const signingOutput = new Artifact();\n\n    stage.addAction(new CodeBuildAction({\n      actionName: id,\n      input: signingInput,\n      runOrder: options.runOrder,\n      project: this.project,\n      outputs: [signingOutput],\n    }));\n\n    return signingOutput;\n  }\n}"
  },
  {
    "path": "lib/util.ts",
    "content": "import * as crypto from 'crypto';\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport { aws_codebuild as cbuild } from 'aws-cdk-lib';\n\n\n/**\n * Determines the \"RunOrder\" property for the next action to be added to a stage.\n * @param index Index of new action\n * @param concurrency The concurrency limit\n */\nexport function determineRunOrder(index: number, concurrency?: number): number | undefined {\n  // no runOrder if we are at unlimited concurrency\n  if (concurrency === undefined) {\n    return undefined;\n  }\n\n  return Math.floor(index / concurrency) + 1;\n}\n\n/**\n * Hashes the contents of a file or directory. If the argument is a directory,\n * it is assumed not to contain symlinks that would result in a cyclic tree.\n *\n * @param fileOrDir the path to the file or directory that should be hashed.\n *\n * @returns a SHA256 hash, base-64 encoded.\n */\nexport function hashFileOrDirectory(fileOrDir: string): string {\n  const hash = crypto.createHash('SHA256');\n  hash.update(path.basename(fileOrDir)).update('\\0');\n  const stat = fs.statSync(fileOrDir);\n  if (stat.isDirectory()) {\n    for (const item of fs.readdirSync(fileOrDir).sort()) {\n      hash.update(hashFileOrDirectory(path.join(fileOrDir, item)));\n    }\n  } else {\n    hash.update(fs.readFileSync(fileOrDir));\n  }\n  return hash.digest('base64');\n}\n\nexport function renderEnvironmentVariables(env?: { [key: string]: string | undefined }, type?: cbuild.BuildEnvironmentVariableType) {\n  if (!env) {\n    return undefined;\n  }\n\n  const out: { [key: string]: cbuild.BuildEnvironmentVariable } = { };\n  for (const [key, value] of Object.entries(env)) {\n    if (value !== undefined) {\n      out[key] = { value, type };\n    }\n  }\n  return out;\n}\n\nexport function noUndefined<T extends object>(xs: T): { [k in keyof T]: NonNullable<T[k]> } {\n  const ret: any = {};\n  for (const [k, v] of Object.entries(xs)) {\n    if (v !== undefined) {\n      ret[k] = v;\n 
   }\n  }\n  return ret;\n}\n\nexport function mapValues<T, U>(xs: { [key: string]: T }, fn: (x: T) => U): { [key: string]: U } {\n  const ret: { [key: string]: U } = {};\n  for (const [k, v] of Object.entries(xs)) {\n    ret[k] = fn(v);\n  }\n  return ret;\n}\n\nexport function flatMap<T, U>(xs: T[], fn: (x: T) => U[]): U[] {\n  const ret = new Array<U>();\n  for (const x of xs) {\n    ret.push(...fn(x));\n  }\n  return ret;\n}\n"
  },
  {
    "path": "package.json",
    "content": "{\n  \"name\": \"aws-delivlib\",\n  \"description\": \"A fabulous library for defining continuous pipelines for building, testing and releasing code libraries.\",\n  \"repository\": {\n    \"type\": \"git\",\n    \"url\": \"https://github.com/cdklabs/aws-delivlib.git\"\n  },\n  \"scripts\": {\n    \"build\": \"npx projen build\",\n    \"build:publishing/github\": \"npx projen build:publishing/github\",\n    \"bump\": \"npx projen bump\",\n    \"bundle:package-integrity\": \"npx projen bundle:package-integrity\",\n    \"clobber\": \"npx projen clobber\",\n    \"compile\": \"npx projen compile\",\n    \"compile:custom-resource-handlers\": \"npx projen compile:custom-resource-handlers\",\n    \"default\": \"npx projen default\",\n    \"eject\": \"npx projen eject\",\n    \"eslint\": \"npx projen eslint\",\n    \"integ:diff\": \"npx projen integ:diff\",\n    \"integ:update\": \"npx projen integ:update\",\n    \"package\": \"npx projen package\",\n    \"post-compile\": \"npx projen post-compile\",\n    \"post-upgrade\": \"npx projen post-upgrade\",\n    \"pre-compile\": \"npx projen pre-compile\",\n    \"release\": \"npx projen release\",\n    \"test\": \"npx projen test\",\n    \"test:watch\": \"npx projen test:watch\",\n    \"unbump\": \"npx projen unbump\",\n    \"upgrade\": \"npx projen upgrade\",\n    \"upgrade-cdklabs-projen-project-types\": \"npx projen upgrade-cdklabs-projen-project-types\",\n    \"upgrade-dev-deps\": \"npx projen upgrade-dev-deps\",\n    \"watch\": \"npx projen watch\",\n    \"projen\": \"npx projen\",\n    \"cdk\": \"npx cdk\"\n  },\n  \"author\": {\n    \"name\": \"Amazon Web Services\",\n    \"email\": \"aws-cdk-dev@amazon.com\",\n    \"url\": \"https://aws.amazon.com\",\n    \"organization\": true\n  },\n  \"devDependencies\": {\n    \"@aws-sdk/client-cloudwatch\": \"^3.1042.0\",\n    \"@aws-sdk/client-codepipeline\": \"^3.1042.0\",\n    \"@aws-sdk/client-s3\": \"^3.1042.0\",\n    \"@aws-sdk/client-secrets-manager\": 
\"^3.1042.0\",\n    \"@aws-sdk/client-ssm\": \"^3.1042.0\",\n    \"@babel/plugin-transform-modules-commonjs\": \"^7.28.6\",\n    \"@stylistic/eslint-plugin\": \"^2\",\n    \"@types/adm-zip\": \"^0.5.8\",\n    \"@types/aws-lambda\": \"^8.10.161\",\n    \"@types/follow-redirects\": \"^1.14.4\",\n    \"@types/fs-extra\": \"^9.0.13\",\n    \"@types/jest\": \"^29.5.14\",\n    \"@types/node\": \"^18\",\n    \"@types/tar\": \"^6.1.13\",\n    \"@typescript-eslint/eslint-plugin\": \"^8\",\n    \"@typescript-eslint/parser\": \"^8\",\n    \"adm-zip\": \"^0.5.17\",\n    \"aws-cdk\": \"2.1120.0\",\n    \"aws-cdk-lib\": \"2.187.0\",\n    \"cdklabs-projen-project-types\": \"^0.3.7\",\n    \"commit-and-tag-version\": \"^12\",\n    \"constructs\": \"10.1.31\",\n    \"esbuild\": \"^0.28.0\",\n    \"eslint\": \"^9\",\n    \"eslint-import-resolver-typescript\": \"^2.7.1\",\n    \"eslint-plugin-import\": \"^2.32.0\",\n    \"follow-redirects\": \"^1.16.0\",\n    \"fs-extra\": \"^10.1.0\",\n    \"jest\": \"^29\",\n    \"jest-junit\": \"^16\",\n    \"JSONStream\": \"^1.3.5\",\n    \"minipass\": \"3.2.1\",\n    \"node-ical\": \"0.15.1\",\n    \"projen\": \"^0.98.4\",\n    \"rrule\": \"^2.8.1\",\n    \"standard-version\": \"^9\",\n    \"tar\": \"^6.2.1\",\n    \"ts-jest\": \"^29.4.9\",\n    \"ts-node\": \"^10.9.2\",\n    \"typescript\": \"~5.0.0\"\n  },\n  \"peerDependencies\": {\n    \"aws-cdk-lib\": \"^2.187.0\",\n    \"constructs\": \"^10.1.31\"\n  },\n  \"dependencies\": {\n    \"changelog-parser\": \"^2.8.1\"\n  },\n  \"keywords\": [\n    \"aws-cdk\",\n    \"ci-cd\",\n    \"continuous-delivery\",\n    \"continuous-integration\"\n  ],\n  \"engines\": {\n    \"node\": \">= 18.12.0\"\n  },\n  \"main\": \"lib/index.js\",\n  \"license\": \"Apache-2.0\",\n  \"publishConfig\": {\n    \"access\": \"public\"\n  },\n  \"version\": \"0.0.0\",\n  \"jest\": {\n    \"coverageProvider\": \"v8\",\n    \"testMatch\": [\n      \"<rootDir>/@(lib/__tests__)/**/*(*.)@(spec|test).js?(x)\",\n      
\"<rootDir>/@(lib/__tests__)/**/__tests__/**/*.js?(x)\",\n      \"<rootDir>/@(projenrc)/**/*(*.)@(spec|test).ts?(x)\",\n      \"<rootDir>/@(projenrc)/**/__tests__/**/*.ts?(x)\"\n    ],\n    \"clearMocks\": true,\n    \"collectCoverage\": true,\n    \"coverageReporters\": [\n      \"json\",\n      \"lcov\",\n      \"clover\",\n      \"cobertura\",\n      \"text\"\n    ],\n    \"coverageDirectory\": \"coverage\",\n    \"coveragePathIgnorePatterns\": [\n      \"/node_modules/\"\n    ],\n    \"testPathIgnorePatterns\": [\n      \"/node_modules/\"\n    ],\n    \"watchPathIgnorePatterns\": [\n      \"/node_modules/\",\n      \"/lib/\"\n    ],\n    \"reporters\": [\n      \"default\",\n      [\n        \"jest-junit\",\n        {\n          \"outputDirectory\": \"test-reports\"\n        }\n      ]\n    ],\n    \"snapshotResolver\": \"./.projen/jest-snapshot-resolver.js\",\n    \"transformIgnorePatterns\": [\n      \"node_modules/(?!(@nodable/entities)/)\"\n    ],\n    \"transform\": {\n      \"node_modules/@nodable/entities/.+\\\\.js$\": [\n        \"babel-jest\",\n        {\n          \"plugins\": [\n            \"@babel/plugin-transform-modules-commonjs\"\n          ]\n        }\n      ]\n    }\n  },\n  \"types\": \"lib/index.d.ts\",\n  \"//\": \"~~ Generated by projen. To modify, edit .projenrc.ts and run \\\"npx projen\\\".\"\n}\n"
  },
  {
    "path": "tsconfig.dev.json",
    "content": "// ~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\".\n{\n  \"compilerOptions\": {\n    \"alwaysStrict\": true,\n    \"declaration\": true,\n    \"esModuleInterop\": true,\n    \"experimentalDecorators\": true,\n    \"inlineSourceMap\": true,\n    \"inlineSources\": true,\n    \"lib\": [\n      \"es2020\"\n    ],\n    \"module\": \"CommonJS\",\n    \"noEmitOnError\": false,\n    \"noFallthroughCasesInSwitch\": true,\n    \"noImplicitAny\": true,\n    \"noImplicitReturns\": true,\n    \"noImplicitThis\": true,\n    \"noUnusedLocals\": true,\n    \"noUnusedParameters\": true,\n    \"resolveJsonModule\": true,\n    \"strict\": true,\n    \"strictNullChecks\": true,\n    \"strictPropertyInitialization\": true,\n    \"stripInternal\": true,\n    \"target\": \"ES2020\"\n  },\n  \"include\": [\n    \"lib/**/*.ts\",\n    \"lib/__tests__/**/*.ts\",\n    \".projenrc.ts\",\n    \"projenrc/**/*.ts\"\n  ],\n  \"exclude\": [\n    \"node_modules\"\n  ]\n}\n"
  },
  {
    "path": "tsconfig.json",
    "content": "// ~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\".\n{\n  \"compilerOptions\": {\n    \"rootDir\": \"lib\",\n    \"outDir\": \"lib\",\n    \"alwaysStrict\": true,\n    \"declaration\": true,\n    \"esModuleInterop\": true,\n    \"experimentalDecorators\": true,\n    \"inlineSourceMap\": true,\n    \"inlineSources\": true,\n    \"lib\": [\n      \"es2020\"\n    ],\n    \"module\": \"CommonJS\",\n    \"noEmitOnError\": false,\n    \"noFallthroughCasesInSwitch\": true,\n    \"noImplicitAny\": true,\n    \"noImplicitReturns\": true,\n    \"noImplicitThis\": true,\n    \"noUnusedLocals\": true,\n    \"noUnusedParameters\": true,\n    \"resolveJsonModule\": true,\n    \"strict\": true,\n    \"strictNullChecks\": true,\n    \"strictPropertyInitialization\": true,\n    \"stripInternal\": true,\n    \"target\": \"ES2020\"\n  },\n  \"include\": [\n    \"lib/**/*.ts\"\n  ],\n  \"exclude\": [\n    \"lib/publishing/github\"\n  ],\n  \"references\": [\n    {\n      \"path\": \"lib/publishing/github\"\n    }\n  ]\n}\n"
  }
]