[
  {
    "path": ".github/ISSUE_TEMPLATE/1-bug-report.yaml",
    "content": "name: 🐞 Bug report\ndescription: Create a bug report to help us improve Test-at-scale\ntitle: \"[Bug]: \"\nlabels: [bug, needs-triaging]\nassignees:\n- nevilm-lt\nbody:\n- type: markdown\n  attributes:\n    value: |\n      Thanks for taking the time to fill out this bug report! Please fill the form in English\n- type: checkboxes\n  attributes:\n    label: Is there an existing issue for this?\n    description: Please search to see if an issue already exists for the bug you encountered.\n    options:\n    - label: I have searched the existing issues\n      required: true\n- type: textarea\n  attributes:\n    label: What is the current behavior?\n    description: Add a brief description of what you are experiencing.\n  validations:\n    required: true\n- type: textarea\n  attributes:\n    label: What is the expected behavior?\n    description: A brief description of what you expect.\n  validations:\n    required: true\n- type: textarea\n  attributes:\n    label: Steps To Reproduce\n    description: Add steps to reproduce this behavior, include console / network logs & screenshots\n    placeholder: |\n      1. \n      2. \n      3.\n      4.\n  validations:\n    required: true\n- type: dropdown\n  id: version\n  attributes:\n    label: Version\n    options:\n      - Test-at-scale Cloud\n      - Test-at-scale Community Edition\n      - Test-at-scale Enterprise Edition\n  validations:\n    required: true\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/2-feature-request.yaml",
    "content": "name: 🛠️ Feature request\ndescription: Suggest an idea to improve Test-at-scale\ntitle: \"[Feature]: \"\nlabels: [enhancement]\nassignees:\n- anmol-LT\nbody:\n- type: checkboxes\n  attributes:\n    label: Is there an existing issue for this?\n    description: Please search to see if an issue related to this feature request already exists.\n    options:\n    - label: I have searched the existing issues\n      required: true\n- type: textarea\n  attributes:\n    label: What would you like to add?\n    description: A clear description of the feature or enhancement wanted in Test-at-scale.\n  validations:\n    required: true\n- type: textarea\n  attributes:\n    label: Why should this be worked on?\n    description: A concise description of the problems or use cases for this feature request. \n  validations:\n    required: true\n- type: textarea\n  attributes:\n    label: Other details\n  validations:\n    required: false\n"
  },
  {
    "path": ".github/ISSUE_TEMPLATE/3-documentation-improvement.yaml",
    "content": "name: 📖 Docs & Tutorials Improvement\ndescription: Suggest improvements to our docs and tutorials \ntitle: \"[Docs & Tutorials]: \"\nlabels: [docs-tutorials]\nassignees:\n- nevilm-lt\nbody:\n- type: markdown\n  attributes:\n    value: |\n      Thanks for taking the time to fill out this docs/tutorials improvement request!\n- type: checkboxes\n  attributes:\n    label: Is there an existing issue for this?\n    description: Please search to see if an issue related to this already exists.\n    options:\n    - label: I have searched the existing issues\n      required: true\n- type: input\n  attributes:\n    label: Doc/tutorial link \n    description: Add a link to the page which needs improvement (optional)\n  validations:\n    required: false\n- type: textarea\n  attributes:\n    label: Explain the problem\n    description: Is the documentation/tutorial missing? Or is it not clear? What is not clear?\n  validations:\n    required: true\n- type: textarea\n  attributes:\n    label: Your inputs to improve\n    description: Your inputs to improve/add new documentation or tutorial.  \n  validations:\n    required: true\n"
  },
  {
    "path": ".github/pull_request_template.md",
    "content": "# Issue Link\n\nAdd GitHub issue links here.\n\n# Description\n\nPlease include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change.\n\nFixes # (issue)\n\n## Type of change\n\nPlease delete options that are not relevant.\n\n- [ ] Bug fix (non-breaking change which fixes an issue)\n- [ ] New feature (non-breaking change which adds functionality)\n- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)\n- [ ] This change requires a documentation update\n\n# How Has This Been Tested?\n\nPlease describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration\n\n- [ ] Test A\n- [ ] Test B\n\n# Checklist:\n\n- [ ] My code follows the style guidelines of this project\n- [ ] I have performed a self-review of my own code\n- [ ] I have commented my code, particularly in hard-to-understand areas\n- [ ] I have made corresponding changes to the documentation\n- [ ] My changes generate no new warnings\n- [ ] I have added tests that prove my fix is effective or that my feature works\n- [ ] New and existing unit tests pass locally with my changes\n- [ ] Any dependent changes have been merged and published in downstream modules\n"
  },
  {
    "path": ".github/workflows/dockerhub-description.yml",
    "content": "name: Update Docker Hub Description\n\non:\n  push:\n    branches:\n      - main\n    paths:\n      - README.md\n      - .github/workflows/dockerhub-description.yml\n\njobs:\n  dockerHubDescription:\n    name: Update DockerHub Description - Nucleus\n    runs-on: ubuntu-latest\n    steps:\n\n    - name: Checkout Codebase\n      uses: actions/checkout@v3\n\n    - name: Docker Hub Description\n      uses: peter-evans/dockerhub-description@v3\n      with:\n        # DOCKER_PASSWORD is the actual password, not a token\n        # DOCKER_TOKEN is not supported for readme/docs updates in Docker Hub\n        username: ${{ secrets.DOCKER_USERNAME }}\n        password: ${{ secrets.DOCKER_PASSWORD }}\n        repository: lambdatest/nucleus\n        short-description: ${{ github.event.repository.description }}\n\n    - name: Docker Hub Description - Synapse\n      uses: peter-evans/dockerhub-description@v3\n      with:\n        username: ${{ secrets.DOCKER_USERNAME }}\n        password: ${{ secrets.DOCKER_PASSWORD }}\n        repository: lambdatest/synapse\n        short-description: ${{ github.event.repository.description }}\n"
  },
  {
    "path": ".github/workflows/env-release-nucleus.yml",
    "content": "name: Release to Environment Nucleus\non:\n  workflow_dispatch:\n    inputs:\n      environment:\n        description: 'Environment to Deploy'\n        required: true\n        type: choice\n        options:\n        - beta\n        - prod\n      version:\n        description: 'Version to be Published'\n        required: true\n        type: string\n\njobs:\n  env-release:\n\n    runs-on: ubuntu-latest\n\n    steps:\n\n      - name: Docker Login\n        uses: docker/login-action@v1.13.0\n        with:\n          username: ${{ secrets.DOCKER_USERNAME }}\n          password: ${{ secrets.DOCKER_TOKEN }}\n          logout: true\n\n      - name: Setup Environment\n        run: |\n          echo \"BOTNAME=Test-at-Scale Nucleus Promoted: Version ${VERSION} to ${ENVIRONMENT}\" >> $GITHUB_ENV\n          if [ ${ENVIRONMENT} == \"prod\" ] ; then\n            echo \"IMAGE_TAG=latest-base\" >> $GITHUB_ENV\n          else\n            echo \"IMAGE_TAG=${ENVIRONMENT}-base\" >> $GITHUB_ENV\n          fi\n        env:\n          ENVIRONMENT: ${{ github.event.inputs.environment }}\n          VERSION: ${{ github.event.inputs.version }}\n\n      - name: Promote Docker Image\n        run: |\n          docker pull lambdatest/nucleus:${VERSION}-base\n          docker tag lambdatest/nucleus:${VERSION}-base lambdatest/nucleus:${{ env.IMAGE_TAG }}\n          docker push lambdatest/nucleus:${{ env.IMAGE_TAG }}\n        env:\n          VERSION: ${{ github.event.inputs.version }}\n\n      - name: Build Cloud Runners\n        run: |\n          gh workflow run -R ${{ secrets.WF_REPO }} ${{ secrets.WF_NAME }} -r main -f environment=${ENVIRONMENT} -f version=${VERSION}\n        env:\n          GITHUB_TOKEN: ${{secrets.GH_API_TOKEN}}\n          ENVIRONMENT: ${{ github.event.inputs.environment }}\n          VERSION: ${{ github.event.inputs.version }}\n"
  },
  {
    "path": ".github/workflows/env-release-synapse.yml",
    "content": "name: Release to Environment Synapse\non:\n  workflow_dispatch:\n    inputs:\n      environment:\n        description: 'Environment to Deploy'\n        required: true\n        type: choice\n        options:\n        - beta\n        - prod\n      version:\n        description: 'Version to be Published'\n        required: true\n        type: string\n\njobs:\n  env-release:\n\n    runs-on: ubuntu-latest\n\n    steps:\n\n      - name: Docker Login\n        uses: docker/login-action@v1.13.0\n        with:\n          username: ${{ secrets.DOCKER_USERNAME }}\n          password: ${{ secrets.DOCKER_TOKEN }}\n          logout: true\n\n      - name: Setup Environment\n        run: |\n          echo \"BOTNAME=Test-at-Scale Synapse Promoted: Version ${VERSION} to ${ENVIRONMENT}\" >> $GITHUB_ENV\n          if [ ${ENVIRONMENT} == \"prod\" ] ; then\n            echo \"IMAGE_TAG=latest\" >> $GITHUB_ENV\n          else\n            echo \"IMAGE_TAG=${ENVIRONMENT}\" >> $GITHUB_ENV\n          fi\n        env:\n          ENVIRONMENT: ${{ github.event.inputs.environment }}\n          VERSION: ${{ github.event.inputs.version }}\n\n      - name: Promote Docker Image\n        run: |\n          docker pull lambdatest/synapse:${VERSION}\n          docker tag lambdatest/synapse:${VERSION} lambdatest/synapse:${{ env.IMAGE_TAG }}\n          docker push lambdatest/synapse:${{ env.IMAGE_TAG }}\n        env:\n          ENVIRONMENT: ${{ github.event.inputs.environment }}\n          VERSION: ${{ github.event.inputs.version }}\n"
  },
  {
    "path": ".github/workflows/premerge.yml",
    "content": "name: CI\n\non:\n  pull_request:\n    branches:\n      - main\n\njobs:\n\n  Linting:\n\n    name: Golang CI - Linting\n    runs-on: ubuntu-latest\n\n    steps:\n\n      - name: Install Go\n        uses: actions/setup-go@v2\n        with:\n          go-version: 1.17\n\n      - name: Checkout code\n        uses: actions/checkout@v2\n\n      - name: Run golangci-lint\n        uses: golangci/golangci-lint-action@v2.5.2\n        with:\n          version: latest\n          # skip cache because of flaky behaviors\n          skip-build-cache: true\n          skip-pkg-cache: true\n          skip-go-installation: true\n          only-new-issues: true\n          args: --skip-dirs=pkg/docs --timeout=3m\n\n  Unit_Test_Cases:\n    \n    name: Unit Test Cases\n    runs-on: ubuntu-latest\n    needs: [ Linting ]\n\n    steps:\n\n      - name: Install Go\n        uses: actions/setup-go@v2\n        with:\n          go-version: 1.17\n\n      - name: Checkout code\n        uses: actions/checkout@v2\n\n      - name: Unit Test Cases\n        env:\n          ENV: \"dev\"\n        run: go test ./... -parallel 4\n\n  Performance_Test_Cases:\n\n    name: Performance Test Cases\n    runs-on: ubuntu-latest\n    needs: [ Linting ]\n\n    steps:\n\n      - name: Install Go\n        uses: actions/setup-go@v2\n        with:\n          go-version: 1.17\n\n      - name: Checkout code\n        uses: actions/checkout@v2\n\n      - name: Performance Test Cases\n        env:\n          ENV: \"dev\"\n        run: go test ./... -parallel 4 -bench=. 
-benchmem\n\n  Test_Coverage:\n\n    name: Test Coverage\n    runs-on: ubuntu-latest\n    needs: [ Unit_Test_Cases, Performance_Test_Cases ]\n\n    steps:\n      - name: Install Go\n        uses: actions/setup-go@v2\n        with:\n          go-version: 1.17\n\n      - name: Checkout code\n        uses: actions/checkout@v2\n\n      - name: Test Code Coverage\n        env:\n          ENV: \"dev\"\n        run: |\n          go test -parallel 4 -coverpkg=./... -coverprofile=profile.cov ./...\n          go tool cover -func profile.cov\n\n  Go_Report_Card:\n\n    name: Go Report Card\n    runs-on: ubuntu-latest\n    needs: [ Unit_Test_Cases, Performance_Test_Cases ]\n\n    steps:\n      - name: Install Go\n        uses: actions/setup-go@v2\n        with:\n          go-version: 1.17\n\n      - name: Checkout code\n        uses: actions/checkout@v2\n\n      - name: Run Go Report Card\n        run: |\n          issues_threshold=6\n          gofmt_score_threshold=100\n          go_vet_score_threshold=100\n          gocyclo_score_threshold=93\n          git clone https://github.com/gojp/goreportcard.git\n          cd goreportcard\n          make install\n          go install ./cmd/goreportcard-cli\n          cd ..\n          rm -rf goreportcard\n          goreportcard-cli | tee reportcard.txt\n          files=$(cat reportcard.txt| grep 'Files ' | awk  '{print $3}' | tr -d \\%)\n          issues=$(cat reportcard.txt| grep 'Issues ' | awk  '{print $3}' | tr -d \\%)\n          gofmt_score=$(cat reportcard.txt| grep 'gofmt ' | awk  '{print $3}' | tr -d \\%)\n          go_vet_score=$(cat reportcard.txt| grep 'go_vet ' | awk  '{print $3}' | tr -d \\%)\n          gocyclo_score=$(cat reportcard.txt| grep 'gocyclo ' | awk  '{print $3}' | tr -d \\%)\n          rm reportcard.txt\n          failed_checks=0\n          failure_reason=\"\"\n          if [[ $issues -gt $issues_threshold ]]; then\n            failure_reason=\"${failure_reason}\\nIssues: $issues. 
Threshold was: $issues_threshold.\"\n            ((failed_checks+=1))\n          fi\n          if [[ $gofmt_score -lt $gofmt_score_threshold ]]; then\n            failure_reason=\"${failure_reason}\\ngo-fmt score: $gofmt_score. Threshold was: $gofmt_score_threshold.\"\n            ((failed_checks+=1))\n          fi\n          if [[ $go_vet_score -lt $go_vet_score_threshold ]]; then\n            failure_reason=\"${failure_reason}\\ngo-vet score: $go_vet_score. Threshold was: $go_vet_score_threshold.\"\n            ((failed_checks+=1))\n          fi\n          if [[ $gocyclo_score -lt $gocyclo_score_threshold ]]; then\n            failure_reason=\"${failure_reason}\\ngo-cyclo score: $gocyclo_score. Threshold was: $gocyclo_score_threshold.\"\n            ((failed_checks+=1))\n          fi\n          if [[ $failed_checks -gt 0 ]]; then\n            goreportcard-cli -v\n            printf \"\\n\\n\\n${failure_reason}\\nFrom the above output, filter out issues in your touched files and fix them.\"\n            exit 1\n          else\n            exit 0\n          fi\n"
  },
  {
    "path": ".github/workflows/pull_request_lint.yml",
    "content": "name: Pull Request Lint\n\non:\n  pull_request:\n    types: ['opened', 'edited', 'reopened', 'synchronize', 'labeled', 'unlabeled']\n\njobs:\n  label-checker:\n    name: pr label check\n    runs-on: ubuntu-latest\n\n    steps:\n      - name: PR Label Check\n        uses: yashhy/pr-label-check-and-comment-action@master\n        with:\n          required_labels: 'release:minor, release:major, release:patch'\n          GITHUB_TOKEN: '${{secrets.GITHUB_TOKEN}}'\n"
  },
  {
    "path": ".github/workflows/release-patch-wf.yml",
    "content": "# This workflow will release a new patch version of nucleus and synapse\nname: Release Patch Version\n\non:\n  workflow_dispatch:\n\njobs:\n\n  Release:\n\n    runs-on: ubuntu-latest\n\n    steps:\n\n      - name: Retrieving Release Type\n        run: |\n          echo \"RELEASE_TYPE=patch\" >> $GITHUB_ENV\n          echo \"Releasing: ${release_type}\"\n\n      - name: Inject slug/short variables\n        uses: rlespinasse/github-slug-action@v3.x\n\n      - name: Checkout\n        uses: actions/checkout@v2.4.0\n\n      - name: Bump version and push tag\n        id: tag_version\n        uses: mathieudutour/github-tag-action@v6.0\n        with:\n          github_token: ${{ secrets.GITHUB_TOKEN }}\n          default_bump: ${{ env.RELEASE_TYPE }}\n\n      - name: Build Cloud Runners\n        run: |\n          gh workflow run -R ${{ secrets.WF_REPO }} ${{ secrets.WF_NAME }} -r main -f environment=dev -f version=${{ steps.tag_version.outputs.new_tag }}\n        env:\n          GITHUB_TOKEN: ${{secrets.GH_API_TOKEN}}\n\n      - name: Setup Environment\n        run: |\n          echo \"BOTNAME=Test-at-Scale Deployment Status: Version ${{ steps.tag_version.outputs.new_tag }} to dev\" >> $GITHUB_ENV\n\n      - name: Setup Docker Image Tags\n        run: |\n          echo \"NUCLEUS_TAGS=lambdatest/nucleus:dev-base\",\"lambdatest/nucleus:${{ steps.tag_version.outputs.new_tag }}-base\" >> $GITHUB_ENV\n          echo \"SYNAPSE_TAGS=lambdatest/synapse:dev\",\"lambdatest/synapse:${{ steps.tag_version.outputs.new_tag }}\" >> $GITHUB_ENV\n\n      - name: Docker Login\n        uses: docker/login-action@v1.13.0\n        with:\n          username: ${{ secrets.DOCKER_USERNAME }}\n          password: ${{ secrets.DOCKER_TOKEN }}\n          logout: true\n\n      - name: Build and push Nucleus images\n        uses: docker/build-push-action@v2.9.0\n        with:\n          context: .\n          tags: ${{ env.NUCLEUS_TAGS }}\n          file: build/nucleus/Dockerfile\n          
push: true\n          build-args: |\n            VERSION=${{ steps.tag_version.outputs.new_tag }}\n\n      - name: Build and push Synapse images\n        uses: docker/build-push-action@v2.9.0\n        with:\n          context: .\n          tags: ${{ env.SYNAPSE_TAGS }}\n          file: build/synapse/Dockerfile\n          push: true\n          build-args: |\n            VERSION=${{ steps.tag_version.outputs.new_tag }}\n"
  },
  {
    "path": ".github/workflows/release.yml",
    "content": "name: Release on Dev\n\non:\n  push:\n    branches:\n      - main\n\njobs:\n\n  Release:\n\n    runs-on: ubuntu-latest\n\n    steps:\n\n      - name: Check Patch Label\n        id: check_pr_labels_patch\n        uses: shioyang/check-pr-labels-on-push-action@v1.0.3\n        with:\n          github-token: ${{ secrets.GITHUB_TOKEN }}\n          labels: '[\"release:patch\"]'\n\n      - name: Check Minor Label\n        id: check_pr_labels_minor\n        uses: shioyang/check-pr-labels-on-push-action@v1.0.3\n        with:\n          github-token: ${{ secrets.GITHUB_TOKEN }}\n          labels: '[\"release:minor\"]'\n\n      - name: Check Major Label\n        id: check_pr_labels_major\n        uses: shioyang/check-pr-labels-on-push-action@v1.0.3\n        with:\n          github-token: ${{ secrets.GITHUB_TOKEN }}\n          labels: '[\"release:major\"]'\n\n      - name: Release Type\n        run: |\n          if [ ${MAJOR} == \"true\" ] ; then\n            echo \"RELEASE_TYPE=major\" >> $GITHUB_ENV\n          elif [ ${MINOR} == \"true\" ] ; then\n            echo \"RELEASE_TYPE=minor\" >> $GITHUB_ENV\n          elif [ ${PATCH} == \"true\" ] ; then\n            echo \"RELEASE_TYPE=patch\" >> $GITHUB_ENV\n          else\n          echo \"RELEASE_TYPE=none\" >> $GITHUB_ENV\n          fi\n        env:\n          PATCH: ${{ steps.check_pr_labels_patch.outputs.result }}\n          MINOR: ${{ steps.check_pr_labels_minor.outputs.result }}\n          MAJOR: ${{ steps.check_pr_labels_major.outputs.result }}\n\n      - name: Testing Release Type\n        if: env.RELEASE_TYPE == 'none'\n        uses: actions/github-script@v3\n        with:\n          script: |\n              core.setFailed('Release labels were not present in the PR!')\n\n      - name: Inject slug/short variables\n        uses: rlespinasse/github-slug-action@v3.x\n\n      - name: Checkout\n        uses: actions/checkout@v2.4.0\n\n      - name: Bump version and push tag\n        id: tag_version\n        
uses: mathieudutour/github-tag-action@v6.0\n        with:\n          github_token: ${{ secrets.GITHUB_TOKEN }}\n          default_bump: ${{ env.RELEASE_TYPE }}\n\n      - name: Creating Github Release\n        uses: ncipollo/release-action@v1\n        with:\n          token: ${{ secrets.GITHUB_TOKEN }}\n          draft: false\n          generateReleaseNotes: true\n          prerelease: false\n          tag: ${{ steps.tag_version.outputs.new_tag }}\n          name: Release ${{ steps.tag_version.outputs.new_tag }}\n          body: ${{ steps.changelog.outputs.changelog }}\n\n      - name: Build Cloud Runners\n        run: |\n          gh workflow run -R ${{ secrets.WF_REPO }} ${{ secrets.WF_NAME }} -r main -f environment=dev -f version=${{ steps.tag_version.outputs.new_tag }}\n        env:\n          GITHUB_TOKEN: ${{secrets.GH_API_TOKEN}}\n\n      - name: Setup Environment\n        run: |\n          echo \"BOTNAME=Test-at-Scale Deployment Status: Version ${{ steps.tag_version.outputs.new_tag }} to dev\" >> $GITHUB_ENV\n\n      - name: Setup Docker Image Tags\n        run: |\n          echo \"NUCLEUS_TAGS=lambdatest/nucleus:dev-base\",\"lambdatest/nucleus:${{ steps.tag_version.outputs.new_tag }}-base\" >> $GITHUB_ENV\n          echo \"SYNAPSE_TAGS=lambdatest/synapse:dev\",\"lambdatest/synapse:${{ steps.tag_version.outputs.new_tag }}\" >> $GITHUB_ENV\n\n      - name: Docker Login\n        uses: docker/login-action@v1.13.0\n        with:\n          username: ${{ secrets.DOCKER_USERNAME }}\n          password: ${{ secrets.DOCKER_TOKEN }}\n          logout: true\n\n      - name: Build and push Nucleus images\n        uses: docker/build-push-action@v2.9.0\n        with:\n          context: .\n          tags: ${{ env.NUCLEUS_TAGS }}\n          file: build/nucleus/Dockerfile\n          push: true\n          build-args: |\n            VERSION=${{ steps.tag_version.outputs.new_tag }}\n\n      - name: Build and push Synapse images\n        uses: 
docker/build-push-action@v2.9.0\n        with:\n          context: .\n          tags: ${{ env.SYNAPSE_TAGS }}\n          file: build/synapse/Dockerfile\n          push: true\n          build-args: |\n            VERSION=${{ steps.tag_version.outputs.new_tag }}\n\n"
  },
  {
    "path": ".github/workflows/stale.yml",
    "content": "name: 'Close stale issues and PRs'\non:\n  schedule:\n    - cron: '30 1 * * *'\n\njobs:\n  stale:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: actions/stale@v3\n        with:\n          stale-issue-message: 'This issue is stale because it has been open for 60 days with no activity.'\n          stale-pr-message: 'This PR is stale because it has been open for 60 days with no activity.'\n          days-before-close: 1\n"
  },
  {
    "path": ".gitignore",
    "content": "# File created using '.gitignore Generator' for Visual Studio Code: https://bit.ly/vscode-gig\n\n# Created by https://www.gitignore.io/api/macos,visualstudiocode,go\n# Edit at https://www.gitignore.io/?templates=macos,visualstudiocode,go\n\n### Go ###\n# Binaries for programs and plugins\n*.exe\n*.exe~\n*.dll\n*.so\n*.dylib\n\n# Test binary, built with `go test -c`\n*.test\n\n# Output of the go coverage tool, specifically when used with LiteIDE\n*.out\n\n# Ignore all patch files\n*.patch\n\n### Go Patch ###\n/vendor/\n/Godeps/\n\n### macOS ###\n# General\n.DS_Store\n.AppleDouble\n.LSOverride\n\n# Icon must end with two \\r\nIcon\n\n# Thumbnails\n._*\n\n# Files that might appear in the root of a volume\n.DocumentRevisions-V100\n.fseventsd\n.Spotlight-V100\n.TemporaryItems\n.Trashes\n.VolumeIcon.icns\n.com.apple.timemachine.donotpresent\n\n# Directories potentially created on remote AFP share\n.AppleDB\n.AppleDesktop\nNetwork Trash Folder\nTemporary Items\n.apdisk\n\n### VisualStudioCode ###\n.vscode/*\n!.vscode/settings.json\n!.vscode/tasks.json\n!.vscode/launch.json\n!.vscode/extensions.json\n\n### VisualStudioCode Patch ###\n# Ignore all local history of files\n.history\n\n# End of https://www.gitignore.io/api/macos,visualstudiocode,go\n\n# Custom rules (everything added below won't be overriden by 'Generate .gitignore File' if you use 'Update' option)\n\nbuilds\n\n# ignore raven config files\n.mould*\n\n# ignore log file\nmould.log\n\n.ansible/ansible.cfg\n\n.env\n\n.synapse.json\nlogs\n"
  },
  {
    "path": ".golangci.yml",
    "content": "# Uncomment following lines after fixing linting issues in test files\n\nlinters-settings:\n  depguard:\n    list-type: blacklist\n  funlen:\n    lines: 100\n    statements: 50\n  gci:\n    local-prefixes: github.com/golangci/golangci-lint\n  goconst:\n    min-len: 2\n    min-occurrences: 2\n  gocritic:\n    enabled-tags:\n      - diagnostic\n      - experimental\n      - opinionated\n      - performance\n      - style\n    disabled-checks:\n      - dupImport # https://github.com/go-critic/go-critic/issues/845\n      - ifElseChain\n      - octalLiteral\n      - whyNoLint\n      - wrapperFunc\n  gocyclo:\n    min-complexity: 15\n  goimports:\n    local-prefixes: github.com/golangci/golangci-lint\n  gomnd:\n    settings:\n      mnd:\n        # don't include the \"operation\" and \"assign\"\n        checks:\n          - argument\n          - case\n          - condition\n          - return\n  govet:\n    check-shadowing: true\n    settings:\n      printf:\n        funcs:\n          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof\n          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf\n          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf\n          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf\n  lll:\n    line-length: 140\n  maligned:\n    suggest-new: true\n  misspell:\n    locale: US\n  nolintlint:\n    allow-leading-space: true # don't require machine-readable nolint directives (i.e. 
with no leading space)\n    allow-unused: false # report any unused nolint directives\n    require-explanation: false # don't require an explanation for nolint directives\n    require-specific: false # don't require nolint directives to be specific about which linter is being skipped\n\nlinters:\n  disable-all: true\n  enable:\n    - bodyclose\n    - deadcode\n    - depguard\n    - dogsled\n    - dupl\n    - errcheck\n    - exportloopref\n    - exhaustive\n    - funlen\n    - gochecknoinits\n    - goconst\n    - gocritic\n    - gocyclo\n    - gofmt\n    - goimports\n    - gomnd\n    - goprintffuncname\n    - gosec\n    - gosimple\n    - govet\n    - ineffassign\n    - lll\n    - misspell\n    - nakedret\n    - noctx\n    - nolintlint\n    - rowserrcheck\n    - staticcheck\n    - structcheck\n    - stylecheck\n    - typecheck\n    - unconvert\n    - unparam\n    - unused\n    - varcheck\n    - whitespace\n\n  # don't enable:\n  # - asciicheck\n  # - scopelint\n  # - gochecknoglobals\n  # - gocognit\n  # - godot\n  # - godox\n  # - goerr113\n  # - interfacer\n  # - maligned\n  # - nestif\n  # - prealloc\n  # - testpackage\n  # - revive\n  # - wsl\n\nissues:\n  # Excluding configuration per-path, per-linter, per-text and per-source\n  exclude-rules:\n    - path: _test\\.go\n      linters:\n        - gomnd\n\n    - path: pkg/golinters/errcheck.go\n      text: \"SA1019: errCfg.Exclude is deprecated: use ExcludeFunctions instead\"\n    - path: pkg/commands/run.go\n      text: \"SA1019: lsc.Errcheck.Exclude is deprecated: use ExcludeFunctions instead\"\n\nrun:\n  timeout: 2m\n  skip-dirs:\n    - test/testdata_etc\n    - internal/cache\n    - internal/renameio\n    - internal/robustio\n    - pkg/docs\n"
  },
  {
    "path": ".sample.synapse.json",
    "content": "{\n  \"Name\": \"my-synapse-1\",\n  \"LogConfig\": {\n    \"EnableConsole\": true,\n    \"ConsoleJSONFormat\": true,\n    \"Consolelevel\": \"debug\"\n  },\n  \"Lambdatest\": {\n    \"SecretKey\": \"add-your-secret-key-here\"\n  },\n  \"Git\": {\n    \"Token\": \"add-your-git-token-here\",\n    \"TokenType\": \"Bearer\"\n  },\n  \"ContainerRegistry\": {\n    \"PullPolicy\": \"always\",\n    \"Mode\": \"public\"\n  },\n  \"RepoSecrets\": {\n    \"synapse\": {\n      \"SAMPLE_SECRET_KEY\": \"sample_secret_value\"\n    }\n  }\n}"
  },
  {
    "path": ".vscode/settings.json",
    "content": "{\n    \"go.lintTool\": \"golangci-lint\",\n    \"go.lintFlags\": [\n        \"--fast\"\n    ],\n    \"go.testTimeout\": \"90s\"\n}"
  },
  {
    "path": "CHANGELOG.md",
    "content": ""
  },
  {
    "path": "CODE_OF_CONDUCT.md",
    "content": "\n# Contributor Covenant Code of Conduct\n\n## Our Pledge\n\nWe as members, contributors, and leaders of this open-source project pledge \nto make participation in our community and contribution to our project an \ninclusive experience for everyone, regardless of age, body size, visible or \ninvisible disability, ethnicity, gender identity, and expression, sexual identity \nand orientation, level of experience, education, socio-economic status, \nnationality, personal appearance, race, caste, color, or religion.\n\nWe pledge to act and interact in ways that contribute to an open, welcoming,\ndiverse, inclusive, and healthy community.\n\n## Our Standards\n\nExamples of behavior that contributes to a positive environment for our\ncommunity include:\n\n* Demonstrating empathy and kindness toward other people.\n* Being respectful of differing opinions, viewpoints, and experiences.\n* Giving and gracefully accepting constructive feedback.\n* Accepting responsibility and apologizing to those affected by our mistakes,\n  and learning from the experience.\n* Focusing on what is best not just for us as individuals, but for the overall\n  community.\n\nExamples of unacceptable behavior include:\n\n* The use of sexualized language or imagery, and sexual attention or advances of\n  any kind\n* Trolling, insulting or derogatory comments, and personal or political attacks\n* Public or private harassment\n* Publishing others' private information, such as a physical or email address,\n  without their explicit permission\n* Other conduct which could reasonably be considered inappropriate in a\n  professional setting\n\n## Enforcement Responsibilities\n\nCommunity leaders are responsible for clarifying and enforcing our standards of\nacceptable behavior and will take appropriate and fair corrective action in\nresponse to any behavior that they deem inappropriate, threatening, offensive,\nor harmful.\n\nCommunity leaders have the right and responsibility to remove, 
edit, or reject\ncomments, commits, code, wiki edits, issues, and other contributions that are\nnot aligned to this Code of Conduct, and will communicate reasons for moderation\ndecisions when appropriate.\n\n## Scope\n\nThis Code of Conduct applies within all community spaces, and also applies when\nan individual is officially representing the community in public spaces.\nExamples of representing our community include using an official e-mail address,\nposting via an official social media account, or acting as an appointed\nrepresentative at an online or offline event.\n\n## Enforcement\n\nInstances of abusive, harassing, or otherwise unacceptable behavior may be\nreported to the community leaders responsible for enforcement at\nhello.tas@lambdatest.com\nAll complaints will be reviewed and investigated promptly and fairly.\n\nAll community leaders are obligated to respect the privacy and security of the\nreporter of any incident.\n\n## Enforcement Guidelines\n\nCommunity leaders will follow these Community Impact Guidelines in determining\nthe consequences for any action they deem in violation of this Code of Conduct:\n\n### 1. Correction\n\n**Community Impact**: Use of inappropriate language or other behavior deemed\nunprofessional or unwelcome in the community.\n\n**Consequence**: A private, written warning from community leaders, providing\nclarity around the nature of the violation and an explanation of why the\nbehavior was inappropriate. A public apology may be requested.\n\n### 2. Warning\n\n**Community Impact**: A violation through a single incident or series of\nactions.\n\n**Consequence**: A warning with consequences for continued behavior. No\ninteraction with the people involved, including unsolicited interaction with\nthose enforcing the Code of Conduct, for a specified period of time. This\nincludes avoiding interactions in community spaces as well as external channels\nlike social media. 
Violating these terms may lead to a temporary or permanent\nban.\n\n### 3. Temporary Ban\n\n**Community Impact**: A serious violation of community standards, including\nsustained inappropriate behavior.\n\n**Consequence**: A temporary ban from any sort of interaction or public\ncommunication with the community for a specified period of time. No public or\nprivate interaction with the people involved, including unsolicited interaction\nwith those enforcing the Code of Conduct, is allowed during this period.\nViolating these terms may lead to a permanent ban.\n\n### 4. Permanent Ban\n\n**Community Impact**: Demonstrating a pattern of violation of community\nstandards, including sustained inappropriate behavior, harassment of an\nindividual, or aggression toward or disparagement of classes of individuals.\n\n**Consequence**: A permanent ban from any sort of public interaction within the\ncommunity.\n\n## Attribution\n\nThis Code of Conduct is adapted from the [Contributor Covenant][homepage],\nversion 2.1, available at\n[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].\n\nCommunity Impact Guidelines were inspired by\n[Mozilla's code of conduct enforcement ladder][Mozilla CoC].\n\nFor answers to common questions about this code of conduct, see the FAQ at\n[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at\n[https://www.contributor-covenant.org/translations][translations].\n\n[homepage]: https://www.contributor-covenant.org\n[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html\n[Mozilla CoC]: https://github.com/mozilla/diversity\n[FAQ]: https://www.contributor-covenant.org/faq\n[translations]: https://www.contributor-covenant.org/translations\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "# Contributing to Test-at-scale\n\nThank you for your interest in Test-at-scale and for taking the time to contribute to this project. If you feel insecure about how to start contributing, feel free to ask us on our [Discord Server](https://discord.gg/Wyf8srhf6K) in the #contribute channel.\n\n## **Code of conduct**\n\nRead our [Code of Conduct](CODE_OF_CONDUCT.md) before contributing.\n\n\n## **How can I contribute?**\n\nThere are many ways in which you can contribute to Test-at-scale.\n\n#### 👥 Join the community\n&emsp;&emsp;Join our [Discord server](https://discord.gg/Wyf8srhf6K), help others use Test-at-scale for their test automation requirements.\n\n#### 🗣️ Give a talk about Test-at-scale\n&emsp;&emsp;You can talk about Test-at-scale in online/offline meetups. Drop a line to [hello.tas@lambdatest.com](mailto:hello.tas@lambdatest.com) ahead of time and we'll send you some swag. 👕\n\n#### 🧩 Build an Add-on \n&emsp;&emsp;Enhance Test-at-scale’s capabilities by building add-ons to solve unique problems. \n\n#### 🐞 Report a bug\n&emsp;&emsp;Report all issues through GitHub Issues and provide as much information as you can.\n\n#### 🛠 Create a feature request\n&emsp;&emsp;We welcome all feature requests, whether for new features or enhancements to existing features. File your feature request through GitHub Issues.\n\n#### 📝 Improve the documentation\n&emsp;&emsp;Suggest improvements to our documentation using the [Documentation Improvement](https://github.com/LambdaTest/test-at-scale/issues/new) template. Test-at-scale docs are published on [here](https://www.lambdatest.com/support/docs/getting-started-with-tas/)\n\n\n#### 📚 Contribute to Tutorials \n&emsp;&emsp;You can help by suggesting improvements to our tutorials using the [Tutorials Improvement](https://github.com/LambdaTest/test-at-scale/issues/new) template or create a new tutorial. 
\n\n\n#### ⚙️ Write code to fix a Bug / new Feature Request\n&emsp;&emsp;We welcome contributions that help make Test-at-scale bug-free & improve the test automation experience for our users. You can also find issues tagged [Good First Issues](https://github.com/LambdaTest/test-at-scale/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22). Check out the below sections to begin.\n\n&emsp;\n\n## **Writing Code**\nAll submissions, including submissions by project members, require review. Before raising a pull request, ensure you have raised a corresponding issue and discussed a possible solution with a maintainer. This gives your pull request the highest chance of getting merged quickly. Join our [Discord Server](https://discord.gg/Wyf8srhf6K) if you need any help.\n\n \n### First-time contributors\nWe appreciate first-time contributors and we are happy to assist you in getting started. In case of questions, just [reach out to us!](https://discord.gg/Wyf8srhf6K)\nYou can find all issues suitable for first-time contributors [here.](https://github.com/LambdaTest/test-at-scale/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22)\n\n\n### Repo overview\n\n[LambdaTest/test-at-scale](https://github.com/LambdaTest/test-at-scale/) consists of 2 components:\n\n- **Synapse:** is the agent responsible for fetching jobs from Test at Scale servers to execute them on the self hosted environment (your laptop or your server farm). Synapse coordinates with nucleus (test runner) and TAS cloud to execute tests and push out test details such as test name, test suite, execution logs, execution metrics.\n- **Test Runners:** component is the driving agent of the container executed to run the actions received by synapse. All actions will be executed on Linux containers and itself manages the lifecycle of the container. It provides functionalities such as logging, metric collections, etc. It primarily conducts two primary stages viz. test discovery and test execution. 
Both of these stages are accomplished by using plugins for language and framework to make sure nucleus is not tightly coupled with specific languages.\n \n<details>\n<summary>Read More</summary>\nWe've engineered the platform such that you can setup the test-runners anywhere, from your local workstation to any cloud (AWS, Azure, GCP etc), as per your convenience. \n<p align=\"center\">\n<img loading=\"lazy\" src={require('https://www.lambdatest.com/support/assets/images/synapse-tas-interaction-a70a50f02b2e6e99491777ce636538f4.png').default} alt=\"Synapse Architecture\" width=\"1340\" height=\"617\" className=\"doc_img\"/>\n</p>\n\nWhen you configure TAS to run in a self-hosted environment, all the test execution jobs are executed inside your environment. Your code stays within your setup environment. To provide you with test-insights on the TAS portal we store information only related to tests like name of testFile, testCase, testSuite and execution logs. At no point, we collect business logic of your code.\n\n\nHere is a sample flow to understand how it works:\n- After configuring TAS self-hosted mode and integrating your repositories into the TAS platform.\n- Whenever you make a commit, raise a PR or merge a PR, the TAS platform receives a webhook event from your git provider.\n- This webhook event is simply sent to your self-hosted environment to initiate jobs for test execution.\n- The Test-at-scale binary running on your self hosted environment spawns containers to execute those jobs.\n- Basic test metadata is sent to the TAS server to provide you with test insights and other relevant statistics over the TAS dashboard.\n- Your code or business logic never leaves your setup environment.\n- As your workload increases you can add more servers running Test-at-scale binary, which will distribute the load amongst them automatically.\n- Routing: TAS platform will send the test execution jobs to the connected self hosted environments which are online and have enough 
resources to run the job.\n- If the resources are insufficient or fully occupied, the jobs will remain queued for 2.5 hours and keep checking for resource availability every 30 seconds.\n- If TAS platform is unable to find any connected self-hosted binary which can execute the job, it will be marked as failed.\n \n</details>\n\n### Set up your branch to write code\n\nWe use [Github Flow](https://guides.github.com/introduction/flow/index.html), so all code changes happen through pull requests. [Learn more.](https://blog.scottlowe.org/2015/01/27/using-fork-branch-git-workflow/) \n\n 1. Please make sure there is an issue associated with the work that you're doing. If it doesn’t exist, [create an issue.](https://github.com/LambdaTest/test-at-scale/issues)\n 2. If you're working on an issue, please comment that you are doing so to prevent duplicate work by others also.\n 3. Fork the repo and create a new branch from the `dev` branch.\n 4. Please name the branch as <span style=\"color:grey\">issue-[issue-number]-[issue-name(optional)]</span> or <span style=\"color:grey\">feature-[feature-number]–[feature-name(optional)]</span>. For example, if you are fixing Issue #205 name your branch as <span style=\"color:grey\">issue-205 or  issue-205-selectbox-handling-changes</span>\n 5. Squash your commits and refer to the issue using `Fix #<issue-no>` in the commit message, at the start.\n 6. Rebase `dev` with your branch and push your changes.\n 7. Raise a pull request against the `dev` branch of the main repository.\n\n\n## **Committing code**\n\nThe repository contains two important (protected) branches.\n\n * main contains the code that is tested and released. \n * dev contains recent developments under testing. This branch is set as the default branch, and all pull requests should be made against this branch.\n\nPull requests should be made against the <span style=\"color:grey\">dev</span> branch. 
<span style=\"color:grey\">staging</span> contains all of the new features and fixes that are under testing and ready to go out in the next release.\n\n\n#### **Commit & Create Pull Requests** \n\n 1. Please make sure there is an issue associated with the work that you're doing. If it doesn’t exist, [create an issue](https://github.com/LambdaTest/test-at-scale/issues).\n 2. Squash your commits and refer to the issue using `Fix #<issue-no>` in the commit message, at the start.\n 3. Rebase `dev` with your branch and push your changes.\n 4. Once you are confident in your code changes, create a pull request in your fork to the `dev` branch in the LambdaTest/test-at-scale base repository.\n 5. Link the issue of the base repository in your Pull request description. [Guide](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue)\n 6. Fill out the [Pull Request Template](./.github/pull_request_template.md) completely within the body of the PR. If you feel some areas are not relevant add `N/A` but don’t delete those sections.\n\n\n####  **Commit messages**\n\n- The first line should be a summary of the changes, not exceeding 50\n  characters, followed by an optional body that has more details about the\n  changes. Refer to [this link](https://github.com/erlang/otp/wiki/writing-good-commit-messages)\n  for more information on writing good commit messages.\n\n- Don't add a period/dot (.) at the end of the summary line.\n"
  },
  {
    "path": "LICENSE",
    "content": "Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      form, that is based on (or derived 
from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright 2021 LambdaTest Inc.\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n   \n"
  },
  {
    "path": "Makefile",
    "content": "NUCLEUS_DOCKER_FILE ?= ./build/nucleus/Dockerfile\nNUCLEUS_IMAGE_NAME ?= lambdatest/nucleus:latest\n\nSYNAPSE_DOCKER_FILE ?= ./build/synapse/Dockerfile\nSYNAPSE_IMAGE_NAME ?= lambdatest/synapse:latest\n\nREV_LIST ?= $(shell git rev-list --tags --max-count=1)\nVERSION ?= $(shell git describe --tags ${REV_LIST})\n\nusage:\t\t\t\t\t\t## Show this help\n\t@fgrep -h \"##\" $(MAKEFILE_LIST) | fgrep -v fgrep | sed -e 's/:.*##\\s*/##/g' | awk -F'##' '{ printf \"%-25s -> %s\\n\", $$1, $$2 }'\n\nlint:\t\t\t\t\t\t## Runs linting\n\tgolangci-lint run\n\nbuild-nucleus-image:\t\t## Builds nucleus docker image\n\tdocker build --build-arg VERSION=${VERSION}-dev -t ${NUCLEUS_IMAGE_NAME} --file $(NUCLEUS_DOCKER_FILE) .\n\nbuild-nucleus-bin:\t\t\t## Builds nucleus binary\n\tbash build/nucleus/build.sh\n\nbuild-synapse-image:\t\t## Builds synapse docker image\n\tdocker build --build-arg VERSION=${VERSION}-dev -t ${SYNAPSE_IMAGE_NAME} --file $(SYNAPSE_DOCKER_FILE) .\n\nbuild-synapse-bin:\t\t\t## Builds synapse binary\n\tbash build/synapse/build.sh\n\ninstall-mockery-mac:\n\tbrew install mockery\n\ninstall-mockery-linux:\n\tapt update && apt install -y mockery\n\ngen-mock-files:\n\tmockery --dir=./pkg --all\n"
  },
  {
    "path": "README.md",
    "content": "<p align=\"center\">\n  <img src=\"https://www.lambdatest.com/blog/wp-content/uploads/2020/08/LambdaTest-320-180.png\" />\n</p>\n<h1 align=\"center\">Test At Scale</h1>\n\n![N|Solid](https://www.lambdatest.com/resources/images/TAS_banner.png)\n\n<p align=\"center\">\n  <b>Test Smarter, Release Faster with test-at-scale.</b>\n</p>\n\n<p align=\"center\">\n  <a href=\"https://github.com/LambdaTest/test-at-scale/blob/main/LICENSE\"><img src=\"https://img.shields.io/badge/license-Apache%20License%202.0.-blue\" /></a>\n  <a href=\"https://github.com/LambdaTest/test-at-scale/blob/main/CONTRIBUTING.md\"><img src=\"https://img.shields.io/badge/contributions-welcome-brightgreen?logo=github\" /></a>\n  <a href=\"#build\"><img src=\"https://github.com/lambdatest/test-at-scale/actions/workflows/main.yml/badge.svg\" /></a>\n  <a href=\"#lint\"><img src=\"https://github.com/lambdatest/test-at-scale/actions/workflows/golangci-lint.yml/badge.svg\" /></a>\n  <a href=\"#stale\"><img src=\"https://github.com/lambdatest/test-at-scale/actions/workflows/stale.yml/badge.svg\" /></a>\n  <a href=\"https://discord.gg/Wyf8srhf6K\"><img src=\"https://img.shields.io/badge/Discord-5865F2\" /></a>\n\n</p>\n\n## Test at scale - TAS\nTAS helps you accelerate your testing, shorten job times and get faster feedback on code changes, manage flaky tests and keep master green at all times.\n<br/>\n\nTo learn more about TAS features and capabilities, see our [product page](https://www.lambdatest.com/test-at-scale). 
\n\n## Features\n- Smart test selection to run only the subset of tests which get impacted by a commit ⚡\n- Smart auto grouping of tests to evenly distribute test execution across multiple containers based on previous execution times\n- Deep insights about test runs and execution metrics\n- Support status checks for pull requests\n- Advanced analytics to surface test performance and quality data\n- YAML driven declarative workflow management\n- Natively integrates with Github and Gitlab\n- Flexible workflow to run pre-merge and post-merge tests\n- Allows blocking and unblocking tests directly from the UI or YAML directive. No more WIP commits!\n- Support for customizing testing environment using raw commands in pre and poststeps\n- Supports Javascript monorepos\n- Smart dependency caching to speed up subsequent test runs\n- Easily customizable to support all major languages and frameworks\n- Available as [hosted solution](https://lambdatest.com/test-at-scale) as well as self-hosted opensource runner\n- [Upcoming] Smart flaky test management 🪄\n\n## Table of contents \n- 🚀 [Getting Started](#getting-started)\n- 💡 [Tutorials](#tutorials)\n- 💖 [Contribute](#contribute)\n- 📖 [Docs](https://www.lambdatest.com/support/docs/tas-overview)\n\n## Getting Started\n\n### Step 1 - Setting up a New Account\n\nIn order to create an account, visit [TAS Login Page](https://tas.lambdatest.com/login/). (Or [TAS Home Page](https://tas.lambdatest.com/))\n- Login using a suitable git provider and select the organization you want to continue with.\n- Tell us your specialization, team size. 
\n\n  ![N|Solid](https://www.lambdatest.com/support/assets/images/signup_gi-f776f9b5a6ad4d877e6c427094969e1e.gif)\n  \n- Select **TAS Self Hosted** and click on Proceed.\n- You will find your **LambdaTest Secret Key** on this page which will be required in the next steps.\n\n  ![N|Solid](https://www.lambdatest.com/support/assets/images/synapse-e3e691d8f3d08ff6b3b2ced1a9ef61ed.gif)\n\n<br>\n\n### Step 2 - Creating a configuration file for self hosted setup\n\nBefore installation we need to create a file that will be used for configuring test-at-scale. \n\n- Open any `Terminal` of your choice.\n- Move to your desired directory or you can create a new directory and move to it using the following command.\n- Download our sample configuration file using the given command.\n\n```bash\nmkdir ~/test-at-scale\ncd ~/test-at-scale\ncurl https://raw.githubusercontent.com/LambdaTest/test-at-scale/main/.sample.synapse.json -o .synapse.json\n```\n\n- Open the downloaded `.synapse.json` configuration file in any editor of your choice such as `vi`, `nano`, `code`, etc.\n> **NOTE**: `.synapse.json` file is hidden by default. You can list it using `ls -la` command.\n- You will need to add the following in this file:\n  - 1- **LambdaTest Secret Key**, that you got at the end of **Step 1**.\n  - 2- **Git Token**, that would be required to clone the repositories after Step 3. Generating [GitHub](https://www.lambdatest.com/support/docs/tas-how-to-guides-gh-token), [GitLab](https://www.lambdatest.com/support/docs/tas-how-to-guides-gl-token) personal access token.\n- This file will also be used to store certain other parameters such as **Repository Secrets** (Optional), **Container Registry** (Optional) etc that might be required in configuring test-at-scale on your local/self-hosted environment. 
You can learn more about the configuration options [here](https://www.lambdatest.com/support/docs/tas-self-hosted-configuration#parameters).\n\n<br>\n\n### Step 3 - Installation\n\n#### Installation on Docker\n\n##### Prerequisites\n- [Docker](https://docs.docker.com/get-docker/) and [Docker-Compose](https://docs.docker.com/compose/install/) (Recommended)\n\n##### Docker Compose\n- Run the docker application.\n  \n  ```bash\n  docker info --format \"CPU: {{.NCPU}}, RAM: {{.MemTotal}}\"\n  ```\n- Execute the above command to ensure that resources usable by Docker are at least `CPU: 2, RAM: 4294967296`.\n  > **NOTE:** In order to run test-at-scale you require a minimum configuration of 2 CPU cores and 4 GiBs of RAM.\n\n- The `.synapse.json` configuration file made in [Step 2](#step-2---creating-a-configuration-file-for-self-hosted-setup) will be required before executing the next command.\n- Download and run the docker compose file using the following command.\n  \n  ```bash\n  cd ~/test-at-scale\n  curl -L https://raw.githubusercontent.com/LambdaTest/test-at-scale/main/docker-compose.yml -o docker-compose.yml\n  docker-compose up -d\n  ```\n\n> **NOTE:** This docker-compose file will pull the latest version of test-at-scale and install on your self hosted environment.\n\n<details id=\"docker\">\n<summary>Installation without <b>Docker Compose</b></summary>\n\nTo get up and running quickly, you can use the following instructions to setup Test at Scale on Self hosted environment without docker-compose.\n\n- The `.synapse.json` configuration file made in [Step 2](#step-2---creating-a-configuration-file-for-self-hosted-setup) will be required before executing the next command.\n- Execute the following command to run Test at Scale docker container\n\n```bash\ncd ~/test-at-scale\ndocker network create --internal test-at-scale\ndocker run --name synapse --restart always \\\n    -v /var/run/docker.sock:/var/run/docker.sock \\\n    -v /tmp/synapse:/tmp/synapse \\\n    -v 
${PWD}/.synapse.json:/home/synapse/.synapse.json \\\n    -v /etc/machine-id:/etc/machine-id \\\n    --network=test-at-scale \\\n    lambdatest/synapse:latest\n```\n> **WARNING:** We strongly recommend using docker-compose while running Test at Scale on a self-hosted environment.\n\n</details>  \n\n<details>\n<summary>Installation on <b> Local Machine </b> & <b> Supported Cloud Platforms </b> </summary>\n\n- Local Machine - Setup using [docker](#docker).\n- Setup on [Azure](https://www.lambdatest.com/support/docs/tas-self-hosted-installation#azure)\n- Setup on [AWS](https://www.lambdatest.com/support/docs/tas-self-hosted-installation#aws)\n- Setup on [GCP](https://www.lambdatest.com/support/docs/tas-self-hosted-installation#gcp)\n</details>\n\n- Once the installation is complete, go back to the TAS portal.\n- Click the 'Test Connection' button to ensure `test-at-scale` self hosted environment is connected and ready.\n- Hit `Proceed` to move forward to [Step 4](#step-4---importing-your-repo)\n\n<br>\n\n### Step 4 - Importing your repo\n> **NOTE:** Currently we support Mocha, Jest and Jasmine for testing Javascript codebases.\n- Click the Import button for the `JS` repository you want to integrate with TAS.\n- Once Imported successfully, click on `Go to Project` to proceed further.\n- You will be asked to setup a `post-merge` here. We recommend to proceed ahead with default settings. (You can change these later.) 
\n\n  ![N|Solid](https://www.lambdatest.com/support/assets/images/import-postmerge-c1b26a9e78a1b63dc23dd2129b16f9d6.gif)\n\n<br>\n\n### Step 5 - Configuring TAS yml\nA `.tas.yml` file is a basic yaml configuration file that contains steps required for installing necessary dependencies and executing the tests present in your repository.\n- In order to configure your imported repository, follow the steps given on the `.tas.yml`  configuration page.\n- You can also know more about `.tas.yml` configuration parameters [here](https://www.lambdatest.com/support/docs/tas-configuring-tas-yml).\n\n  ![N|Solid](https://www.lambdatest.com/support/assets/images/yml-download-6e7366b290de36ef8cb74f3d564850af.gif)\n  \n- Placing the `.tas.yml` configuration file.\n  - Create a new file as **.tas.yml** at the root level of your repository.\n  - **Copy** the configuration from the TAS yml configuration page and **paste** it into the **.tas.yml** file you just created.\n  - **Commit and Push** the changes to your repo.\n  \n  ![N|Solid](https://www.lambdatest.com/support/assets/images/yml_placing-72cd952b403e499a938151c955540e18.gif)\n\n## **Language & Framework Support**\nCurrently we support Mocha, Jest and Jasmine for testing Javascript codebases.\n\n## **Tutorials**\n- [Setting up your first repo on TAS - Cloud](https://www.lambdatest.com/support/docs/tas-getting-started-integrating-your-first-repo/) \n- [Setting up your first repo on TAS - Self Hosted](https://www.lambdatest.com/support/docs/tas-self-hosted-installation) \n- Sample repos : [Mocha](https://github.com/LambdaTest/mocha-demos), [Jest](https://github.com/LambdaTest/jest-demos), [Jasmine](https://github.com/LambdaTest/jasmine-node-js-example).\n- [How to configure a .tas.yml file](https://www.lambdatest.com/support/docs/tas-configuring-tas-yml)\n\n## **Contribute**\nWe love our contributors! 
If you'd like to contribute anything from a bug fix to a feature update, start here:\n\n- 📕 Read our Code of Conduct [Code of Conduct](https://github.com/LambdaTest/test-at-scale/blob/main/CODE_OF_CONDUCT.md).\n- 📖 Know more about [test-at-scale](https://github.com/LambdaTest/test-at-scale/blob/main/CONTRIBUTING.md#repo-overview) and contributing from our [Contribution Guide](https://github.com/LambdaTest/test-at-scale/blob/main/CONTRIBUTING.md).\n- 👾 Explore some good first issues [good first issues](https://github.com/LambdaTest/test-at-scale/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22).\n\n### **Join our community**\nEngage with Developers, SDETs, and Testers around the world. \n- Get the latest product updates. \n- Discuss testing philosophies and more. \nJoin the Test-at-scale Community on [Discord](https://discord.gg/Wyf8srhf6K). Click [here](https://discord.com/channels/940635450509504523/941297958954102846) if you are already an existing member.\n\n### **Support & Troubleshooting** \nThe documentation and community will help you troubleshoot most issues. If you have encountered a bug, you can contact us using one of the following channels:\n- Help yourself with our [Documentation](https://www.lambdatest.com/support/docs/tas-overview)📚, and [FAQs](https://www.lambdatest.com/support/docs/tas-faq-and-troubleshooting/).\n- In case of Issue & bugs go to [GitHub issues](https://github.com/LambdaTest/test-at-scale/issues)🐛.\n- For support & feedback join our [Discord](https://discord.gg/Wyf8srhf6K) or reach out to us on our [email](mailto:hello.tas@lambdatest.com)💬.\n\nWe are committed to fostering an open and welcoming environment in the community. Please see the Code of Conduct.\n\n## **License**\n\nTestAtScale is available under the [Apache License 2.0](https://github.com/LambdaTest/test-at-scale/blob/main/LICENSE). Use it wisely.\n"
  },
  {
    "path": "build/nucleus/Dockerfile",
    "content": "FROM golang:latest as builder\n\n# create a working directory\nCOPY . /nucleus\nWORKDIR /nucleus\n\n\n# Build binary\nRUN GOARCH=amd64 GOOS=linux go build -ldflags=\"-w -s\" -o nucleus cmd/nucleus/*.go\n# Uncomment only when build is highly stable. Compress binary.\n# RUN strip --strip-unneeded ts\n# RUN upx ts\n\n# use a minimal alpine image\nFROM nikolaik/python-nodejs:python3.10-nodejs16-slim\n\nARG VERSION\nENV VERSION=$VERSION\n\n# Installing chromium so that all linux libs get automatically installed for running puppeteer tests\nRUN apt update && apt install -y git zstd chromium curl unzip zip xmlstarlet build-essential\nRUN curl -LJO https://go.dev/dl/go1.18.3.linux-amd64.tar.gz\nRUN tar -C /usr/local -xzf go1.18.3.linux-amd64.tar.gz\n\nCOPY bundle /usr/local/bin/bundle\nRUN chmod +x /usr/local/bin/bundle\nENV SMART_BINARY=/usr/local/bin/bundle\n\n# Install Custom Runners\nRUN mkdir /custom-runners\nRUN mkdir /tmp/custom-runners\n\nWORKDIR /tmp/custom-runners\nRUN npm init -y\nRUN npm install -g pnpm\nRUN npm i --global-style --legacy-peer-deps \\\n    @lambdatest/test-at-scale-jasmine-runner@~0.3.0 \\\n    @lambdatest/test-at-scale-mocha-runner@~0.3.0 \\\n    @lambdatest/test-at-scale-jest-runner@~0.3.0\nRUN npm i -g nyc@^15.1.0\n\nRUN tar -zcf /custom-runners/custom-runners.tgz node_modules\nRUN rm -rf /tmp/custom-runners\nRUN mkdir /home/nucleus\nRUN mkdir /home/nucleus/.nvm\nENV NVM_DIR=/home/nucleus/.nvm\n\nENV GOROOT /usr/local/go\nENV GOPATH /home/nucleus\nENV PATH /usr/local/go/bin:/home/nucleus/bin:$PATH\n\nCOPY ./build/nucleus/golang/server /home/nucleus\n\nRUN chmod 744 /home/nucleus/server\n\n# install nvm for nucleus user\nRUN curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.1/install.sh | /bin/bash\n\nWORKDIR /home/nucleus\n# copy the binary from builder\nCOPY --from=builder /nucleus/nucleus /usr/local/bin/\n# run the binary\nCOPY ./build/nucleus/entrypoint.sh /\n\nRUN apt update -y && apt upgrade -y\n\nRUN  curl -s 
https://get.sdkman.io | bash\nRUN /bin/bash -c \"source $HOME/.sdkman/bin/sdkman-init.sh;sdk install java 18.0.1-oracle\"\n\nENV JAVA_HOME=\"/root/.sdkman/candidates/java/current\"\nENV PATH=$JAVA_HOME:$PATH\nENV PATH=$JAVA_HOME/bin:$PATH\n\nARG MAVEN_VERSION=3.6.3\n\n# Define a constant with the working directory\nARG USER_HOME_DIR=\"/root\"\n# Define the URL where maven can be downloaded from\nARG BASE_URL=https://apache.osuosl.org/maven/maven-3/${MAVEN_VERSION}/binaries\n\n# Create the directories, download maven, validate the download, install it, remove downloaded file and set links\nRUN mkdir -p /usr/share/maven /usr/share/maven/ref \\\n  && echo \"Downloading maven\" \\\n  && curl -fsSL -o /tmp/apache-maven.tar.gz ${BASE_URL}/apache-maven-${MAVEN_VERSION}-bin.tar.gz \\\n  \\\n  && echo \"Unzipping maven\" \\\n  && tar -xzf /tmp/apache-maven.tar.gz -C /usr/share/maven --strip-components=1 \\\n  \\\n  && echo \"Cleaning and setting links\" \\\n  && rm -f /tmp/apache-maven.tar.gz \\\n  && ln -s /usr/share/maven/bin/mvn /usr/bin/mvn\n\n# Define environmental variables required by Maven, like Maven_Home directory and where the maven repo is located\nENV MAVEN_HOME /usr/share/maven\nRUN mkdir -p /home/nucleus/.m2\n\n#update settings.xml file for new maven local repo location\nRUN xmlstarlet ed -O --inplace -N a='http://maven.apache.org/SETTINGS/1.0.0' -s /a:settings --type elem --name \"localRepository\" -v /home/nucleus/.m2/repository /usr/share/maven/conf/settings.xml\n\nCOPY ./build/nucleus/java/test-at-scale-java.jar /\nRUN curl -o /home/nucleus/junit-platform-console-standalone-1.8.2.jar https://repo1.maven.org/maven2/org/junit/platform/junit-platform-console-standalone/1.8.2/junit-platform-console-standalone-1.8.2.jar\nCOPY ./build/nucleus/entrypoint.sh /\nENTRYPOINT  [\"/bin/sh\", \"/entrypoint.sh\"]"
  },
  {
    "path": "build/nucleus/build.sh",
    "content": "#!/usr/bin\n# exit when any command fails\nset -e\n\n# keep track of the last executed command\ntrap 'last_command=$current_command; current_command=$BASH_COMMAND' DEBUG\n# echo an error message before exiting\ntrap 'echo \"\\\"${last_command}\\\" command filed with exit code $?.\"' EXIT\n\necho 'Building binary'\ngo build -o nucleus ./cmd/nucleus/*.go\necho 'Binary successfully build by the name of `nucleus`'\n"
  },
  {
    "path": "build/nucleus/entrypoint.sh",
    "content": "#!/bin/sh\n\nexec  /usr/local/bin/nucleus \"$@\"\n"
  },
  {
    "path": "build/nucleus/java/test-at-scale-java.jar",
    "content": ""
  },
  {
    "path": "build/synapse/Dockerfile",
    "content": "FROM golang:latest as builder\n\n# create a working directory\nCOPY . /synapse\nWORKDIR /synapse\n\n# Build binary\nRUN go build -o synapse cmd/synapse/*.go\n\n# use a minimal alpine image\nFROM docker:latest\n\nARG VERSION\nENV VERSION=$VERSION\n\n# add ca-certificates in case you need them\nRUN apk update && apk add ca-certificates && rm -rf /var/cache/apk/*\n\n# Create a group and user\nRUN addgroup -S synapse && adduser -S synapse -G synapse\n\n# set working directory\nWORKDIR /home/synapse\n\n# copy the binary from builder\nCOPY --chown=synapse:synapse --from=builder /synapse/synapse .\n\nCOPY ./build/synapse/entrypoint.sh /\n# run the binary\nENTRYPOINT  [\"/bin/sh\", \"/entrypoint.sh\"]"
  },
  {
    "path": "build/synapse/build.sh",
    "content": "#!/usr/bin\n# exit when any command fails\nset -e\n\n# keep track of the last executed command\ntrap 'last_command=$current_command; current_command=$BASH_COMMAND' DEBUG\n# echo an error message before exiting\ntrap 'echo \"\\\"${last_command}\\\" command filed with exit code $?.\"' EXIT\n\necho 'Building binary'\ngo build -o synapse ./cmd/synapse/*.go\necho 'Binary successfully build by the name of `synapse`'\n"
  },
  {
    "path": "build/synapse/entrypoint.sh",
    "content": "#!/bin/sh\nexec -- /home/synapse/synapse \"$@\"\n"
  },
  {
    "path": "bundle",
    "content": "\n"
  },
  {
    "path": "cmd/nucleus/bin.go",
    "content": "package main\n\n// this is cmd/root_cmd.go\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"log\"\n\t\"os\"\n\t\"os/signal\"\n\t\"path/filepath\"\n\t\"strings\"\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/api\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/azure\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/blocktestservice\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/cachemanager\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/command\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/diffmanager\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/driver\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/gitmanager\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/listsubmoduleservice\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/payloadmanager\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/requestutils\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/secret\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/server\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/service/coverage\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/service/teststats\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/tasconfigmanager\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/task\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/testdiscoveryservice\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/testexecutionservice\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/zstd\"\n\t\"github.com/cenkalti/backoff/v4\"\n\t\"github.com/spf13/cobra\"\n)\n\n// RootCommand will setup and return the root command\nfunc RootCommand() *cobra.Command {\n\trootCmd := cobra.Command{\n\t\tUse:     \"nucleus\",\n\t\tLong:    `nucleus is a coordinator binary used as entrypoint in tas containers`,\n\t\tVersion: global.NucleusBinaryVersion,\n\t\tRun:     run,\n\t}\n\n\t// define flags used for 
this command\n\tAttachCLIFlags(&rootCmd)\n\n\treturn &rootCmd\n}\n\nfunc run(cmd *cobra.Command, args []string) {\n\t// create a context that we can cancel\n\tctx, cancel := context.WithCancel(context.Background())\n\tdefer cancel()\n\n\t// timeout in seconds\n\tconst gracefulTimeout = 5000 * time.Millisecond\n\n\t// a WaitGroup for the goroutines to tell us they've stopped\n\twg := sync.WaitGroup{}\n\n\tcfg, err := config.LoadNucleusConfig(cmd)\n\tif err != nil {\n\t\tfmt.Printf(\"[Error] Failed to load config: \" + err.Error())\n\t\tos.Exit(1)\n\t}\n\n\t// patch logconfig file location with root level log file location\n\tif cfg.LogFile != \"\" {\n\t\tcfg.LogConfig.FileLocation = filepath.Join(cfg.LogFile, \"nucleus.log\")\n\t}\n\n\t// You can also use logrus implementation\n\t// by using lumber.InstanceLogrusLogger\n\tlogger, err := lumber.NewLogger(cfg.LogConfig, cfg.Verbose, lumber.InstanceZapLogger)\n\tif err != nil {\n\t\tlog.Fatalf(\"Could not instantiate logger %s\", err.Error())\n\t}\n\tlogger.Debugf(\"Running on local: %t\", cfg.LocalRunner)\n\n\tif cfg.LocalRunner {\n\t\tlogger.Infof(\"Local runner detected , changing IP from: %s to: %s\", global.NeuronHost, cfg.SynapseHost)\n\t\tglobal.SetNeuronHost(strings.TrimSpace(cfg.SynapseHost))\n\n\t\tlogger.Infof(\"change neuron host to %s\", global.NeuronHost)\n\t} else {\n\t\tglobal.SetNeuronHost(global.NeuronRemoteHost)\n\t}\n\tpl, err := core.NewPipeline(cfg, logger)\n\tif err != nil {\n\t\tlogger.Errorf(\"Unable to create the pipeline: %+v\\n\", err)\n\t\tlogger.Errorf(\"Aborting ...\")\n\t\tos.Exit(1)\n\t}\n\n\tts, err := teststats.New(cfg, logger)\n\tif err != nil {\n\t\tlogger.Fatalf(\"failed to initialize test stats service: %v\", err)\n\t}\n\tdefaultRequests := requestutils.New(logger, global.DefaultAPITimeout, backoff.NewExponentialBackOff())\n\n\tazureClient, err := azure.NewAzureBlobEnv(cfg, defaultRequests, logger)\n\tif err != nil {\n\t\tlogger.Fatalf(\"failed to initialize azure blob: %v\", 
err)\n\t}\n\tif err != nil && !cfg.LocalRunner {\n\t\tlogger.Fatalf(\"failed to initialize azure blob: %v\", err)\n\t}\n\n\t// attach plugins to pipeline\n\tpm := payloadmanager.NewPayloadManger(azureClient, logger, cfg, defaultRequests)\n\tsecretParser := secret.New(logger)\n\ttcm := tasconfigmanager.NewTASConfigManager(logger)\n\texecManager := command.NewExecutionManager(secretParser, azureClient, logger)\n\tgm := gitmanager.NewGitManager(logger, execManager)\n\tdm := diffmanager.NewDiffManager(cfg, logger)\n\n\ttdResChan := make(chan core.DiscoveryResult)\n\ttds := testdiscoveryservice.NewTestDiscoveryService(ctx, tdResChan, execManager, defaultRequests, logger)\n\ttes := testexecutionservice.NewTestExecutionService(cfg, defaultRequests, execManager, azureClient, ts, logger)\n\ttbs := blocktestservice.NewTestBlockTestService(cfg, defaultRequests, logger)\n\trouter := api.NewRouter(logger, ts, tdResChan)\n\n\tt, err := task.New(defaultRequests, logger)\n\tif err != nil {\n\t\tlogger.Fatalf(\"failed to initialize task: %v\", err)\n\t}\n\n\tzstd, err := zstd.New(execManager, logger)\n\tif err != nil {\n\t\tlogger.Fatalf(\"failed to initialize zstd compressor: %v\", err)\n\t}\n\tcache, err := cachemanager.New(zstd, azureClient, logger)\n\tif err != nil {\n\t\tlogger.Fatalf(\"failed to initialize cache manager: %v\", err)\n\t}\n\n\tcoverageService, err := coverage.New(execManager, azureClient, zstd, cfg, logger)\n\tif err != nil {\n\t\tlogger.Fatalf(\"failed to initialize coverage service: %v\", err)\n\t}\n\tlistsubmodule := listsubmoduleservice.New(defaultRequests, logger)\n\n\tbuilder := driver.Builder{\n\t\tLogger:               logger,\n\t\tTestExecutionService: tes,\n\t\tTestDiscoveryService: tds,\n\t\tAzureClient:          azureClient,\n\t\tBlockTestService:     tbs,\n\t\tExecutionManager:     execManager,\n\t\tTASConfigManager:     tcm,\n\t\tCacheStore:           cache,\n\t\tDiffManager:          dm,\n\t\tListSubModuleService: 
listsubmodule,\n\t}\n\n\tpl.PayloadManager = pm\n\tpl.TASConfigManager = tcm\n\tpl.GitManager = gm\n\tpl.DiffManager = dm\n\tpl.TestDiscoveryService = tds\n\tpl.BlockTestService = tbs\n\tpl.TestExecutionService = tes\n\tpl.ExecutionManager = execManager\n\tpl.CoverageService = coverageService\n\tpl.TestStats = ts\n\tpl.Task = t\n\tpl.CacheStore = cache\n\tpl.SecretParser = secretParser\n\tpl.Builder = &builder\n\n\tlogger.Infof(\"LambdaTest Nucleus version: %s\", global.NucleusBinaryVersion)\n\n\twg.Add(1)\n\tgo func() {\n\t\tdefer cancel()\n\t\tdefer wg.Done()\n\t\t// starting pipeline\n\t\tpl.Start(ctx)\n\t}()\n\twg.Add(1)\n\tgo func() {\n\t\tdefer cancel()\n\t\tdefer wg.Done()\n\t\tserver.ListenAndServe(ctx, router, cfg, logger)\n\t}()\n\t// listen for C-c\n\tc := make(chan os.Signal, 1)\n\tsignal.Notify(c, os.Interrupt)\n\n\t// create channel to mark status of waitgroup\n\t// this is required to brutally kill application in case of\n\t// timeout\n\tdone := make(chan struct{})\n\n\t// asynchronously wait for all the go routines\n\tgo func() {\n\t\t// and wait for all go routines\n\t\twg.Wait()\n\t\tlogger.Debugf(\"main: all goroutines have finished.\")\n\t\tclose(done)\n\t}()\n\n\t// wait for signal channel\n\tselect {\n\tcase <-c:\n\t\t{\n\t\t\tlogger.Debugf(\"main: received C-c - attempting graceful shutdown ....\")\n\t\t\t// tell the goroutines to stop\n\t\t\tlogger.Debugf(\"main: telling goroutines to stop\")\n\t\t\tcancel()\n\t\t\tselect {\n\t\t\tcase <-done:\n\t\t\t\tlogger.Debugf(\"Go routines exited within timeout\")\n\t\t\tcase <-time.After(gracefulTimeout):\n\t\t\t\tlogger.Errorf(\"Graceful timeout exceeded. Brutally killing the application\")\n\t\t\t}\n\n\t\t}\n\tcase <-done:\n\t\tos.Exit(0)\n\t}\n\n}\n"
  },
  {
    "path": "cmd/nucleus/flags.go",
    "content": "package main\n\nimport (\n\t\"github.com/spf13/cobra\"\n)\n\n//AttachCLIFlags attaches command line flags to command\nfunc AttachCLIFlags(rootCmd *cobra.Command) error {\n\n\trootCmd.PersistentFlags().StringP(\"config\", \"c\", \"\", \"the config file to use\")\n\trootCmd.PersistentFlags().StringP(\"port\", \"p\", \"\", \"Port for api server to run\")\n\trootCmd.PersistentFlags().StringP(\"payloadAddress\", \"l\", \"\", \"Payload address\")\n\trootCmd.PersistentFlags().String(\"subModule\", \"\", \"submodule of a repo\")\n\trootCmd.PersistentFlags().BoolP(\"verbose\", \"\", false, \"Run in verbose mode\")\n\trootCmd.PersistentFlags().BoolP(\"coverage\", \"\", false, \"Run coverage only mode\")\n\trootCmd.PersistentFlags().BoolP(\"discover\", \"\", false, \"Run nucleus in test discovery mode\")\n\trootCmd.PersistentFlags().BoolP(\"execute\", \"\", false, \"Run nucleus in test execution mode\")\n\trootCmd.PersistentFlags().BoolP(\"flaky\", \"\", false, \"Run nucleus in flaky mode\")\n\trootCmd.PersistentFlags().BoolP(\"collectStats\", \"\", false, \"Collect test execution metrics\")\n\trootCmd.PersistentFlags().IntP(\"consecutiveRuns\", \"\", 1, \"The consecutive test execution runs\")\n\n\trootCmd.PersistentFlags().StringP(\"env\", \"e\", \"prod\", \"Environment.\")\n\trootCmd.PersistentFlags().String(\"taskID\", \"\", \"The unique ID for a task\")\n\trootCmd.PersistentFlags().String(\"locators\", \"\", \"The test locators for a task\")\n\trootCmd.PersistentFlags().String(\"locatorAddress\", \"\", \"The test locators address for a task\")\n\trootCmd.PersistentFlags().String(\"buildID\", \"\", \"The unique ID for a build\")\n\trootCmd.PersistentFlags().String(\"targetCommit\", \"\", \"The target commit for nucleus\")\n\trootCmd.PersistentFlags().String(\"baseCommit\", \"\", \"The base commit for nucleus\")\n\trootCmd.PersistentFlags().StringP(\"synapsehost\", \"\", \"\", \"Local Ip of proxy server.\")\n\trootCmd.PersistentFlags().BoolP(\"local\", 
\"\", false, \"local mode\")\n\n\treturn nil\n}\n"
  },
  {
    "path": "cmd/nucleus/main.go",
    "content": "package main\n\nimport (\n\t\"log\"\n)\n\n// Main function just executes root command `ts`\n// this project structure is inspired from `cobra` package\nfunc main() {\n\tif err := RootCommand().Execute(); err != nil {\n\t\tlog.Fatal(err)\n\t}\n}\n"
  },
  {
    "path": "cmd/synapse/bin.go",
    "content": "package main\n\n// this is cmd/root_cmd.go\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"log\"\n\t\"os\"\n\t\"os/signal\"\n\t\"path/filepath\"\n\t\"strconv\"\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/cron\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/proxyserver\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/runner/docker\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/secrets\"\n\tsynapsepkg \"github.com/LambdaTest/test-at-scale/pkg/synapse\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/tasconfigdownloader\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/utils\"\n\t\"github.com/joho/godotenv\"\n\t\"github.com/spf13/cobra\"\n)\n\n// RootCommand will setup and return the root command\nfunc RootCommand() *cobra.Command {\n\trootCmd := cobra.Command{\n\t\tUse:     \"synapse\",\n\t\tLong:    `Synapse is an opensource runner for TAS`,\n\t\tVersion: global.SynapseBinaryVersion,\n\t\tRun:     run,\n\t}\n\n\t// define flags used for this command\n\tif err := AttachCLIFlags(&rootCmd); err != nil {\n\t\tfmt.Println(\"Error in attaching cli flags\")\n\t}\n\n\treturn &rootCmd\n}\n\nfunc run(cmd *cobra.Command, args []string) {\n\t// create a context that we can cancel\n\tctx, cancel := context.WithCancel(context.Background())\n\tdefer cancel()\n\t// set necessary os env\n\tsetEnv()\n\t// a WaitGroup for the goroutines to tell us they've stopped\n\twg := sync.WaitGroup{}\n\n\t// Load environment variables from .env if available\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tfmt.Printf(\"Warning: No .env file found\\n\")\n\t}\n\n\tcfg, err := config.LoadSynapseConfig(cmd)\n\tif err != nil {\n\t\tfmt.Printf(\"Failed to load config: %s\", err.Error())\n\t}\n\n\terr = config.LoadRepoSecrets(cmd, cfg)\n\tif err != nil {\n\t\tfmt.Printf(\"Error loading repository secrets: %v\", 
err)\n\t}\n\n\t// patch logconfig file location with root level log file location\n\tif cfg.LogFile != \"\" {\n\t\tcfg.LogConfig.FileLocation = filepath.Join(cfg.LogFile, \"synapse.log\")\n\t}\n\n\t// You can also use logrus implementation\n\t// by using lumber.InstanceLogrusLogger\n\tlogger, err := lumber.NewLogger(cfg.LogConfig, cfg.Verbose, lumber.InstanceZapLogger)\n\tif err != nil {\n\t\tlog.Fatalf(\"Could not instantiate logger %s\", err.Error())\n\t}\n\tif err := config.ValidateCfg(cfg, logger); err != nil {\n\t\tlogger.Fatalf(\"Error loading synapse config: %v\", err)\n\t}\n\tsecretsManager := secrets.New(cfg, logger)\n\n\trunner, err := docker.New(secretsManager, logger, cfg)\n\tif err != nil {\n\t\tlogger.Fatalf(\"could not instantiate k8s runner %v\", err)\n\t}\n\ttasConfigDownloader := tasconfigdownloader.New(logger)\n\tsynapse := synapsepkg.New(runner, logger, secretsManager, tasConfigDownloader)\n\n\tproxyHandler, err := proxyserver.NewProxyHandler(logger)\n\tif err != nil {\n\t\tlogger.Fatalf(\"Could not instantiate proxyhandler %v\", err)\n\t}\n\n\t// setting up cron handler\n\twg.Add(1)\n\tgo cron.Setup(ctx, &wg, logger, runner)\n\n\t// All attempts to connect to lambdatest server failed\n\tconnectionFailed := make(chan struct{})\n\n\twg.Add(1)\n\tgo synapse.InitiateConnection(ctx, &wg, connectionFailed)\n\n\twg.Add(1)\n\tgo func() {\n\t\tdefer cancel()\n\t\tdefer wg.Done()\n\t\tif err := proxyserver.ListenAndServe(ctx, proxyHandler, cfg, logger); err != nil {\n\t\t\tlogger.Fatalf(\"Error starting proxy server: %v\", err)\n\t\t}\n\t}()\n\n\t// listen for C-cInterrupt\n\tc := make(chan os.Signal, 1)\n\tsignal.Notify(c, os.Interrupt)\n\n\t// create channel to mark status of waitgroup\n\t// this is required to brutally kill application in case of\n\t// timeout\n\tdone := make(chan struct{})\n\n\t// asynchronously wait for all the go routines\n\tgo func() {\n\t\t// and wait for all go routines\n\t\twg.Wait()\n\t\tlogger.Debugf(\"main: all goroutines 
have finished.\")\n\t\tclose(done)\n\t}()\n\n\t// wait for signal channel\n\tselect {\n\tcase <-c:\n\t\t{\n\t\t\tlogger.Debugf(\"main: received OS Interrupt signal, attempting graceful shutdown ....\")\n\t\t\t// tell the goroutines to stop\n\t\t\tlogger.Debugf(\"main: telling goroutines to stop\")\n\t\t\tcancel()\n\t\t\tselect {\n\t\t\tcase <-done:\n\t\t\t\tlogger.Debugf(\"Go routines exited within timeout\")\n\t\t\tcase <-time.After(global.GracefulTimeout):\n\t\t\t\tlogger.Errorf(\"Graceful timeout exceeded. Brutally killing the application\")\n\t\t\t}\n\t\t}\n\n\tcase <-connectionFailed:\n\t\t{\n\t\t\tlogger.Debugf(\"main: all attempts to connect to lamdatest server failed ....\")\n\t\t\t// tell the goroutines to stop\n\t\t\tlogger.Debugf(\"main: telling goroutines to stop\")\n\t\t\tcancel()\n\t\t\tselect {\n\t\t\tcase <-done:\n\t\t\t\tlogger.Debugf(\"Go routines exited within timeout\")\n\t\t\tcase <-time.After(global.GracefulTimeout):\n\t\t\t\tlogger.Errorf(\"Graceful timeout exceeded. Brutally killing the application\")\n\t\t\t}\n\t\t\tos.Exit(0)\n\n\t\t}\n\tcase <-done:\n\t\tos.Exit(0)\n\t}\n\n}\n\nfunc setEnv() {\n\tos.Setenv(global.AutoRemoveEnv, strconv.FormatBool(global.AutoRemove))\n\tos.Setenv(global.LocalEnv, strconv.FormatBool(global.Local))\n\tos.Setenv(global.SynapseHostEnv, utils.GetOutboundIP())\n\tos.Setenv(global.NetworkEnvName, global.NetworkName)\n}\n"
  },
  {
    "path": "cmd/synapse/flags.go",
    "content": "package main\n\nimport (\n\t\"github.com/spf13/cobra\"\n)\n\n//AttachCLIFlags attaches command line flags to command\nfunc AttachCLIFlags(rootCmd *cobra.Command) error {\n\trootCmd.PersistentFlags().StringP(\"config\", \"c\", \"\", \"the config file to use\")\n\trootCmd.PersistentFlags().BoolP(\"verbose\", \"\", false, \"should every proxy request be logged to stdout\")\n\treturn nil\n}\n"
  },
  {
    "path": "cmd/synapse/main.go",
    "content": "package main\n\nimport (\n\t\"log\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n)\n\n// Main function just executes root command `ts`\n// this project structure is inspired from `cobra` package\nfunc main() {\n\tlog.Printf(\"Starting synapse %s\", global.SynapseBinaryVersion)\n\tif err := RootCommand().Execute(); err != nil {\n\t\tlog.Fatal(err)\n\t}\n}\n"
  },
  {
    "path": "config/default.go",
    "content": "package config\n\nimport (\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/spf13/viper\"\n)\n\nfunc setNucleusDefaultConfig() {\n\tviper.SetDefault(\"LogConfig.EnableConsole\", true)\n\tviper.SetDefault(\"LogConfig.ConsoleJSONFormat\", false)\n\tviper.SetDefault(\"LogConfig.ConsoleLevel\", \"debug\")\n\tviper.SetDefault(\"LogConfig.EnableFile\", true)\n\tviper.SetDefault(\"LogConfig.FileJSONFormat\", true)\n\tviper.SetDefault(\"LogConfig.FileLevel\", \"debug\")\n\tviper.SetDefault(\"LogConfig.FileLocation\", global.HomeDir+\"/nucleus.log\")\n\tviper.SetDefault(\"Env\", \"prod\")\n\tviper.SetDefault(\"Port\", \"9876\")\n\tviper.SetDefault(\"Verbose\", false)\n}\n\nfunc setSynapseDefaultConfig() {\n\tviper.SetDefault(\"LogConfig.EnableConsole\", true)\n\tviper.SetDefault(\"LogConfig.ConsoleJSONFormat\", false)\n\tviper.SetDefault(\"LogConfig.ConsoleLevel\", \"info\")\n\tviper.SetDefault(\"LogConfig.EnableFile\", true)\n\tviper.SetDefault(\"LogConfig.FileJSONFormat\", true)\n\tviper.SetDefault(\"LogConfig.FileLevel\", \"debug\")\n\tviper.SetDefault(\"LogConfig.FileLocation\", \"./mould.log\")\n\tviper.SetDefault(\"Env\", \"prod\")\n\tviper.SetDefault(\"Verbose\", false)\n}\n"
  },
  {
    "path": "config/loader.go",
    "content": "package config\n\nimport (\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io/ioutil\"\n\t\"strings\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/spf13/cobra\"\n\t\"github.com/spf13/viper\"\n)\n\n// GlobalNucleusConfig stores the config instance for global use\nvar GlobalNucleusConfig *NucleusConfig\n\n// GlobalSynapseConfig store the config instance for synapse global use\nvar GlobalSynapseConfig *SynapseConfig\n\ntype tempSecretReader struct {\n\tRepoSecrets map[string]map[string]string `json:\"RepoSecrets\" yaml:\"RepoSecrets\"`\n}\n\n// LoadNucleusConfig loads config from command instance to predefined config variables\nfunc LoadNucleusConfig(cmd *cobra.Command) (*NucleusConfig, error) {\n\terr := viper.BindPFlags(cmd.Flags())\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// default viper configs\n\tviper.SetEnvKeyReplacer(strings.NewReplacer(\".\", \"_\"))\n\tviper.AutomaticEnv()\n\n\t// set default configs\n\tsetNucleusDefaultConfig()\n\n\tif configFile, _ := cmd.Flags().GetString(\"config\"); configFile != \"\" {\n\t\tviper.SetConfigFile(configFile)\n\t} else {\n\t\tviper.SetConfigName(\".nucleus\")\n\t\tviper.AddConfigPath(\"./\")\n\t\tviper.AddConfigPath(\"$HOME/.nucleus\")\n\t}\n\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tfmt.Println(\"Warning: No configuration file found. 
Proceeding with defaults\")\n\t}\n\n\treturn populateNucleusConfig(new(NucleusConfig))\n}\n\n// LoadSynapseConfig loads config from command instance to predefined config variables\nfunc LoadSynapseConfig(cmd *cobra.Command) (*SynapseConfig, error) {\n\terr := viper.BindPFlags(cmd.Flags())\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// default viper configs\n\tviper.SetEnvPrefix(\"SYN\")\n\tviper.SetEnvKeyReplacer(strings.NewReplacer(\".\", \"_\"))\n\tviper.AutomaticEnv()\n\n\t// set default configs\n\tsetSynapseDefaultConfig()\n\n\tif configFile, _ := cmd.Flags().GetString(\"config\"); configFile != \"\" {\n\t\tviper.SetConfigFile(configFile)\n\t} else {\n\t\tviper.SetConfigName(\".synapse\")\n\t\tviper.AddConfigPath(\"./\")\n\t\tviper.AddConfigPath(\"$HOME/.synapse\")\n\t}\n\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tfmt.Println(\"Warning: No configuration file found. Proceeding with defaults\")\n\t}\n\treturn populateSynapseConfig(new(SynapseConfig))\n}\n\n// LoadRepoSecrets loads repo secrets from configuration file\nfunc LoadRepoSecrets(cmd *cobra.Command, synapseConfig *SynapseConfig) error {\n\tif configFile, _ := cmd.Flags().GetString(\"config\"); configFile != \"\" {\n\t\tviper.SetConfigFile(configFile)\n\t} else {\n\t\tviper.SetConfigName(\".synapse\")\n\t\tviper.AddConfigPath(\"./\")\n\t\tviper.AddConfigPath(\"$HOME/.synapse\")\n\t}\n\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tfmt.Println(\"Warning: No configuration file found. 
Proceeding with defaults\")\n\t}\n\n\tsecretFile, err := ioutil.ReadFile(viper.GetViper().ConfigFileUsed())\n\tif err != nil {\n\t\tfmt.Printf(\"error in reading config file: %v\\n\", err)\n\t}\n\n\tvar tempSecret tempSecretReader\n\tif err := json.Unmarshal(secretFile, &tempSecret); err != nil {\n\t\tfmt.Printf(\"error in umarshaling secrets: %v\\n\", err)\n\t}\n\n\tsynapseConfig.RepoSecrets = tempSecret.RepoSecrets\n\treturn nil\n}\n\n// ValidateCfg checks the validity of the config\nfunc ValidateCfg(cfg *SynapseConfig, logger lumber.Logger) error {\n\tif cfg.Lambdatest.SecretKey == \"\" {\n\t\treturn errors.New(\"error finding lambdatest secretkey in configuration file\")\n\t}\n\tif cfg.ContainerRegistry.Mode == \"\" {\n\t\treturn errors.New(\"error finding ContainerRegistry Mode in configuration file\")\n\t}\n\tif cfg.RepoSecrets == nil {\n\t\tlogger.Debugf(\"no RepoSecrets found in configuration file.\")\n\t\treturn nil\n\t}\n\treturn nil\n}\n"
  },
  {
    "path": "config/nucleusmodel.go",
    "content": "package config\n\nimport \"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\n// Model definition for configuration\n\n// NucleusConfig is the application's configuration\ntype NucleusConfig struct {\n\tConfig          string\n\tPort            string\n\tPayloadAddress  string `json:\"payloadAddress\"`\n\tCollectStats    bool   `json:\"collectStats\"`\n\tConsecutiveRuns int    `json:\"consecutiveRuns\"`\n\tLogFile         string\n\tLogConfig       lumber.LoggingConfig\n\tCoverageMode    bool   `json:\"coverage\"`\n\tDiscoverMode    bool   `json:\"discover\"`\n\tExecuteMode     bool   `json:\"execute\"`\n\tFlakyMode       bool   `json:\"flaky\"`\n\tTaskID          string `json:\"taskID\" env:\"TASK_ID\"`\n\tBuildID         string `json:\"buildID\" env:\"BUILD_ID\"`\n\tLocators        string `json:\"locators\"`\n\tLocatorAddress  string `json:\"locatorAddress\"`\n\tEnv             string\n\tVerbose         bool\n\tAzure           Azure  `env:\"AZURE\"`\n\tLocalRunner     bool   `env:\"local\"`\n\tSynapseHost     string `env:\"synapsehost\"`\n\tSubModule       string `json:\"subModule\"`\n}\n\n// Azure providers the storage configuration.\ntype Azure struct {\n\tContainerName      string `env:\"CONTAINER_NAME\"`\n\tStorageAccountName string `env:\"STORAGE_ACCOUNT\"`\n\tStorageAccessKey   string `env:\"STORAGE_ACCESS_KEY\"`\n}\n"
  },
  {
    "path": "config/parse.go",
    "content": "package config\n\nimport (\n\t\"errors\"\n\t\"fmt\"\n\t\"reflect\"\n\n\t\"github.com/spf13/viper\"\n)\n\nconst tagPrefix = \"viper\"\n\n// populateNucleusConfig is used to parse config read through viper\nfunc populateNucleusConfig(config *NucleusConfig) (*NucleusConfig, error) {\n\terr := recursivelySet(reflect.ValueOf(config), \"\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn config, nil\n}\n\n// populateSynapseConfig is used to parse config read through viper\nfunc populateSynapseConfig(config *SynapseConfig) (*SynapseConfig, error) {\n\terr := recursivelySet(reflect.ValueOf(config), \"\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn config, nil\n}\n\n// recursivelySet is used to recursively set conf read from\n// files to golang structs. Since nested values are accessed using periods\n// we need to recursively parse the values\nfunc recursivelySet(val reflect.Value, prefix string) error {\n\tif val.Kind() != reflect.Ptr {\n\t\treturn errors.New(\"WTF\")\n\t}\n\n\t// dereference\n\tval = reflect.Indirect(val)\n\tif val.Kind() != reflect.Struct {\n\t\treturn errors.New(\"FML\")\n\t}\n\n\t// grab the type for this instance\n\tvType := reflect.TypeOf(val.Interface())\n\n\t// go through child fields\n\tfor i := 0; i < val.NumField(); i++ {\n\t\tthisField := val.Field(i)\n\t\tthisType := vType.Field(i)\n\t\ttags := getTags(thisType)\n\t\t// try to fetch value for each key using multiple tags\n\t\tfor _, tag := range tags {\n\t\t\tkey := prefix + tag\n\t\t\tswitch thisField.Kind() {\n\t\t\tcase reflect.Struct:\n\t\t\t\tif err := recursivelySet(thisField.Addr(), key+\".\"); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\tcase reflect.Int:\n\t\t\t\tfallthrough\n\t\t\tcase reflect.Int32:\n\t\t\t\tfallthrough\n\t\t\tcase reflect.Int64:\n\t\t\t\t// you can only set with an int64 -> int\n\t\t\t\tconfigVal := int64(viper.GetInt(key))\n\t\t\t\t// skip the update if tag is not set in viper\n\t\t\t\tif viper.GetInt(key) == 0 && 
thisField.Int() != 0 {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tthisField.SetInt(configVal)\n\t\t\tcase reflect.String:\n\t\t\t\t// skip the update if tag is not set in viper\n\t\t\t\tif viper.GetString(key) == \"\" && thisField.String() != \"\" {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tthisField.SetString(viper.GetString(key))\n\t\t\tcase reflect.Bool:\n\t\t\t\t// skip the update if tag is not set in viper\n\t\t\t\tif !viper.GetBool(key) && thisField.Bool() {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tthisField.SetBool(viper.GetBool(key))\n\t\t\tcase reflect.Map:\n\t\t\t\tcontinue\n\t\t\tdefault:\n\t\t\t\treturn fmt.Errorf(\"unexpected type detected ~ aborting: %s\", thisField.Kind())\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}\n\nfunc getTags(field reflect.StructField) []string {\n\t// check if maybe we have a special magic tag\n\ttag := field.Tag\n\tvalues := []string{}\n\tif tag != \"\" {\n\t\tfor _, prefix := range []string{tagPrefix, \"yaml\", \"json\", \"env\", \"mapstructure\"} {\n\t\t\tif v := tag.Get(prefix); v != \"\" {\n\t\t\t\tvalues = append(values, v)\n\t\t\t}\n\t\t}\n\t\treturn values\n\t}\n\n\treturn []string{field.Name}\n}\n"
  },
  {
    "path": "config/parse_test.go",
    "content": "package config\n\nimport (\n\t\"reflect\"\n\t\"testing\"\n\n\t\"github.com/spf13/viper\"\n\t\"github.com/stretchr/testify/assert\"\n)\n\nfunc TestSimpleValues(t *testing.T) {\n\tc := struct {\n\t\tSimple string `json:\"simple\"`\n\t}{}\n\n\tviper.SetDefault(\"simple\", \"i am a simple string\")\n\n\tassert.Nil(t, recursivelySet(reflect.ValueOf(&c), \"\"))\n\tassert.Equal(t, \"i am a simple string\", c.Simple)\n}\n\nfunc TestNestedValues(t *testing.T) {\n\tc := struct {\n\t\tSimple string `json:\"simple\"`\n\t\tNested struct {\n\t\t\tBoolVal   bool   `json:\"bool\"`\n\t\t\tStringVal string `json:\"string\"`\n\t\t\tNumberVal int    `json:\"number\"`\n\t\t} `json:\"nested\"`\n\t}{}\n\n\tviper.SetDefault(\"simple\", \"simple\")\n\tviper.SetDefault(\"nested.bool\", true)\n\tviper.SetDefault(\"nested.string\", \"i am a simple string\")\n\tviper.SetDefault(\"nested.number\", 4)\n\n\tassert.Nil(t, recursivelySet(reflect.ValueOf(&c), \"\"))\n\tassert.Equal(t, \"simple\", c.Simple)\n\tassert.Equal(t, 4, c.Nested.NumberVal)\n\tassert.Equal(t, \"i am a simple string\", c.Nested.StringVal)\n\tassert.Equal(t, true, c.Nested.BoolVal)\n}\n"
  },
  {
    "path": "config/synapsemodel.go",
    "content": "package config\n\nimport \"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\n// Model definition for configuration\n\n// SynapseConfig is the application's configuration\ntype SynapseConfig struct {\n\tName              string\n\tConfig            string\n\tLogFile           string\n\tLogConfig         lumber.LoggingConfig\n\tEnv               string\n\tVerbose           bool\n\tLambdatest        LambdatestConfig\n\tGit               GitConfig\n\tContainerRegistry ContainerRegistryConfig\n\tRepoSecrets       map[string]map[string]string\n}\n\n// LambdatestConfig contains credentials for lambdatest\ntype LambdatestConfig struct {\n\tSecretKey string\n}\n\n// GitConfig contains git token\ntype GitConfig struct {\n\tToken     string\n\tTokenType string\n}\n\n// PullPolicyType defines when to pull docker image\ntype PullPolicyType string\n\n// ModeType defines type of container repo\ntype ModeType string\n\n// ContainerRegistryConfig contains repo configuration if private repo is used\ntype ContainerRegistryConfig struct {\n\tPullPolicy PullPolicyType\n\tMode       ModeType\n\tUsername   string\n\tPassword   string\n}\n\n// defines constant for docker config\nconst (\n\tPullAlways  PullPolicyType = \"always\"\n\tPullNever   PullPolicyType = \"never\"\n\tPrivateMode ModeType       = \"private\"\n\tPublicMode  ModeType       = \"public\"\n)\n"
  },
  {
    "path": "docker-compose.yml",
    "content": "version: \"3.9\" \nservices:\n  synapse:\n    image: lambdatest/synapse:latest\n    stop_signal: SIGINT\n    restart: on-failure\n    networks:\n      - test-at-scale\n    hostname: synapse\n    container_name: synapse\n    volumes:\n      # synapse needs socket access to create containers on host\n      - \"/var/run/docker.sock:/var/run/docker.sock\"\n      - \"/tmp/synapse:/tmp/synapse\"\n      - \".synapse.json:/home/synapse/.synapse.json\"\n      - \"/etc/machine-id:/etc/machine-id\"\n      - \"./logs/synapse:/var/log/synapse\"\n\nnetworks:\n  test-at-scale:\n    external: false\n    name: test-at-scale\n"
  },
  {
    "path": "go.mod",
    "content": "module github.com/LambdaTest/test-at-scale\n\ngo 1.17\n\nrequire (\n\tgithub.com/Azure/azure-sdk-for-go/sdk/azcore v0.21.1\n\tgithub.com/Azure/azure-sdk-for-go/sdk/storage/azblob v0.3.0\n\tgithub.com/bmatcuk/doublestar/v4 v4.0.2\n\tgithub.com/cenkalti/backoff/v4 v4.1.3\n\tgithub.com/denisbrodbeck/machineid v1.0.1\n\tgithub.com/docker/docker v20.10.12+incompatible\n\tgithub.com/docker/go-units v0.4.0\n\tgithub.com/gin-gonic/gin v1.7.7\n\tgithub.com/go-playground/locales v0.14.0\n\tgithub.com/go-playground/universal-translator v0.18.0\n\tgithub.com/go-playground/validator/v10 v10.10.0\n\tgithub.com/google/uuid v1.2.0\n\tgithub.com/gorilla/websocket v1.4.2\n\tgithub.com/joho/godotenv v1.4.0\n\tgithub.com/mholt/archiver/v3 v3.5.1\n\tgithub.com/robfig/cron/v3 v3.0.1\n\tgithub.com/shirou/gopsutil/v3 v3.21.1\n\tgithub.com/sirupsen/logrus v1.8.1\n\tgithub.com/spf13/cobra v1.3.0\n\tgithub.com/spf13/viper v1.10.1\n\tgithub.com/stretchr/testify v1.7.0\n\tgo.uber.org/zap v1.20.0\n\tgolang.org/x/sync v0.0.0-20210220032951-036812b2e83c\n\tgopkg.in/natefinch/lumberjack.v2 v2.0.0\n\tgopkg.in/yaml.v3 v3.0.0\n)\n\nrequire (\n\tgithub.com/Azure/azure-sdk-for-go/sdk/internal v0.8.3 // indirect\n\tgithub.com/Microsoft/go-winio v0.4.17 // indirect\n\tgithub.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d // indirect\n\tgithub.com/andybalholm/brotli v1.0.1 // indirect\n\tgithub.com/containerd/containerd v1.5.10 // indirect\n\tgithub.com/davecgh/go-spew v1.1.1 // indirect\n\tgithub.com/docker/distribution v2.8.0+incompatible // indirect\n\tgithub.com/docker/go-connections v0.4.0 // indirect\n\tgithub.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 // indirect\n\tgithub.com/fsnotify/fsnotify v1.5.1 // indirect\n\tgithub.com/gin-contrib/sse v0.1.0 // indirect\n\tgithub.com/go-ole/go-ole v1.2.6 // indirect\n\tgithub.com/gogo/protobuf v1.3.2 // indirect\n\tgithub.com/golang/protobuf v1.5.2 // indirect\n\tgithub.com/golang/snappy v0.0.3 // 
indirect\n\tgithub.com/gorilla/mux v1.8.0 // indirect\n\tgithub.com/hashicorp/hcl v1.0.0 // indirect\n\tgithub.com/inconshreveable/mousetrap v1.0.0 // indirect\n\tgithub.com/json-iterator/go v1.1.12 // indirect\n\tgithub.com/klauspost/compress v1.11.13 // indirect\n\tgithub.com/klauspost/pgzip v1.2.5 // indirect\n\tgithub.com/leodido/go-urn v1.2.1 // indirect\n\tgithub.com/magiconair/properties v1.8.5 // indirect\n\tgithub.com/mattn/go-isatty v0.0.14 // indirect\n\tgithub.com/mitchellh/mapstructure v1.4.3 // indirect\n\tgithub.com/moby/term v0.0.0-20210619224110-3f7ff695adc6 // indirect\n\tgithub.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect\n\tgithub.com/modern-go/reflect2 v1.0.2 // indirect\n\tgithub.com/morikuni/aec v1.0.0 // indirect\n\tgithub.com/nwaples/rardecode v1.1.0 // indirect\n\tgithub.com/opencontainers/go-digest v1.0.0 // indirect\n\tgithub.com/opencontainers/image-spec v1.0.2 // indirect\n\tgithub.com/pelletier/go-toml v1.9.4 // indirect\n\tgithub.com/pierrec/lz4/v4 v4.1.2 // indirect\n\tgithub.com/pkg/errors v0.9.1 // indirect\n\tgithub.com/pmezard/go-difflib v1.0.0 // indirect\n\tgithub.com/spf13/afero v1.6.0 // indirect\n\tgithub.com/spf13/cast v1.4.1 // indirect\n\tgithub.com/spf13/jwalterweatherman v1.1.0 // indirect\n\tgithub.com/spf13/pflag v1.0.5 // indirect\n\tgithub.com/stretchr/objx v0.2.0 // indirect\n\tgithub.com/subosito/gotenv v1.2.0 // indirect\n\tgithub.com/ugorji/go/codec v1.1.7 // indirect\n\tgithub.com/ulikunitz/xz v0.5.9 // indirect\n\tgithub.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect\n\tgo.uber.org/atomic v1.7.0 // indirect\n\tgo.uber.org/multierr v1.6.0 // indirect\n\tgolang.org/x/crypto v0.0.0-20210817164053-32db794688a5 // indirect\n\tgolang.org/x/net v0.0.0-20210813160813-60bc85c4be6d // indirect\n\tgolang.org/x/sys v0.0.0-20220111092808-5a964db01320 // indirect\n\tgolang.org/x/text v0.3.7 // indirect\n\tgoogle.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa // 
indirect\n\tgoogle.golang.org/grpc v1.43.0 // indirect\n\tgoogle.golang.org/protobuf v1.27.1 // indirect\n\tgopkg.in/ini.v1 v1.66.2 // indirect\n\tgopkg.in/yaml.v2 v2.4.0 // indirect\n\tgotest.tools/v3 v3.1.0 // indirect\n)\n"
  },
  {
    "path": "go.sum",
    "content": "bazil.org/fuse v0.0.0-20160811212531-371fbbdaa898/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8=\ncloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=\ncloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=\ncloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=\ncloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=\ncloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=\ncloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=\ncloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=\ncloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=\ncloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4=\ncloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=\ncloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc=\ncloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk=\ncloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs=\ncloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc=\ncloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY=\ncloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI=\ncloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk=\ncloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg=\ncloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8=\ncloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0=\ncloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY=\ncloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM=\ncloud.google.com/go 
v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY=\ncloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ=\ncloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI=\ncloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4=\ncloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc=\ncloud.google.com/go v0.98.0/go.mod h1:ua6Ush4NALrHk5QXDWnjvZHN93OuF0HfuEPq9I1X0cM=\ncloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA=\ncloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=\ncloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=\ncloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=\ncloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=\ncloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=\ncloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ=\ncloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=\ncloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=\ncloud.google.com/go/firestore v1.6.1/go.mod h1:asNXNOzBdyVQmEU+ggO8UPodTkEVFW5Qx+rwHnAz+EY=\ncloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=\ncloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=\ncloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA=\ncloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU=\ncloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=\ncloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=\ncloud.google.com/go/storage v1.6.0/go.mod 
h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=\ncloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=\ncloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=\ndmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=\ngithub.com/Azure/azure-sdk-for-go v16.2.1+incompatible h1:KnPIugL51v3N3WwvaSmZbxukD1WuWXOiE9fRdu32f2I=\ngithub.com/Azure/azure-sdk-for-go v16.2.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=\ngithub.com/Azure/azure-sdk-for-go/sdk/azcore v0.21.1 h1:qoVeMsc9/fh/yhxVaA0obYjVH/oI/ihrOoMwsLS9KSA=\ngithub.com/Azure/azure-sdk-for-go/sdk/azcore v0.21.1/go.mod h1:fBF9PQNqB8scdgpZ3ufzaLntG0AG7C1WjPMsiFOmfHM=\ngithub.com/Azure/azure-sdk-for-go/sdk/internal v0.8.3 h1:E+m3SkZCN0Bf5q7YdTs5lSm2CYY3CK4spn5OmUIiQtk=\ngithub.com/Azure/azure-sdk-for-go/sdk/internal v0.8.3/go.mod h1:KLF4gFr6DcKFZwSuH8w8yEK6DpFl3LP5rhdvAb7Yz5I=\ngithub.com/Azure/azure-sdk-for-go/sdk/storage/azblob v0.3.0 h1:Px2UA+2RvSSvv+RvJNuUB6n7rs5Wsel4dXLe90Um2n4=\ngithub.com/Azure/azure-sdk-for-go/sdk/storage/azblob v0.3.0/go.mod h1:tPaiy8S5bQ+S5sOiDlINkp7+Ef339+Nz5L5XO+cnOHo=\ngithub.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8=\ngithub.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8=\ngithub.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=\ngithub.com/Azure/go-autorest v10.8.1+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=\ngithub.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=\ngithub.com/Azure/go-autorest/autorest v0.11.1/go.mod h1:JFgpikqFJ/MleTTxwepExTKnFUKKszPS8UavbQYUMuw=\ngithub.com/Azure/go-autorest/autorest/adal v0.9.0/go.mod 
h1:/c022QCutn2P7uY+/oQWWNcK9YU+MH96NgK+jErpbcg=\ngithub.com/Azure/go-autorest/autorest/adal v0.9.5/go.mod h1:B7KF7jKIeC9Mct5spmyCB/A8CG/sEz1vwIRGv/bbw7A=\ngithub.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74=\ngithub.com/Azure/go-autorest/autorest/mocks v0.4.0/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k=\ngithub.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k=\ngithub.com/Azure/go-autorest/logger v0.2.0/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8=\ngithub.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU=\ngithub.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=\ngithub.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=\ngithub.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=\ngithub.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=\ngithub.com/Microsoft/go-winio v0.4.11/go.mod h1:VhR8bwka0BXejwEJY73c50VrPtXAaKcyvVC4A4RozmA=\ngithub.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA=\ngithub.com/Microsoft/go-winio v0.4.15-0.20190919025122-fc70bd9a86b5/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw=\ngithub.com/Microsoft/go-winio v0.4.16-0.20201130162521-d1ffc52c7331/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0=\ngithub.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0=\ngithub.com/Microsoft/go-winio v0.4.17-0.20210211115548-6eac466e5fa3/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84=\ngithub.com/Microsoft/go-winio v0.4.17-0.20210324224401-5516f17a5958/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84=\ngithub.com/Microsoft/go-winio v0.4.17 h1:iT12IBVClFevaf8PuVyi3UmZOVh4OqnaLxDTW2O6j3w=\ngithub.com/Microsoft/go-winio v0.4.17/go.mod 
h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84=\ngithub.com/Microsoft/hcsshim v0.8.6/go.mod h1:Op3hHsoHPAvb6lceZHDtd9OkTew38wNoXnJs8iY7rUg=\ngithub.com/Microsoft/hcsshim v0.8.7-0.20190325164909-8abdbb8205e4/go.mod h1:Op3hHsoHPAvb6lceZHDtd9OkTew38wNoXnJs8iY7rUg=\ngithub.com/Microsoft/hcsshim v0.8.7/go.mod h1:OHd7sQqRFrYd3RmSgbgji+ctCwkbq2wbEYNSzOYtcBQ=\ngithub.com/Microsoft/hcsshim v0.8.9/go.mod h1:5692vkUqntj1idxauYlpoINNKeqCiG6Sg38RRsjT5y8=\ngithub.com/Microsoft/hcsshim v0.8.14/go.mod h1:NtVKoYxQuTLx6gEq0L96c9Ju4JbRJ4nY2ow3VK6a9Lg=\ngithub.com/Microsoft/hcsshim v0.8.15/go.mod h1:x38A4YbHbdxJtc0sF6oIz+RG0npwSCAvn69iY6URG00=\ngithub.com/Microsoft/hcsshim v0.8.16/go.mod h1:o5/SZqmR7x9JNKsW3pu+nqHm0MF8vbA+VxGOoXdC600=\ngithub.com/Microsoft/hcsshim v0.8.23/go.mod h1:4zegtUJth7lAvFyc6cH2gGQ5B3OFQim01nnU2M8jKDg=\ngithub.com/Microsoft/hcsshim/test v0.0.0-20201218223536-d3e5debf77da/go.mod h1:5hlzMzRKMLyo42nCZ9oml8AdTlq/0cvIaBv6tK1RehU=\ngithub.com/Microsoft/hcsshim/test v0.0.0-20210227013316-43a75bb4edd3/go.mod h1:mw7qgWloBUl75W/gVH3cQszUg1+gUITj7D6NY7ywVnY=\ngithub.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ=\ngithub.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=\ngithub.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=\ngithub.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=\ngithub.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ=\ngithub.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d h1:G0m3OIz70MZUWq3EgK3CesDbo8upS2Vm9/P3FtgI+Jk=\ngithub.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg=\ngithub.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod 
h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=\ngithub.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=\ngithub.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=\ngithub.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=\ngithub.com/alexflint/go-filemutex v0.0.0-20171022225611-72bdc8eae2ae/go.mod h1:CgnQgUtFrFz9mxFNtED3jI5tLDjKlOM+oUF/sTk6ps0=\ngithub.com/andybalholm/brotli v1.0.1 h1:KqhlKozYbRtJvsPrrEeXcO+N2l6NYT5A2QAFmSULpEc=\ngithub.com/andybalholm/brotli v1.0.1/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y=\ngithub.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=\ngithub.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=\ngithub.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=\ngithub.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=\ngithub.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc=\ngithub.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=\ngithub.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=\ngithub.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY=\ngithub.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0=\ngithub.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=\ngithub.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=\ngithub.com/beorn7/perks v0.0.0-20160804104726-4c0e84591b9a/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=\ngithub.com/beorn7/perks 
v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=\ngithub.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=\ngithub.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=\ngithub.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=\ngithub.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA=\ngithub.com/bits-and-blooms/bitset v1.2.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA=\ngithub.com/blang/semver v3.1.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=\ngithub.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=\ngithub.com/bmatcuk/doublestar/v4 v4.0.2 h1:X0krlUVAVmtr2cRoTqR8aDMrDqnB36ht8wpWTiQ3jsA=\ngithub.com/bmatcuk/doublestar/v4 v4.0.2/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=\ngithub.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=\ngithub.com/bshuster-repo/logrus-logstash-hook v0.4.1/go.mod h1:zsTqEiSzDgAa/8GZR7E1qaXrhYNDKBYy5/dWPTIflbk=\ngithub.com/buger/jsonparser v0.0.0-20180808090653-f4dd9f5a6b44/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s=\ngithub.com/bugsnag/bugsnag-go v0.0.0-20141110184014-b1d153021fcd/go.mod h1:2oa8nejYd4cQ/b0hMIopN0lCRxU0bueqREvZLWFrtK8=\ngithub.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50=\ngithub.com/bugsnag/panicwrap v0.0.0-20151223152923-e2c28503fcd0/go.mod h1:D/8v3kj0zr8ZAKg1AQ6crr+5VwKN5eIywRkfhyM/+dE=\ngithub.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw=\ngithub.com/cenkalti/backoff/v4 v4.1.3 h1:cFAlzYUlVYDysBEH2T5hyJZMh3+5+WCBvSnK6Q8UtC4=\ngithub.com/cenkalti/backoff/v4 v4.1.3/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw=\ngithub.com/census-instrumentation/opencensus-proto v0.2.1/go.mod 
h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=\ngithub.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=\ngithub.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=\ngithub.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=\ngithub.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=\ngithub.com/checkpoint-restore/go-criu/v4 v4.1.0/go.mod h1:xUQBLp4RLc5zJtWY++yjOoMoB5lihDt7fai+75m+rGw=\ngithub.com/checkpoint-restore/go-criu/v5 v5.0.0/go.mod h1:cfwC0EG7HMUenopBsUf9d89JlCLQIfgVcNsNN0t6T2M=\ngithub.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=\ngithub.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=\ngithub.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=\ngithub.com/cilium/ebpf v0.0.0-20200110133405-4032b1d8aae3/go.mod h1:MA5e5Lr8slmEg9bt0VpxxWqJlO4iwu3FBdHUzV7wQVg=\ngithub.com/cilium/ebpf v0.0.0-20200702112145-1c8d4c9ef775/go.mod h1:7cR51M8ViRLIdUjrmSXlK9pkrsDlLHbO8jiB8X8JnOc=\ngithub.com/cilium/ebpf v0.2.0/go.mod h1:To2CFviqOWL/M0gIMsvSMlqe7em/l1ALkX1PyjrX2Qs=\ngithub.com/cilium/ebpf v0.4.0/go.mod h1:4tRaxcgiL706VnOzHOdBlY8IEAIdxINsQBcU4xJJXRs=\ngithub.com/cilium/ebpf v0.6.2/go.mod h1:4tRaxcgiL706VnOzHOdBlY8IEAIdxINsQBcU4xJJXRs=\ngithub.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag=\ngithub.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I=\ngithub.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=\ngithub.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=\ngithub.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod 
h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=\ngithub.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=\ngithub.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI=\ngithub.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=\ngithub.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=\ngithub.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=\ngithub.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=\ngithub.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=\ngithub.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=\ngithub.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8=\ngithub.com/containerd/aufs v0.0.0-20200908144142-dab0cbea06f4/go.mod h1:nukgQABAEopAHvB6j7cnP5zJ+/3aVcE7hCYqvIwAHyE=\ngithub.com/containerd/aufs v0.0.0-20201003224125-76a6863f2989/go.mod h1:AkGGQs9NM2vtYHaUen+NljV0/baGCAPELGm2q9ZXpWU=\ngithub.com/containerd/aufs v0.0.0-20210316121734-20793ff83c97/go.mod h1:kL5kd6KM5TzQjR79jljyi4olc1Vrx6XBlcyj3gNv2PU=\ngithub.com/containerd/aufs v1.0.0/go.mod h1:kL5kd6KM5TzQjR79jljyi4olc1Vrx6XBlcyj3gNv2PU=\ngithub.com/containerd/btrfs v0.0.0-20201111183144-404b9149801e/go.mod h1:jg2QkJcsabfHugurUvvPhS3E08Oxiuh5W/g1ybB4e0E=\ngithub.com/containerd/btrfs v0.0.0-20210316141732-918d888fb676/go.mod h1:zMcX3qkXTAi9GI50+0HOeuV8LU2ryCE/V2vG/ZBiTss=\ngithub.com/containerd/btrfs v1.0.0/go.mod h1:zMcX3qkXTAi9GI50+0HOeuV8LU2ryCE/V2vG/ZBiTss=\ngithub.com/containerd/cgroups v0.0.0-20190717030353-c4b9ac5c7601/go.mod 
h1:X9rLEHIqSf/wfK8NsPqxJmeZgW4pcfzdXITDrUSJ6uI=\ngithub.com/containerd/cgroups v0.0.0-20190919134610-bf292b21730f/go.mod h1:OApqhQ4XNSNC13gXIwDjhOQxjWa/NxkwZXJ1EvqT0ko=\ngithub.com/containerd/cgroups v0.0.0-20200531161412-0dbf7f05ba59/go.mod h1:pA0z1pT8KYB3TCXK/ocprsh7MAkoW8bZVzPdih9snmM=\ngithub.com/containerd/cgroups v0.0.0-20200710171044-318312a37340/go.mod h1:s5q4SojHctfxANBDvMeIaIovkq29IP48TKAxnhYRxvo=\ngithub.com/containerd/cgroups v0.0.0-20200824123100-0b889c03f102/go.mod h1:s5q4SojHctfxANBDvMeIaIovkq29IP48TKAxnhYRxvo=\ngithub.com/containerd/cgroups v0.0.0-20210114181951-8a68de567b68/go.mod h1:ZJeTFisyysqgcCdecO57Dj79RfL0LNeGiFUqLYQRYLE=\ngithub.com/containerd/cgroups v1.0.1/go.mod h1:0SJrPIenamHDcZhEcJMNBB85rHcUsw4f25ZfBiPYRkU=\ngithub.com/containerd/console v0.0.0-20180822173158-c12b1e7919c1/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw=\ngithub.com/containerd/console v0.0.0-20181022165439-0650fd9eeb50/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw=\ngithub.com/containerd/console v0.0.0-20191206165004-02ecf6a7291e/go.mod h1:8Pf4gM6VEbTNRIT26AyyU7hxdQU3MvAvxVI0sc00XBE=\ngithub.com/containerd/console v1.0.1/go.mod h1:XUsP6YE/mKtz6bxc+I8UiKKTP04qjQL4qcS3XoQ5xkw=\ngithub.com/containerd/console v1.0.2/go.mod h1:ytZPjGgY2oeTkAONYafi2kSj0aYggsf8acV1PGKCbzQ=\ngithub.com/containerd/containerd v1.2.10/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=\ngithub.com/containerd/containerd v1.3.0-beta.2.0.20190828155532-0293cbd26c69/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=\ngithub.com/containerd/containerd v1.3.0/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=\ngithub.com/containerd/containerd v1.3.1-0.20191213020239-082f7e3aed57/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=\ngithub.com/containerd/containerd v1.3.2/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=\ngithub.com/containerd/containerd v1.4.0-beta.2.0.20200729163537-40b22ef07410/go.mod 
h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=\ngithub.com/containerd/containerd v1.4.1/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=\ngithub.com/containerd/containerd v1.4.3/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=\ngithub.com/containerd/containerd v1.4.9/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA=\ngithub.com/containerd/containerd v1.5.0-beta.1/go.mod h1:5HfvG1V2FsKesEGQ17k5/T7V960Tmcumvqn8Mc+pCYQ=\ngithub.com/containerd/containerd v1.5.0-beta.3/go.mod h1:/wr9AVtEM7x9c+n0+stptlo/uBBoBORwEx6ardVcmKU=\ngithub.com/containerd/containerd v1.5.0-beta.4/go.mod h1:GmdgZd2zA2GYIBZ0w09ZvgqEq8EfBp/m3lcVZIvPHhI=\ngithub.com/containerd/containerd v1.5.0-rc.0/go.mod h1:V/IXoMqNGgBlabz3tHD2TWDoTJseu1FGOKuoA4nNb2s=\ngithub.com/containerd/containerd v1.5.10 h1:3cQ2uRVCkJVcx5VombsE7105Gl9Wrl7ORAO3+4+ogf4=\ngithub.com/containerd/containerd v1.5.10/go.mod h1:fvQqCfadDGga5HZyn3j4+dx56qj2I9YwBrlSdalvJYQ=\ngithub.com/containerd/continuity v0.0.0-20190426062206-aaeac12a7ffc/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y=\ngithub.com/containerd/continuity v0.0.0-20190815185530-f2a389ac0a02/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y=\ngithub.com/containerd/continuity v0.0.0-20191127005431-f65d91d395eb/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y=\ngithub.com/containerd/continuity v0.0.0-20200710164510-efbc4488d8fe/go.mod h1:cECdGN1O8G9bgKTlLhuPJimka6Xb/Gg7vYzCTNVxhvo=\ngithub.com/containerd/continuity v0.0.0-20201208142359-180525291bb7/go.mod h1:kR3BEg7bDFaEddKm54WSmrol1fKWDU1nKYkgrcgZT7Y=\ngithub.com/containerd/continuity v0.0.0-20210208174643-50096c924a4e/go.mod h1:EXlVlkqNba9rJe3j7w3Xa924itAMLgZH4UD/Q4PExuQ=\ngithub.com/containerd/continuity v0.1.0/go.mod h1:ICJu0PwR54nI0yPEnJ6jcS+J7CZAUXrLh8lPo2knzsM=\ngithub.com/containerd/fifo v0.0.0-20180307165137-3d5202aec260/go.mod h1:ODA38xgv3Kuk8dQz2ZQXpnv/UZZUHUCL7pnLehbXgQI=\ngithub.com/containerd/fifo v0.0.0-20190226154929-a9fb20d87448/go.mod 
h1:ODA38xgv3Kuk8dQz2ZQXpnv/UZZUHUCL7pnLehbXgQI=\ngithub.com/containerd/fifo v0.0.0-20200410184934-f15a3290365b/go.mod h1:jPQ2IAeZRCYxpS/Cm1495vGFww6ecHmMk1YJH2Q5ln0=\ngithub.com/containerd/fifo v0.0.0-20201026212402-0724c46b320c/go.mod h1:jPQ2IAeZRCYxpS/Cm1495vGFww6ecHmMk1YJH2Q5ln0=\ngithub.com/containerd/fifo v0.0.0-20210316144830-115abcc95a1d/go.mod h1:ocF/ME1SX5b1AOlWi9r677YJmCPSwwWnQ9O123vzpE4=\ngithub.com/containerd/fifo v1.0.0/go.mod h1:ocF/ME1SX5b1AOlWi9r677YJmCPSwwWnQ9O123vzpE4=\ngithub.com/containerd/go-cni v1.0.1/go.mod h1:+vUpYxKvAF72G9i1WoDOiPGRtQpqsNW/ZHtSlv++smU=\ngithub.com/containerd/go-cni v1.0.2/go.mod h1:nrNABBHzu0ZwCug9Ije8hL2xBCYh/pjfMb1aZGrrohk=\ngithub.com/containerd/go-runc v0.0.0-20180907222934-5a6d9f37cfa3/go.mod h1:IV7qH3hrUgRmyYrtgEeGWJfWbgcHL9CSRruz2Vqcph0=\ngithub.com/containerd/go-runc v0.0.0-20190911050354-e029b79d8cda/go.mod h1:IV7qH3hrUgRmyYrtgEeGWJfWbgcHL9CSRruz2Vqcph0=\ngithub.com/containerd/go-runc v0.0.0-20200220073739-7016d3ce2328/go.mod h1:PpyHrqVs8FTi9vpyHwPwiNEGaACDxT/N/pLcvMSRA9g=\ngithub.com/containerd/go-runc v0.0.0-20201020171139-16b287bc67d0/go.mod h1:cNU0ZbCgCQVZK4lgG3P+9tn9/PaJNmoDXPpoJhDR+Ok=\ngithub.com/containerd/go-runc v1.0.0/go.mod h1:cNU0ZbCgCQVZK4lgG3P+9tn9/PaJNmoDXPpoJhDR+Ok=\ngithub.com/containerd/imgcrypt v1.0.1/go.mod h1:mdd8cEPW7TPgNG4FpuP3sGBiQ7Yi/zak9TYCG3juvb0=\ngithub.com/containerd/imgcrypt v1.0.4-0.20210301171431-0ae5c75f59ba/go.mod h1:6TNsg0ctmizkrOgXRNQjAPFWpMYRWuiB6dSF4Pfa5SA=\ngithub.com/containerd/imgcrypt v1.1.1-0.20210312161619-7ed62a527887/go.mod h1:5AZJNI6sLHJljKuI9IHnw1pWqo/F0nGDOuR9zgTs7ow=\ngithub.com/containerd/imgcrypt v1.1.1/go.mod h1:xpLnwiQmEUJPvQoAapeb2SNCxz7Xr6PJrXQb0Dpc4ms=\ngithub.com/containerd/nri v0.0.0-20201007170849-eb1350a75164/go.mod h1:+2wGSDGFYfE5+So4M5syatU0N0f0LbWpuqyMi4/BE8c=\ngithub.com/containerd/nri v0.0.0-20210316161719-dbaa18c31c14/go.mod h1:lmxnXF6oMkbqs39FiCt1s0R2HSMhcLel9vNL3m4AaeY=\ngithub.com/containerd/nri v0.1.0/go.mod 
h1:lmxnXF6oMkbqs39FiCt1s0R2HSMhcLel9vNL3m4AaeY=\ngithub.com/containerd/ttrpc v0.0.0-20190828154514-0e0f228740de/go.mod h1:PvCDdDGpgqzQIzDW1TphrGLssLDZp2GuS+X5DkEJB8o=\ngithub.com/containerd/ttrpc v0.0.0-20190828172938-92c8520ef9f8/go.mod h1:PvCDdDGpgqzQIzDW1TphrGLssLDZp2GuS+X5DkEJB8o=\ngithub.com/containerd/ttrpc v0.0.0-20191028202541-4f1b8fe65a5c/go.mod h1:LPm1u0xBw8r8NOKoOdNMeVHSawSsltak+Ihv+etqsE8=\ngithub.com/containerd/ttrpc v1.0.1/go.mod h1:UAxOpgT9ziI0gJrmKvgcZivgxOp8iFPSk8httJEt98Y=\ngithub.com/containerd/ttrpc v1.0.2/go.mod h1:UAxOpgT9ziI0gJrmKvgcZivgxOp8iFPSk8httJEt98Y=\ngithub.com/containerd/ttrpc v1.1.0/go.mod h1:XX4ZTnoOId4HklF4edwc4DcqskFZuvXB1Evzy5KFQpQ=\ngithub.com/containerd/typeurl v0.0.0-20180627222232-a93fcdb778cd/go.mod h1:Cm3kwCdlkCfMSHURc+r6fwoGH6/F1hH3S4sg0rLFWPc=\ngithub.com/containerd/typeurl v0.0.0-20190911142611-5eb25027c9fd/go.mod h1:GeKYzf2pQcqv7tJ0AoCuuhtnqhva5LNU3U+OyKxxJpk=\ngithub.com/containerd/typeurl v1.0.1/go.mod h1:TB1hUtrpaiO88KEK56ijojHS1+NeF0izUACaJW2mdXg=\ngithub.com/containerd/typeurl v1.0.2/go.mod h1:9trJWW2sRlGub4wZJRTW83VtbOLS6hwcDZXTn6oPz9s=\ngithub.com/containerd/zfs v0.0.0-20200918131355-0a33824f23a2/go.mod h1:8IgZOBdv8fAgXddBT4dBXJPtxyRsejFIpXoklgxgEjw=\ngithub.com/containerd/zfs v0.0.0-20210301145711-11e8f1707f62/go.mod h1:A9zfAbMlQwE+/is6hi0Xw8ktpL+6glmqZYtevJgaB8Y=\ngithub.com/containerd/zfs v0.0.0-20210315114300-dde8f0fda960/go.mod h1:m+m51S1DvAP6r3FcmYCp54bQ34pyOwTieQDNRIRHsFY=\ngithub.com/containerd/zfs v0.0.0-20210324211415-d5c4544f0433/go.mod h1:m+m51S1DvAP6r3FcmYCp54bQ34pyOwTieQDNRIRHsFY=\ngithub.com/containerd/zfs v1.0.0/go.mod h1:m+m51S1DvAP6r3FcmYCp54bQ34pyOwTieQDNRIRHsFY=\ngithub.com/containernetworking/cni v0.7.1/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY=\ngithub.com/containernetworking/cni v0.8.0/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY=\ngithub.com/containernetworking/cni v0.8.1/go.mod h1:LGwApLUm2FpoOfxTDEeq8T9ipbpZ61X79hmU3w8FmsY=\ngithub.com/containernetworking/plugins 
v0.8.6/go.mod h1:qnw5mN19D8fIwkqW7oHHYDHVlzhJpcY6TQxn/fUyDDM=\ngithub.com/containernetworking/plugins v0.9.1/go.mod h1:xP/idU2ldlzN6m4p5LmGiwRDjeJr6FLK6vuiUwoH7P8=\ngithub.com/containers/ocicrypt v1.0.1/go.mod h1:MeJDzk1RJHv89LjsH0Sp5KTY3ZYkjXO/C+bKAeWFIrc=\ngithub.com/containers/ocicrypt v1.1.0/go.mod h1:b8AOe0YR67uU8OqfVNcznfFpAzu3rdgUV4GP9qXPfu4=\ngithub.com/containers/ocicrypt v1.1.1/go.mod h1:Dm55fwWm1YZAjYRaJ94z2mfZikIyIN4B0oB3dj3jFxY=\ngithub.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=\ngithub.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=\ngithub.com/coreos/go-iptables v0.4.5/go.mod h1:/mVI274lEDI2ns62jHCDnCyBF9Iwsmekav8Dbxlm1MU=\ngithub.com/coreos/go-iptables v0.5.0/go.mod h1:/mVI274lEDI2ns62jHCDnCyBF9Iwsmekav8Dbxlm1MU=\ngithub.com/coreos/go-oidc v2.1.0+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc=\ngithub.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=\ngithub.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=\ngithub.com/coreos/go-systemd v0.0.0-20161114122254-48702e0da86b/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=\ngithub.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=\ngithub.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=\ngithub.com/coreos/go-systemd/v22 v22.0.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk=\ngithub.com/coreos/go-systemd/v22 v22.1.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk=\ngithub.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=\ngithub.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=\ngithub.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod 
h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=\ngithub.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=\ngithub.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=\ngithub.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=\ngithub.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=\ngithub.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=\ngithub.com/creack/pty v1.1.11/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=\ngithub.com/cyphar/filepath-securejoin v0.2.2/go.mod h1:FpkQEhXnPnOthhzymB7CGsFk2G9VLXONKD9G7QGMM+4=\ngithub.com/d2g/dhcp4 v0.0.0-20170904100407-a1d1b6c41b1c/go.mod h1:Ct2BUK8SB0YC1SMSibvLzxjeJLnrYEVLULFNiHY9YfQ=\ngithub.com/d2g/dhcp4client v1.0.0/go.mod h1:j0hNfjhrt2SxUOw55nL0ATM/z4Yt3t2Kd1mW34z5W5s=\ngithub.com/d2g/dhcp4server v0.0.0-20181031114812-7d4a0a7f59a5/go.mod h1:Eo87+Kg/IX2hfWJfwxMzLyuSZyxSoAug2nGa1G2QAi8=\ngithub.com/d2g/hardwareaddr v0.0.0-20190221164911-e7d9fbe030e4/go.mod h1:bMl4RjIciD2oAxI7DmWRx6gbeqrkoLqv3MV0vzNad+I=\ngithub.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=\ngithub.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=\ngithub.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=\ngithub.com/denisbrodbeck/machineid v1.0.1 h1:geKr9qtkB876mXguW2X6TU4ZynleN6ezuMSRhl4D7AQ=\ngithub.com/denisbrodbeck/machineid v1.0.1/go.mod h1:dJUwb7PTidGDeYyUBmXZ2GphQBbjJCrnectwCyxcUSI=\ngithub.com/denverdino/aliyungo v0.0.0-20190125010748-a747050bb1ba/go.mod h1:dV8lFg6daOBZbT6/BDGIz6Y3WFGn8juu6G+CQ6LHtl0=\ngithub.com/dgrijalva/jwt-go v0.0.0-20170104182250-a601269ab70c/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=\ngithub.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=\ngithub.com/dgryski/go-sip13 
v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=\ngithub.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E=\ngithub.com/dnaeon/go-vcr v1.1.0/go.mod h1:M7tiix8f0r6mKKJ3Yq/kqU1OYf3MnfmBWVbPx/yU9ko=\ngithub.com/dnaeon/go-vcr v1.2.0 h1:zHCHvJYTMh1N7xnV7zf1m1GPBF9Ad0Jk/whtQ1663qI=\ngithub.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ=\ngithub.com/docker/distribution v0.0.0-20190905152932-14b96e55d84c/go.mod h1:0+TTO4EOBfRPhZXAeF1Vu+W3hHZ8eLp8PgKVZlcvtFY=\ngithub.com/docker/distribution v2.7.1-0.20190205005809-0d3efadf0154+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=\ngithub.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=\ngithub.com/docker/distribution v2.8.0+incompatible h1:l9EaZDICImO1ngI+uTifW+ZYvvz7fKISBAKpg+MbWbY=\ngithub.com/docker/distribution v2.8.0+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=\ngithub.com/docker/docker v20.10.12+incompatible h1:CEeNmFM0QZIsJCZKMkZx0ZcahTiewkrgiwfYD+dfl1U=\ngithub.com/docker/docker v20.10.12+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=\ngithub.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=\ngithub.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=\ngithub.com/docker/go-events v0.0.0-20170721190031-9461782956ad/go.mod h1:Uw6UezgYA44ePAFQYUehOuCzmy5zmg/+nl2ZfMWGkpA=\ngithub.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c/go.mod h1:Uw6UezgYA44ePAFQYUehOuCzmy5zmg/+nl2ZfMWGkpA=\ngithub.com/docker/go-metrics v0.0.0-20180209012529-399ea8c73916/go.mod h1:/u0gXw0Gay3ceNrsHubL3BtdOL2fHf93USgMTe0W5dI=\ngithub.com/docker/go-metrics v0.0.1/go.mod h1:cG1hvH2utMXtqgqqYE9plW6lDxS3/5ayHzueweSI3Vw=\ngithub.com/docker/go-units v0.4.0 h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw=\ngithub.com/docker/go-units v0.4.0/go.mod 
h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=\ngithub.com/docker/libtrust v0.0.0-20150114040149-fa567046d9b1/go.mod h1:cyGadeNEkKy96OOhEzfZl+yxihPEzKnqJwvfuSUqbZE=\ngithub.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM=\ngithub.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=\ngithub.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 h1:iFaUwBSo5Svw6L7HYpRu/0lE3e0BaElwnNO1qkNQxBY=\ngithub.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5/go.mod h1:qssHWj60/X5sZFNxpG4HBPDHVqxNm4DfnCKgrbZOT+s=\ngithub.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY=\ngithub.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=\ngithub.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=\ngithub.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=\ngithub.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=\ngithub.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=\ngithub.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=\ngithub.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=\ngithub.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=\ngithub.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po=\ngithub.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=\ngithub.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod 
h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=\ngithub.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ=\ngithub.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0=\ngithub.com/envoyproxy/go-control-plane v0.10.1/go.mod h1:AY7fTTXNdv/aJ2O5jwpxAPOWUZ7hQAEvzN5Pf27BkQQ=\ngithub.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=\ngithub.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E9/baC+qXE/TeeyBRzgJDws=\ngithub.com/evanphx/json-patch v4.9.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=\ngithub.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=\ngithub.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=\ngithub.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=\ngithub.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k=\ngithub.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k=\ngithub.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=\ngithub.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=\ngithub.com/fsnotify/fsnotify v1.5.1 h1:mZcQUHVQUQWoPXXtuf9yuEXKudkV2sx1E06UadKWpgI=\ngithub.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU=\ngithub.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa/go.mod h1:KnogPXtdwXqoenmZCw6S+25EAm2MkxbG0deNDu4cbSA=\ngithub.com/garyburd/redigo v0.0.0-20150301180006-535138d7bcd7/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY=\ngithub.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=\ngithub.com/ghodss/yaml v1.0.0/go.mod 
h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=\ngithub.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=\ngithub.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=\ngithub.com/gin-gonic/gin v1.7.7 h1:3DoBmSbJbZAWqXJC3SLjAPfutPJJRN1U5pALB7EeTTs=\ngithub.com/gin-gonic/gin v1.7.7/go.mod h1:axIBovoeJpVj8S3BwE0uPMTeReE4+AfFtqpqaZ1qq1U=\ngithub.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=\ngithub.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=\ngithub.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=\ngithub.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=\ngithub.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=\ngithub.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=\ngithub.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=\ngithub.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=\ngithub.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas=\ngithub.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU=\ngithub.com/go-ole/go-ole v1.2.4/go.mod h1:XCwSNxSkXRo4vlyPy93sltvi/qJq0jqQhjqQNIwKuxM=\ngithub.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=\ngithub.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=\ngithub.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg=\ngithub.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=\ngithub.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc=\ngithub.com/go-openapi/jsonreference v0.19.3/go.mod 
h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8=\ngithub.com/go-openapi/spec v0.19.3/go.mod h1:FpwSN1ksY1eteniUU7X0N/BgJ7a4WvBFVA8Lj9mJglo=\ngithub.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=\ngithub.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=\ngithub.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A=\ngithub.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=\ngithub.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8=\ngithub.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU=\ngithub.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs=\ngithub.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA=\ngithub.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho=\ngithub.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA=\ngithub.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4=\ngithub.com/go-playground/validator/v10 v10.10.0 h1:I7mrTYv78z8k8VXa/qJlOlEXn/nBh+BF8dHX5nt/dr0=\ngithub.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos=\ngithub.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=\ngithub.com/godbus/dbus v0.0.0-20151105175453-c7fdd8b5cd55/go.mod h1:/YcGZj5zSblfDWMMoOzV4fas9FZnQYTkDnsGvmh2Grw=\ngithub.com/godbus/dbus v0.0.0-20180201030542-885f9cc04c9c/go.mod h1:/YcGZj5zSblfDWMMoOzV4fas9FZnQYTkDnsGvmh2Grw=\ngithub.com/godbus/dbus v0.0.0-20190422162347-ade71ed3457e/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4=\ngithub.com/godbus/dbus/v5 v5.0.3/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=\ngithub.com/godbus/dbus/v5 v5.0.4/go.mod 
h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=\ngithub.com/gogo/googleapis v1.2.0/go.mod h1:Njal3psf3qN6dwBtQfUmBZh2ybovJ0tlu3o/AC7HYjU=\ngithub.com/gogo/googleapis v1.4.0/go.mod h1:5YRNX2z1oM5gXdAkurHa942MDgEJyk02w4OecKY87+c=\ngithub.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=\ngithub.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=\ngithub.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=\ngithub.com/gogo/protobuf v1.3.0/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=\ngithub.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=\ngithub.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=\ngithub.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=\ngithub.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=\ngithub.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=\ngithub.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=\ngithub.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=\ngithub.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=\ngithub.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=\ngithub.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=\ngithub.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=\ngithub.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=\ngithub.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=\ngithub.com/golang/mock v1.4.0/go.mod 
h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=\ngithub.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=\ngithub.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=\ngithub.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4=\ngithub.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8=\ngithub.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=\ngithub.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=\ngithub.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=\ngithub.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=\ngithub.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=\ngithub.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=\ngithub.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=\ngithub.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=\ngithub.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=\ngithub.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=\ngithub.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=\ngithub.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=\ngithub.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=\ngithub.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=\ngithub.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=\ngithub.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=\ngithub.com/golang/protobuf v1.5.1/go.mod 
h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM=\ngithub.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=\ngithub.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=\ngithub.com/golang/snappy v0.0.2/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=\ngithub.com/golang/snappy v0.0.3 h1:fHPg5GQYlCeLIPB9BZqMVR5nR9A+IM5zcgeTdjMYmLA=\ngithub.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=\ngithub.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=\ngithub.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=\ngithub.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=\ngithub.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=\ngithub.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=\ngithub.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ=\ngithub.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=\ngithub.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=\ngithub.com/google/gofuzz v1.1.0/go.mod 
h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=\ngithub.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=\ngithub.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=\ngithub.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=\ngithub.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk=\ngithub.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=\ngithub.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=\ngithub.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=\ngithub.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=\ngithub.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=\ngithub.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=\ngithub.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=\ngithub.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=\ngithub.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=\ngithub.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=\ngithub.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=\ngithub.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=\ngithub.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=\ngithub.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod 
h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=\ngithub.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=\ngithub.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=\ngithub.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=\ngithub.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=\ngithub.com/google/uuid v1.2.0 h1:qJYtXnJRWmpe7m/3XlyhrsLrEURqHRM2kxzoxXqyUDs=\ngithub.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=\ngithub.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=\ngithub.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=\ngithub.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0=\ngithub.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM=\ngithub.com/googleapis/gnostic v0.4.1/go.mod h1:LRhVm6pbyptWbWbuZ38d1eyptfvIytN3ir6b65WBswg=\ngithub.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=\ngithub.com/gorilla/handlers v0.0.0-20150720190736-60c7bfde3e33/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ=\ngithub.com/gorilla/mux v1.7.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=\ngithub.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=\ngithub.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=\ngithub.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=\ngithub.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=\ngithub.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc=\ngithub.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=\ngithub.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod 
h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA=\ngithub.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=\ngithub.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=\ngithub.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=\ngithub.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=\ngithub.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=\ngithub.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=\ngithub.com/hashicorp/consul/api v1.11.0/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M=\ngithub.com/hashicorp/consul/api v1.12.0/go.mod h1:6pVBMo0ebnYdt2S3H87XhekM/HHrUoTD2XXb/VrZVy0=\ngithub.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms=\ngithub.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=\ngithub.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=\ngithub.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=\ngithub.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=\ngithub.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=\ngithub.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=\ngithub.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=\ngithub.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=\ngithub.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=\ngithub.com/hashicorp/go-msgpack v0.5.3/go.mod 
h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=\ngithub.com/hashicorp/go-multierror v0.0.0-20161216184304-ed905158d874/go.mod h1:JMRHfdO9jKNzS/+BTlxCjKNQHg/jZAft8U7LloJvN7I=\ngithub.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=\ngithub.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA=\ngithub.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs=\ngithub.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=\ngithub.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=\ngithub.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=\ngithub.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=\ngithub.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=\ngithub.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=\ngithub.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=\ngithub.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=\ngithub.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=\ngithub.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=\ngithub.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=\ngithub.com/hashicorp/mdns v1.0.1/go.mod h1:4gW7WsVCke5TE7EPeYliwHlRUyBtfCwuFwuMg2DmyNY=\ngithub.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc=\ngithub.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE=\ngithub.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE=\ngithub.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk=\ngithub.com/hashicorp/serf v0.9.6/go.mod 
h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4=\ngithub.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=\ngithub.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=\ngithub.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=\ngithub.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=\ngithub.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=\ngithub.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=\ngithub.com/imdario/mergo v0.3.10/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=\ngithub.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=\ngithub.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=\ngithub.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=\ngithub.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=\ngithub.com/j-keck/arping v0.0.0-20160618110441-2cf9dc699c56/go.mod h1:ymszkNOg6tORTn+6F6j+Jc8TOr5osrynvN6ivFWZ2GA=\ngithub.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=\ngithub.com/jmespath/go-jmespath v0.0.0-20160803190731-bd40a432e4c7/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=\ngithub.com/joho/godotenv v1.4.0 h1:3l4+N6zfMWnkbPEXKng2o2/MR5mSwTrBih4ZEkkz1lg=\ngithub.com/joho/godotenv v1.4.0/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=\ngithub.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=\ngithub.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=\ngithub.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=\ngithub.com/json-iterator/go v1.1.9/go.mod 
h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=\ngithub.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=\ngithub.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=\ngithub.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=\ngithub.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=\ngithub.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=\ngithub.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=\ngithub.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=\ngithub.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=\ngithub.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=\ngithub.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00=\ngithub.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=\ngithub.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=\ngithub.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=\ngithub.com/klauspost/compress v1.11.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=\ngithub.com/klauspost/compress v1.11.4/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=\ngithub.com/klauspost/compress v1.11.13 h1:eSvu8Tmq6j2psUJqJrLcWH6K3w5Dwc+qipbaA6eVEN4=\ngithub.com/klauspost/compress v1.11.13/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=\ngithub.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=\ngithub.com/klauspost/pgzip v1.2.5 h1:qnWYvvKqedOF2ulHpMG72XQol4ILEJ8k2wwRl/Km8oE=\ngithub.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=\ngithub.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod 
h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=\ngithub.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=\ngithub.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=\ngithub.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=\ngithub.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=\ngithub.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=\ngithub.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=\ngithub.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=\ngithub.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=\ngithub.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=\ngithub.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=\ngithub.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA=\ngithub.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=\ngithub.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=\ngithub.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=\ngithub.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII=\ngithub.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w=\ngithub.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY=\ngithub.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w=\ngithub.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=\ngithub.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls=\ngithub.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=\ngithub.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod 
h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=\ngithub.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=\ngithub.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs=\ngithub.com/marstr/guid v1.1.0/go.mod h1:74gB1z2wpxxInTG6yaqA7KrtM0NZ+RbrcqDvYHefzho=\ngithub.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=\ngithub.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=\ngithub.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=\ngithub.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=\ngithub.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=\ngithub.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=\ngithub.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=\ngithub.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=\ngithub.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84=\ngithub.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE=\ngithub.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=\ngithub.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y=\ngithub.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=\ngithub.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=\ngithub.com/mattn/go-shellwords v1.0.3/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o=\ngithub.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=\ngithub.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=\ngithub.com/mholt/archiver/v3 
v3.5.1 h1:rDjOBX9JSF5BvoJGvjqK479aL70qh9DIpZCl+k7Clwo=\ngithub.com/mholt/archiver/v3 v3.5.1/go.mod h1:e3dqJ7H78uzsRSEACH1joayhuSyhnonssnDhppzS1L4=\ngithub.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=\ngithub.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso=\ngithub.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI=\ngithub.com/miekg/pkcs11 v1.0.3/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs=\ngithub.com/mistifyio/go-zfs v2.1.2-0.20190413222219-f784269be439+incompatible/go.mod h1:8AuVvqP/mXw1px98n46wfvcGfQ4ci2FwoAjKYxuo3Z4=\ngithub.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI=\ngithub.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=\ngithub.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=\ngithub.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=\ngithub.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=\ngithub.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs=\ngithub.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=\ngithub.com/mitchellh/osext v0.0.0-20151018003038-5e2d6d41470f/go.mod h1:OkQIRizQZAeMln+1tSwduZz7+Af5oFlKirV/MSYes2A=\ngithub.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc=\ngithub.com/moby/sys/mountinfo v0.4.0/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A=\ngithub.com/moby/sys/mountinfo v0.4.1/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A=\ngithub.com/moby/sys/symlink v0.1.0/go.mod h1:GGDODQmbFOjFsXvfLVn3+ZRxkch54RkSiGqsZeMYowQ=\ngithub.com/moby/term v0.0.0-20200312100748-672ec06f55cd/go.mod h1:DdlQx2hp0Ss5/fLikoLlEeIYiATotOjgB//nb973jeo=\ngithub.com/moby/term v0.0.0-20210619224110-3f7ff695adc6 
h1:dcztxKSvZ4Id8iPpHERQBbIJfabdt4wUm5qy3wOL2Zc=\ngithub.com/moby/term v0.0.0-20210619224110-3f7ff695adc6/go.mod h1:E2VnQOmVuvZB6UYnnDB0qG5Nq/1tD9acaOpo6xmt0Kw=\ngithub.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=\ngithub.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=\ngithub.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=\ngithub.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=\ngithub.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=\ngithub.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=\ngithub.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=\ngithub.com/modocache/gover v0.0.0-20171022184752-b58185e213c5/go.mod h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8=\ngithub.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=\ngithub.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=\ngithub.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ=\ngithub.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=\ngithub.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=\ngithub.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=\ngithub.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw=\ngithub.com/ncw/swift v1.0.47/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM=\ngithub.com/nwaples/rardecode v1.1.0 h1:vSxaY8vQhOcVr4mm5e8XllHWTiM4JF507A0Katqw7MQ=\ngithub.com/nwaples/rardecode v1.1.0/go.mod 
h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=\ngithub.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=\ngithub.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=\ngithub.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo=\ngithub.com/onsi/ginkgo v0.0.0-20151202141238-7f8ab55aaf3b/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=\ngithub.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=\ngithub.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=\ngithub.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=\ngithub.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=\ngithub.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=\ngithub.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=\ngithub.com/onsi/gomega v0.0.0-20151007035656-2152b45fa28a/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA=\ngithub.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA=\ngithub.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=\ngithub.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=\ngithub.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDsH8xc=\ngithub.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=\ngithub.com/opencontainers/go-digest v0.0.0-20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=\ngithub.com/opencontainers/go-digest v1.0.0-rc1/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=\ngithub.com/opencontainers/go-digest v1.0.0-rc1.0.20180430190053-c9281466c8b2/go.mod 
h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=\ngithub.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=\ngithub.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=\ngithub.com/opencontainers/image-spec v1.0.0/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=\ngithub.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=\ngithub.com/opencontainers/image-spec v1.0.2 h1:9yCKha/T5XdGtO0q9Q9a6T5NUCsTn/DrBg0D7ufOcFM=\ngithub.com/opencontainers/image-spec v1.0.2/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=\ngithub.com/opencontainers/runc v0.0.0-20190115041553-12f6a991201f/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U=\ngithub.com/opencontainers/runc v0.1.1/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U=\ngithub.com/opencontainers/runc v1.0.0-rc8.0.20190926000215-3e425f80a8c9/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U=\ngithub.com/opencontainers/runc v1.0.0-rc9/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U=\ngithub.com/opencontainers/runc v1.0.0-rc93/go.mod h1:3NOsor4w32B2tC0Zbl8Knk4Wg84SM2ImC1fxBuqJ/H0=\ngithub.com/opencontainers/runc v1.0.2/go.mod h1:aTaHFFwQXuA71CiyxOdFFIorAoemI04suvGRQFzWTD0=\ngithub.com/opencontainers/runtime-spec v0.1.2-0.20190507144316-5b71a03e2700/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=\ngithub.com/opencontainers/runtime-spec v1.0.1/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=\ngithub.com/opencontainers/runtime-spec v1.0.2-0.20190207185410-29686dbc5559/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=\ngithub.com/opencontainers/runtime-spec v1.0.2/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=\ngithub.com/opencontainers/runtime-spec v1.0.3-0.20200929063507-e6143ca7d51d/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=\ngithub.com/opencontainers/runtime-spec v1.0.3-0.20210326190908-1c3f411f0417/go.mod 
h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=\ngithub.com/opencontainers/runtime-tools v0.0.0-20181011054405-1d69bd0f9c39/go.mod h1:r3f7wjNzSs2extwzU3Y+6pKfobzPh+kKFJ3ofN+3nfs=\ngithub.com/opencontainers/selinux v1.6.0/go.mod h1:VVGKuOLlE7v4PJyT6h7mNWvq1rzqiriPsEqVhc+svHE=\ngithub.com/opencontainers/selinux v1.8.0/go.mod h1:RScLhm78qiWa2gbVCcGkC7tCGdgk3ogry1nUQF8Evvo=\ngithub.com/opencontainers/selinux v1.8.2/go.mod h1:MUIHuUEvKB1wtJjQdOyYRgOnLD2xAPP8dBsCoU0KuF8=\ngithub.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=\ngithub.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=\ngithub.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=\ngithub.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc=\ngithub.com/pelletier/go-toml v1.9.4 h1:tjENF6MfZAg8e4ZmZTeWaWiT2vXtsoO6+iuOjFhECwM=\ngithub.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=\ngithub.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU=\ngithub.com/pierrec/lz4/v4 v4.1.2 h1:qvY3YFXRQE/XB8MlLzJH7mSzBs74eA2gg52YTk6jUPM=\ngithub.com/pierrec/lz4/v4 v4.1.2/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=\ngithub.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=\ngithub.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=\ngithub.com/pkg/errors v0.8.1-0.20171018195549-f15c970de5b7/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=\ngithub.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=\ngithub.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=\ngithub.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=\ngithub.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI=\ngithub.com/pmezard/go-difflib v1.0.0 
h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=\ngithub.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=\ngithub.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=\ngithub.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s=\ngithub.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA=\ngithub.com/prometheus/client_golang v0.0.0-20180209125602-c332b6f63c06/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=\ngithub.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=\ngithub.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso=\ngithub.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=\ngithub.com/prometheus/client_golang v1.1.0/go.mod h1:I1FGZT9+L76gKKOs5djB6ezCbFQP1xR9D75/vuwEF3g=\ngithub.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU=\ngithub.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=\ngithub.com/prometheus/client_model v0.0.0-20171117100541-99fa1f4be8e5/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=\ngithub.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=\ngithub.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=\ngithub.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=\ngithub.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=\ngithub.com/prometheus/common v0.0.0-20180110214958-89604d197083/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=\ngithub.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod 
h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=\ngithub.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=\ngithub.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=\ngithub.com/prometheus/common v0.6.0/go.mod h1:eBmuwkDJBwy6iBfxCBob6t6dR6ENT/y+J+Zk0j9GMYc=\ngithub.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4=\ngithub.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=\ngithub.com/prometheus/procfs v0.0.0-20180125133057-cb4147076ac7/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=\ngithub.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=\ngithub.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=\ngithub.com/prometheus/procfs v0.0.0-20190522114515-bc1a522cf7b1/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=\ngithub.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=\ngithub.com/prometheus/procfs v0.0.3/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ=\ngithub.com/prometheus/procfs v0.0.5/go.mod h1:4A/X28fw3Fc593LaREMrKMqOKvUAntwMDaekg4FpcdQ=\ngithub.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=\ngithub.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=\ngithub.com/prometheus/procfs v0.2.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=\ngithub.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=\ngithub.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=\ngithub.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=\ngithub.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=\ngithub.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod 
h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=\ngithub.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=\ngithub.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=\ngithub.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=\ngithub.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=\ngithub.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=\ngithub.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=\ngithub.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=\ngithub.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=\ngithub.com/safchain/ethtool v0.0.0-20190326074333-42ed695e3de8/go.mod h1:Z0q5wiBQGYcxhMZ6gUqHn6pYNLypFAvaL3UvgZLR0U4=\ngithub.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig=\ngithub.com/sagikazarmark/crypt v0.4.0/go.mod h1:ALv2SRj7GxYV4HO9elxH9nS6M9gW+xDNxqmyJ6RfDFM=\ngithub.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=\ngithub.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=\ngithub.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo=\ngithub.com/shirou/gopsutil/v3 v3.21.1 h1:dA72XXj5WOXIZkAL2iYTKRVcNOOqh4yfLn9Rm7t8BMM=\ngithub.com/shirou/gopsutil/v3 v3.21.1/go.mod h1:igHnfak0qnw1biGeI2qKQvu0ZkwvEkUcCLlYhZzdr/4=\ngithub.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=\ngithub.com/sirupsen/logrus v1.0.4-0.20170822132746-89742aefa4b2/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc=\ngithub.com/sirupsen/logrus v1.0.6/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc=\ngithub.com/sirupsen/logrus v1.2.0/go.mod 
h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=\ngithub.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=\ngithub.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=\ngithub.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=\ngithub.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=\ngithub.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE=\ngithub.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=\ngithub.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=\ngithub.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=\ngithub.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=\ngithub.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=\ngithub.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=\ngithub.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=\ngithub.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4=\ngithub.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY=\ngithub.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I=\ngithub.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=\ngithub.com/spf13/cast v1.4.1 h1:s0hze+J0196ZfEMTs80N7UlFt0BDuQ7Q+JDnHiMWKdA=\ngithub.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=\ngithub.com/spf13/cobra v0.0.2-0.20171109065643-2da4a54c5cee/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=\ngithub.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=\ngithub.com/spf13/cobra v1.0.0/go.mod 
h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE=\ngithub.com/spf13/cobra v1.3.0 h1:R7cSvGu+Vv+qX0gW5R/85dx2kmmJT5z5NM8ifdYjdn0=\ngithub.com/spf13/cobra v1.3.0/go.mod h1:BrRVncBjOJa/eUcVVm9CE+oC6as8k+VYr4NY7WCi9V4=\ngithub.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=\ngithub.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=\ngithub.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=\ngithub.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=\ngithub.com/spf13/pflag v1.0.1-0.20171106142849-4c012f6dcd95/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=\ngithub.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=\ngithub.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=\ngithub.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=\ngithub.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=\ngithub.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=\ngithub.com/spf13/viper v1.10.0/go.mod h1:SoyBPwAtKDzypXNDFKN5kzH7ppppbGZtls1UpIy5AsM=\ngithub.com/spf13/viper v1.10.1 h1:nuJZuYpG7gTj/XqiUwg8bA0cp1+M2mC3J4g5luUYBKk=\ngithub.com/spf13/viper v1.10.1/go.mod h1:IGlFPqhNAPKRxohIzWpI5QEy4kuI7tcl5WvR+8qy1rU=\ngithub.com/stefanberger/go-pkcs11uri v0.0.0-20201008174630-78d3cae3a980/go.mod h1:AO3tvPzVZ/ayst6UlUKUv6rcPQInYe3IknH3jYhAKu8=\ngithub.com/stretchr/objx v0.0.0-20180129172003-8a3f7159479f/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=\ngithub.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=\ngithub.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=\ngithub.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48=\ngithub.com/stretchr/objx v0.2.0/go.mod 
h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=\ngithub.com/stretchr/testify v0.0.0-20180303142811-b89eecf5ca5d/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=\ngithub.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=\ngithub.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=\ngithub.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=\ngithub.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=\ngithub.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=\ngithub.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=\ngithub.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=\ngithub.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=\ngithub.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=\ngithub.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=\ngithub.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=\ngithub.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=\ngithub.com/tchap/go-patricia v2.2.6+incompatible/go.mod h1:bmLyhP68RS6kStMGxByiQ23RP/odRBOTVjwp2cDyi6I=\ngithub.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=\ngithub.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=\ngithub.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM=\ngithub.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=\ngithub.com/ugorji/go v1.1.7 h1:/68gy2h+1mWMrwZFeD1kQialdSzAb432dtpeJ42ovdo=\ngithub.com/ugorji/go 
v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw=\ngithub.com/ugorji/go/codec v1.1.7 h1:2SvQaVZ1ouYrrKKwoSk2pzd4A9evlKJb9oTL+OaLUSs=\ngithub.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY=\ngithub.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=\ngithub.com/ulikunitz/xz v0.5.9 h1:RsKRIA2MO8x56wkkcd3LbtcE/uMszhb6DpRf+3uwa3I=\ngithub.com/ulikunitz/xz v0.5.9/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=\ngithub.com/urfave/cli v0.0.0-20171014202726-7bc6a0acffa5/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=\ngithub.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=\ngithub.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=\ngithub.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=\ngithub.com/vishvananda/netlink v0.0.0-20181108222139-023a6dafdcdf/go.mod h1:+SR5DhBJrl6ZM7CoCKvpw5BKroDKQ+PJqOg65H/2ktk=\ngithub.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE=\ngithub.com/vishvananda/netlink v1.1.1-0.20201029203352-d40f9887b852/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho=\ngithub.com/vishvananda/netns v0.0.0-20180720170159-13995c7128cc/go.mod h1:ZjcWmFBXmLKZu9Nxj3WKYEafiSqer2rnvPr0en9UNpI=\ngithub.com/vishvananda/netns v0.0.0-20191106174202-0a2b9b5464df/go.mod h1:JP3t17pCcGlemwknint6hfoeCVQrEMVwxRLRjXpq+BU=\ngithub.com/vishvananda/netns v0.0.0-20200728191858-db3c7e526aae/go.mod h1:DD4vA1DwXk04H54A1oHXtwZmA0grkVMdPxx/VGLCah0=\ngithub.com/willf/bitset v1.1.11-0.20200630133818-d5bec3311243/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=\ngithub.com/willf/bitset v1.1.11/go.mod h1:83CECat5yLh5zVOf4P1ErAgKA5UDvKtgyUABdr3+MjI=\ngithub.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=\ngithub.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod 
h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=\ngithub.com/xeipuuv/gojsonschema v0.0.0-20180618132009-1d523034197f/go.mod h1:5yf86TLmAcydyeJq5YvxkGPE2fm/u4myDekKRoLuqhs=\ngithub.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo=\ngithub.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos=\ngithub.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=\ngithub.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=\ngithub.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=\ngithub.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=\ngithub.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=\ngithub.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=\ngithub.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=\ngithub.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs=\ngithub.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA=\ngithub.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg=\ngo.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=\ngo.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=\ngo.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ=\ngo.etcd.io/etcd v0.5.0-alpha.5.0.20200910180754-dd1b699fc489/go.mod h1:yVHk9ub3CSBatqGNg7GRmsnfLWtoW60w4eDYfh7vHDg=\ngo.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs=\ngo.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod 
h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=\ngo.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs=\ngo.mozilla.org/pkcs7 v0.0.0-20200128120323-432b2356ecb1/go.mod h1:SNgMg+EgDFwmvSmLRTNKC5fegJjB7v23qTQ0XLGUNHk=\ngo.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=\ngo.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=\ngo.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=\ngo.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=\ngo.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=\ngo.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=\ngo.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=\ngo.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=\ngo.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=\ngo.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=\ngo.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw=\ngo.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=\ngo.uber.org/goleak v1.1.11 h1:wy28qYRKZgnJTxGxvye5/wgWr1EKjmUDGYox5mGlRlI=\ngo.uber.org/goleak v1.1.11/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ=\ngo.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=\ngo.uber.org/multierr v1.6.0 h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4=\ngo.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=\ngo.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=\ngo.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo=\ngo.uber.org/zap v1.20.0 h1:N4oPlghZwYG55MlU6LXk/Zp00FVNE9X9wrYO8CEs4lc=\ngo.uber.org/zap v1.20.0/go.mod h1:wjWOCqI0f2ZZrJF/UufIOkiC8ii6tm1iqIsLo76RfJw=\ngolang.org/x/crypto 
v0.0.0-20171113213409-9f005a07e0d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=\ngolang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=\ngolang.org/x/crypto v0.0.0-20181009213950-7c1a557ab941/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=\ngolang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=\ngolang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=\ngolang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=\ngolang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=\ngolang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=\ngolang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=\ngolang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=\ngolang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY=\ngolang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=\ngolang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=\ngolang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=\ngolang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=\ngolang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=\ngolang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=\ngolang.org/x/crypto v0.0.0-20210817164053-32db794688a5 h1:HWj/xjIHfjYU5nVXpTM0s39J9CbLn7Cc5a7IC5rwsMQ=\ngolang.org/x/crypto 
v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=\ngolang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=\ngolang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=\ngolang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=\ngolang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=\ngolang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=\ngolang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=\ngolang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=\ngolang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=\ngolang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=\ngolang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=\ngolang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=\ngolang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=\ngolang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=\ngolang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=\ngolang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=\ngolang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=\ngolang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=\ngolang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod 
h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=\ngolang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=\ngolang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=\ngolang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=\ngolang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=\ngolang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=\ngolang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=\ngolang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=\ngolang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=\ngolang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=\ngolang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=\ngolang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=\ngolang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=\ngolang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=\ngolang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=\ngolang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=\ngolang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=\ngolang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=\ngolang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=\ngolang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=\ngolang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod 
h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=\ngolang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=\ngolang.org/x/net v0.0.0-20181011144130-49bb7cea24b1/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=\ngolang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=\ngolang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=\ngolang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=\ngolang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=\ngolang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=\ngolang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=\ngolang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=\ngolang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=\ngolang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=\ngolang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=\ngolang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=\ngolang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20190619014844-b5b0513f8c1b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod 
h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20191004110552-13f9640d40b9/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=\ngolang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=\ngolang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=\ngolang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=\ngolang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=\ngolang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=\ngolang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=\ngolang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=\ngolang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod 
h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=\ngolang.org/x/net v0.0.0-20201006153459-a7d1128ccaa0/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=\ngolang.org/x/net v0.0.0-20201010224723-4f7140c49acb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=\ngolang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=\ngolang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=\ngolang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=\ngolang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=\ngolang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=\ngolang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=\ngolang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=\ngolang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=\ngolang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=\ngolang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8=\ngolang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=\ngolang.org/x/net v0.0.0-20210610132358-84b48f89b13b/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=\ngolang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=\ngolang.org/x/net v0.0.0-20210813160813-60bc85c4be6d h1:LO7XpTYMwTqxjLcGWPijK3vRXg1aWdlNOVOHRq45d7c=\ngolang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=\ngolang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod 
h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=\ngolang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=\ngolang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=\ngolang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=\ngolang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=\ngolang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=\ngolang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=\ngolang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=\ngolang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=\ngolang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=\ngolang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=\ngolang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=\ngolang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=\ngolang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=\ngolang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=\ngolang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod 
h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=\ngolang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=\ngolang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190514135907-3a4b5fb9f71f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190522044717-8097e1b27ff5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190602015325-4c4f7f33c9ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190606203320-7fc4e5ec1444/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190801041406-cbf593c0f2f3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190812073006-9eafafc0a87e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20191115151921-52ab43148777/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20191210023423-ac6580df4449/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200120151820-655fe14d7479/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200217220822-9197077df867/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200622214017-ed371f2e16b4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200728102440-3e129f6d46b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200817155316-9781c653f443/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200909081042-eff7692f9009/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200916030750-2334cc1a136f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200922070232-aee5d888a860/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20201024232916-9f70ab9862d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20201112073958-5cba982894dd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20201117170446-d9b008d0a637/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20201202213521-69691e467435/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210426230700-d19ff857e887/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=\ngolang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20211205182925-97ca703d548d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/sys v0.0.0-20220111092808-5a964db01320 h1:0jf+tOCoZ3LyutmCOWpVni1chK4VfFLhRsDK7MhqGRY=\ngolang.org/x/sys v0.0.0-20220111092808-5a964db01320/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=\ngolang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=\ngolang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=\ngolang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=\ngolang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=\ngolang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=\ngolang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=\ngolang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=\ngolang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=\ngolang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=\ngolang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=\ngolang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=\ngolang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=\ngolang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=\ngolang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=\ngolang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=\ngolang.org/x/time v0.0.0-20200630173020-3af7569d3a1e h1:EHBhcS0mlXEAVwNyO2dLfjToGsyY4j24pTs2ScHnX7s=\ngolang.org/x/time 
v0.0.0-20200630173020-3af7569d3a1e/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=\ngolang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=\ngolang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=\ngolang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=\ngolang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=\ngolang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=\ngolang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=\ngolang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=\ngolang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=\ngolang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=\ngolang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=\ngolang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=\ngolang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=\ngolang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=\ngolang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=\ngolang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=\ngolang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=\ngolang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=\ngolang.org/x/tools 
v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=\ngolang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools 
v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=\ngolang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=\ngolang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=\ngolang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=\ngolang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=\ngolang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=\ngolang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=\ngolang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=\ngolang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=\ngolang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=\ngolang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=\ngolang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=\ngolang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE=\ngolang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=\ngolang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=\ngolang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=\ngolang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=\ngolang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=\ngolang.org/x/tools v0.1.0/go.mod 
h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=\ngolang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=\ngolang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=\ngolang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=\ngolang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=\ngolang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=\ngolang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=\ngolang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=\ngolang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=\ngolang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=\ngolang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=\ngoogle.golang.org/api v0.0.0-20160322025152-9bf6e6e569ff/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=\ngoogle.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=\ngoogle.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=\ngoogle.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=\ngoogle.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=\ngoogle.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=\ngoogle.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=\ngoogle.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=\ngoogle.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=\ngoogle.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=\ngoogle.golang.org/api v0.19.0/go.mod 
h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=\ngoogle.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=\ngoogle.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=\ngoogle.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=\ngoogle.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=\ngoogle.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM=\ngoogle.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc=\ngoogle.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg=\ngoogle.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE=\ngoogle.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8=\ngoogle.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU=\ngoogle.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94=\ngoogle.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo=\ngoogle.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4=\ngoogle.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw=\ngoogle.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU=\ngoogle.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k=\ngoogle.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE=\ngoogle.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE=\ngoogle.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI=\ngoogle.golang.org/api v0.59.0/go.mod h1:sT2boj7M9YJxZzgeZqXogmhfmRWDtPzT31xkieUbuZU=\ngoogle.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I=\ngoogle.golang.org/api v0.62.0/go.mod h1:dKmwPCydfsad4qCH08MSdgWjfHOyfpd4VtDGgRFdavw=\ngoogle.golang.org/api v0.63.0/go.mod 
h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo=\ngoogle.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=\ngoogle.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=\ngoogle.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=\ngoogle.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=\ngoogle.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=\ngoogle.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=\ngoogle.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=\ngoogle.golang.org/cloud v0.0.0-20151119220103-975617b05ea8/go.mod h1:0H1ncTHf11KCFhTc/+EFRbzSCOZx+VUbRMk55Yv5MYk=\ngoogle.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=\ngoogle.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=\ngoogle.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=\ngoogle.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=\ngoogle.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=\ngoogle.golang.org/genproto v0.0.0-20190522204451-c2c4e71fbf69/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s=\ngoogle.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=\ngoogle.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=\ngoogle.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=\ngoogle.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod 
h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=\ngoogle.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=\ngoogle.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=\ngoogle.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=\ngoogle.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=\ngoogle.golang.org/genproto v0.0.0-20200117163144-32f20d992d24/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=\ngoogle.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=\ngoogle.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA=\ngoogle.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=\ngoogle.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=\ngoogle.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=\ngoogle.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=\ngoogle.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=\ngoogle.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=\ngoogle.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=\ngoogle.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=\ngoogle.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=\ngoogle.golang.org/genproto 
v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U=\ngoogle.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=\ngoogle.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=\ngoogle.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20201110150050-8816d57aaa9a/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=\ngoogle.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod 
h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A=\ngoogle.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A=\ngoogle.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=\ngoogle.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=\ngoogle.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=\ngoogle.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24=\ngoogle.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k=\ngoogle.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k=\ngoogle.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48=\ngoogle.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48=\ngoogle.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w=\ngoogle.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=\ngoogle.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=\ngoogle.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=\ngoogle.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=\ngoogle.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=\ngoogle.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto 
v0.0.0-20211008145708-270636b82663/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20211028162531-8db9c33dc351/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20211129164237-f09f9a12af12/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20211203200212-54befc351ae9/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa h1:I0YcKz0I7OAhddo7ya8kMnvprhcWM045PmkBdMO9zN0=\ngoogle.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=\ngoogle.golang.org/grpc v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=\ngoogle.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=\ngoogle.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=\ngoogle.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=\ngoogle.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=\ngoogle.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=\ngoogle.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=\ngoogle.golang.org/grpc v1.24.0/go.mod h1:XDChyiUovWa60DnaeDeZmSW86xtLtjtZbwvSiRnRtcA=\ngoogle.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=\ngoogle.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=\ngoogle.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=\ngoogle.golang.org/grpc v1.27.1/go.mod 
h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=\ngoogle.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60=\ngoogle.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=\ngoogle.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=\ngoogle.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=\ngoogle.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=\ngoogle.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=\ngoogle.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=\ngoogle.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8=\ngoogle.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=\ngoogle.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=\ngoogle.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=\ngoogle.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=\ngoogle.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=\ngoogle.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=\ngoogle.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE=\ngoogle.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE=\ngoogle.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=\ngoogle.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=\ngoogle.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU=\ngoogle.golang.org/grpc v1.43.0 h1:Eeu7bZtDZ2DpRCsLhUlcrLnvYaMK1Gz86a+hMVvELmM=\ngoogle.golang.org/grpc v1.43.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU=\ngoogle.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod 
h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw=\ngoogle.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=\ngoogle.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=\ngoogle.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=\ngoogle.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=\ngoogle.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=\ngoogle.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=\ngoogle.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=\ngoogle.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=\ngoogle.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=\ngoogle.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=\ngoogle.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=\ngoogle.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=\ngoogle.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ=\ngoogle.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=\ngopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U=\ngopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=\ngopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=\ngopkg.in/check.v1 v1.0.0-20141024133853-64131543e789/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=\ngopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=\ngopkg.in/check.v1 
v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=\ngopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=\ngopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=\ngopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw=\ngopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=\ngopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=\ngopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo=\ngopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=\ngopkg.in/ini.v1 v1.66.2 h1:XfR1dOYubytKy4Shzc2LHrrGhU0lDCfDGG1yLPmpgsI=\ngopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=\ngopkg.in/natefinch/lumberjack.v2 v2.0.0 h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8=\ngopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k=\ngopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=\ngopkg.in/square/go-jose.v2 v2.2.2/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=\ngopkg.in/square/go-jose.v2 v2.3.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=\ngopkg.in/square/go-jose.v2 v2.5.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=\ngopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=\ngopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=\ngopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=\ngopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=\ngopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=\ngopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=\ngopkg.in/yaml.v2 v2.2.5/go.mod 
h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=\ngopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=\ngopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=\ngopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=\ngopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=\ngopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=\ngopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=\ngopkg.in/yaml.v3 v3.0.0 h1:hjy8E9ON/egN1tAYqKb61G10WtihqetD4sz2H+8nIeA=\ngopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=\ngotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo=\ngotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=\ngotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk=\ngotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8=\ngotest.tools/v3 v3.1.0 h1:rVV8Tcg/8jHUkPUorwjaMTtemIMVXfIPKiOqnhEhakk=\ngotest.tools/v3 v3.1.0/go.mod h1:fHy7eyTmJFO5bQbUsEGQ1v4m2J3Jz9eWL54TP2/ZuYQ=\nhonnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=\nhonnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=\nhonnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=\nhonnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=\nhonnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=\nhonnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=\nhonnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=\nk8s.io/api v0.20.1/go.mod 
h1:KqwcCVogGxQY3nBlRpwt+wpAMF/KjaCc7RpywacvqUo=\nk8s.io/api v0.20.4/go.mod h1:++lNL1AJMkDymriNniQsWRkMDzRaX2Y/POTUi8yvqYQ=\nk8s.io/api v0.20.6/go.mod h1:X9e8Qag6JV/bL5G6bU8sdVRltWKmdHsFUGS3eVndqE8=\nk8s.io/apimachinery v0.20.1/go.mod h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU=\nk8s.io/apimachinery v0.20.4/go.mod h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU=\nk8s.io/apimachinery v0.20.6/go.mod h1:ejZXtW1Ra6V1O5H8xPBGz+T3+4gfkTCeExAHKU57MAc=\nk8s.io/apiserver v0.20.1/go.mod h1:ro5QHeQkgMS7ZGpvf4tSMx6bBOgPfE+f52KwvXfScaU=\nk8s.io/apiserver v0.20.4/go.mod h1:Mc80thBKOyy7tbvFtB4kJv1kbdD0eIH8k8vianJcbFM=\nk8s.io/apiserver v0.20.6/go.mod h1:QIJXNt6i6JB+0YQRNcS0hdRHJlMhflFmsBDeSgT1r8Q=\nk8s.io/client-go v0.20.1/go.mod h1:/zcHdt1TeWSd5HoUe6elJmHSQ6uLLgp4bIJHVEuy+/Y=\nk8s.io/client-go v0.20.4/go.mod h1:LiMv25ND1gLUdBeYxBIwKpkSC5IsozMMmOOeSJboP+k=\nk8s.io/client-go v0.20.6/go.mod h1:nNQMnOvEUEsOzRRFIIkdmYOjAZrC8bgq0ExboWSU1I0=\nk8s.io/component-base v0.20.1/go.mod h1:guxkoJnNoh8LNrbtiQOlyp2Y2XFCZQmrcg2n/DeYNLk=\nk8s.io/component-base v0.20.4/go.mod h1:t4p9EdiagbVCJKrQ1RsA5/V4rFQNDfRlevJajlGwgjI=\nk8s.io/component-base v0.20.6/go.mod h1:6f1MPBAeI+mvuts3sIdtpjljHWBQ2cIy38oBIWMYnrM=\nk8s.io/cri-api v0.17.3/go.mod h1:X1sbHmuXhwaHs9xxYffLqJogVsnI+f6cPRcgPel7ywM=\nk8s.io/cri-api v0.20.1/go.mod h1:2JRbKt+BFLTjtrILYVqQK5jqhI+XNdF6UiGMgczeBCI=\nk8s.io/cri-api v0.20.4/go.mod h1:2JRbKt+BFLTjtrILYVqQK5jqhI+XNdF6UiGMgczeBCI=\nk8s.io/cri-api v0.20.6/go.mod h1:ew44AjNXwyn1s0U4xCKGodU7J1HzBeZ1MpGrpa5r8Yc=\nk8s.io/gengo v0.0.0-20200413195148-3a45101e95ac/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0=\nk8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE=\nk8s.io/klog/v2 v2.4.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y=\nk8s.io/kube-openapi v0.0.0-20201113171705-d219536bb9fd/go.mod h1:WOJ3KddDSol4tAGcJo0Tvi+dK12EcqSLqcWsryKMpfM=\nk8s.io/kubernetes v1.13.0/go.mod h1:ocZa8+6APFNC2tX1DZASIbocyYT5jHzqFVsY5aoB7Jk=\nk8s.io/utils 
v0.0.0-20201110183641-67b214c5f920/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA=\nrsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=\nrsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=\nrsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=\nsigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.14/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg=\nsigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.15/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg=\nsigs.k8s.io/structured-merge-diff/v4 v4.0.2/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw=\nsigs.k8s.io/structured-merge-diff/v4 v4.0.3/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw=\nsigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o=\nsigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc=\n"
  },
  {
    "path": "mocks/AzureClient.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\tio \"io\"\n\n\tcore \"github.com/LambdaTest/test-at-scale/pkg/core\"\n\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// AzureClient is an autogenerated mock type for the AzureClient type\ntype AzureClient struct {\n\tmock.Mock\n}\n\n// Create provides a mock function with given fields: ctx, path, reader, mimeType\nfunc (_m *AzureClient) Create(ctx context.Context, path string, reader io.Reader, mimeType string) (string, error) {\n\tret := _m.Called(ctx, path, reader, mimeType)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(context.Context, string, io.Reader, string) string); ok {\n\t\tr0 = rf(ctx, path, reader, mimeType)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, string, io.Reader, string) error); ok {\n\t\tr1 = rf(ctx, path, reader, mimeType)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\n// CreateUsingSASURL provides a mock function with given fields: ctx, sasURL, reader, mimeType\nfunc (_m *AzureClient) CreateUsingSASURL(ctx context.Context, sasURL string, reader io.Reader, mimeType string) (string, error) {\n\tret := _m.Called(ctx, sasURL, reader, mimeType)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(context.Context, string, io.Reader, string) string); ok {\n\t\tr0 = rf(ctx, sasURL, reader, mimeType)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, string, io.Reader, string) error); ok {\n\t\tr1 = rf(ctx, sasURL, reader, mimeType)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\n// Exists provides a mock function with given fields: ctx, path\nfunc (_m *AzureClient) Exists(ctx context.Context, path string) (bool, error) {\n\tret := _m.Called(ctx, path)\n\n\tvar r0 bool\n\tif rf, ok := ret.Get(0).(func(context.Context, string) bool); ok {\n\t\tr0 = rf(ctx, 
path)\n\t} else {\n\t\tr0 = ret.Get(0).(bool)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, string) error); ok {\n\t\tr1 = rf(ctx, path)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\n// Find provides a mock function with given fields: ctx, path\nfunc (_m *AzureClient) Find(ctx context.Context, path string) (io.ReadCloser, error) {\n\tret := _m.Called(ctx, path)\n\n\tvar r0 io.ReadCloser\n\tif rf, ok := ret.Get(0).(func(context.Context, string) io.ReadCloser); ok {\n\t\tr0 = rf(ctx, path)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(io.ReadCloser)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, string) error); ok {\n\t\tr1 = rf(ctx, path)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\n// FindUsingSASUrl provides a mock function with given fields: ctx, sasURL\nfunc (_m *AzureClient) FindUsingSASUrl(ctx context.Context, sasURL string) (io.ReadCloser, error) {\n\tret := _m.Called(ctx, sasURL)\n\n\tvar r0 io.ReadCloser\n\tif rf, ok := ret.Get(0).(func(context.Context, string) io.ReadCloser); ok {\n\t\tr0 = rf(ctx, sasURL)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(io.ReadCloser)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, string) error); ok {\n\t\tr1 = rf(ctx, sasURL)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\n// GetSASURL provides a mock function with given fields: ctx, purpose, query\nfunc (_m *AzureClient) GetSASURL(ctx context.Context, purpose core.SASURLPurpose, query map[string]interface{}) (string, error) {\n\tret := _m.Called(ctx, purpose, query)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(context.Context, core.SASURLPurpose, map[string]interface{}) string); ok {\n\t\tr0 = rf(ctx, purpose, query)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, core.SASURLPurpose, map[string]interface{}) error); ok 
{\n\t\tr1 = rf(ctx, purpose, query)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\ntype mockConstructorTestingTNewAzureClient interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewAzureClient creates a new instance of AzureClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewAzureClient(t mockConstructorTestingTNewAzureClient) *AzureClient {\n\tmock := &AzureClient{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/BlockTestService.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// BlockTestService is an autogenerated mock type for the BlockTestService type\ntype BlockTestService struct {\n\tmock.Mock\n}\n\n// GetBlockTests provides a mock function with given fields: ctx, blocklistYAML, branch\nfunc (_m *BlockTestService) GetBlockTests(ctx context.Context, blocklistYAML []string, branch string) error {\n\tret := _m.Called(ctx, blocklistYAML, branch)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, []string, string) error); ok {\n\t\tr0 = rf(ctx, blocklistYAML, branch)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\ntype mockConstructorTestingTNewBlockTestService interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewBlockTestService creates a new instance of BlockTestService. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewBlockTestService(t mockConstructorTestingTNewBlockTestService) *BlockTestService {\n\tmock := &BlockTestService{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/Builder.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcore \"github.com/LambdaTest/test-at-scale/pkg/core\"\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// Builder is an autogenerated mock type for the Builder type\ntype Builder struct {\n\tmock.Mock\n}\n\n// GetDriver provides a mock function with given fields: version\nfunc (_m *Builder) GetDriver(version int) (core.Driver, error) {\n\tret := _m.Called(version)\n\n\tvar r0 core.Driver\n\tif rf, ok := ret.Get(0).(func(int) core.Driver); ok {\n\t\tr0 = rf(version)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(core.Driver)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(int) error); ok {\n\t\tr1 = rf(version)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\ntype mockConstructorTestingTNewBuilder interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewBuilder creates a new instance of Builder. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewBuilder(t mockConstructorTestingTNewBuilder) *Builder {\n\tmock := &Builder{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/CacheStore.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// CacheStore is an autogenerated mock type for the CacheStore type\ntype CacheStore struct {\n\tmock.Mock\n}\n\n// CacheWorkspace provides a mock function with given fields: ctx, subModule\nfunc (_m *CacheStore) CacheWorkspace(ctx context.Context, subModule string) error {\n\tret := _m.Called(ctx, subModule)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string) error); ok {\n\t\tr0 = rf(ctx, subModule)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\n// Download provides a mock function with given fields: ctx, cacheKey\nfunc (_m *CacheStore) Download(ctx context.Context, cacheKey string) error {\n\tret := _m.Called(ctx, cacheKey)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string) error); ok {\n\t\tr0 = rf(ctx, cacheKey)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\n// ExtractWorkspace provides a mock function with given fields: ctx, subModule\nfunc (_m *CacheStore) ExtractWorkspace(ctx context.Context, subModule string) error {\n\tret := _m.Called(ctx, subModule)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string) error); ok {\n\t\tr0 = rf(ctx, subModule)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\n// Upload provides a mock function with given fields: ctx, cacheKey, itemsToCompress\nfunc (_m *CacheStore) Upload(ctx context.Context, cacheKey string, itemsToCompress ...string) error {\n\t_va := make([]interface{}, len(itemsToCompress))\n\tfor _i := range itemsToCompress {\n\t\t_va[_i] = itemsToCompress[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, cacheKey)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, ...string) error); ok {\n\t\tr0 = rf(ctx, cacheKey, itemsToCompress...)\n\t} else 
{\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\ntype mockConstructorTestingTNewCacheStore interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewCacheStore creates a new instance of CacheStore. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewCacheStore(t mockConstructorTestingTNewCacheStore) *CacheStore {\n\tmock := &CacheStore{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/CoverageService.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\n\tcore \"github.com/LambdaTest/test-at-scale/pkg/core\"\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// CoverageService is an autogenerated mock type for the CoverageService type\ntype CoverageService struct {\n\tmock.Mock\n}\n\n// MergeAndUpload provides a mock function with given fields: ctx, payload\nfunc (_m *CoverageService) MergeAndUpload(ctx context.Context, payload *core.Payload) error {\n\tret := _m.Called(ctx, payload)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *core.Payload) error); ok {\n\t\tr0 = rf(ctx, payload)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\ntype mockConstructorTestingTNewCoverageService interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewCoverageService creates a new instance of CoverageService. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewCoverageService(t mockConstructorTestingTNewCoverageService) *CoverageService {\n\tmock := &CoverageService{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/DiffManager.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\n\tcore \"github.com/LambdaTest/test-at-scale/pkg/core\"\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// DiffManager is an autogenerated mock type for the DiffManager type\ntype DiffManager struct {\n\tmock.Mock\n}\n\n// GetChangedFiles provides a mock function with given fields: ctx, payload, oauth\nfunc (_m *DiffManager) GetChangedFiles(ctx context.Context, payload *core.Payload, oauth *core.Oauth) (map[string]int, error) {\n\tret := _m.Called(ctx, payload, oauth)\n\n\tvar r0 map[string]int\n\tif rf, ok := ret.Get(0).(func(context.Context, *core.Payload, *core.Oauth) map[string]int); ok {\n\t\tr0 = rf(ctx, payload, oauth)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(map[string]int)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, *core.Payload, *core.Oauth) error); ok {\n\t\tr1 = rf(ctx, payload, oauth)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\ntype mockConstructorTestingTNewDiffManager interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewDiffManager creates a new instance of DiffManager. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewDiffManager(t mockConstructorTestingTNewDiffManager) *DiffManager {\n\tmock := &DiffManager{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/DockerRunner.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\n\tcore \"github.com/LambdaTest/test-at-scale/pkg/core\"\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// DockerRunner is an autogenerated mock type for the DockerRunner type\ntype DockerRunner struct {\n\tmock.Mock\n}\n\n// Create provides a mock function with given fields: _a0, _a1\nfunc (_m *DockerRunner) Create(_a0 context.Context, _a1 *core.RunnerOptions) core.ContainerStatus {\n\tret := _m.Called(_a0, _a1)\n\n\tvar r0 core.ContainerStatus\n\tif rf, ok := ret.Get(0).(func(context.Context, *core.RunnerOptions) core.ContainerStatus); ok {\n\t\tr0 = rf(_a0, _a1)\n\t} else {\n\t\tr0 = ret.Get(0).(core.ContainerStatus)\n\t}\n\n\treturn r0\n}\n\n// Destroy provides a mock function with given fields: ctx, r\nfunc (_m *DockerRunner) Destroy(ctx context.Context, r *core.RunnerOptions) error {\n\tret := _m.Called(ctx, r)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *core.RunnerOptions) error); ok {\n\t\tr0 = rf(ctx, r)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\n// GetInfo provides a mock function with given fields: _a0\nfunc (_m *DockerRunner) GetInfo(_a0 context.Context) (float32, int64) {\n\tret := _m.Called(_a0)\n\n\tvar r0 float32\n\tif rf, ok := ret.Get(0).(func(context.Context) float32); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tr0 = ret.Get(0).(float32)\n\t}\n\n\tvar r1 int64\n\tif rf, ok := ret.Get(1).(func(context.Context) int64); ok {\n\t\tr1 = rf(_a0)\n\t} else {\n\t\tr1 = ret.Get(1).(int64)\n\t}\n\n\treturn r0, r1\n}\n\n// Initiate provides a mock function with given fields: _a0, _a1, _a2\nfunc (_m *DockerRunner) Initiate(_a0 context.Context, _a1 *core.RunnerOptions, _a2 chan core.ContainerStatus) {\n\t_m.Called(_a0, _a1, _a2)\n}\n\n// KillRunningDocker provides a mock function with given fields: ctx\nfunc (_m *DockerRunner) KillRunningDocker(ctx context.Context) {\n\t_m.Called(ctx)\n}\n\n// 
PullImage provides a mock function with given fields: containerImageConfig, r\nfunc (_m *DockerRunner) PullImage(containerImageConfig *core.ContainerImageConfig, r *core.RunnerOptions) error {\n\tret := _m.Called(containerImageConfig, r)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(*core.ContainerImageConfig, *core.RunnerOptions) error); ok {\n\t\tr0 = rf(containerImageConfig, r)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\n// Run provides a mock function with given fields: _a0, _a1\nfunc (_m *DockerRunner) Run(_a0 context.Context, _a1 *core.RunnerOptions) core.ContainerStatus {\n\tret := _m.Called(_a0, _a1)\n\n\tvar r0 core.ContainerStatus\n\tif rf, ok := ret.Get(0).(func(context.Context, *core.RunnerOptions) core.ContainerStatus); ok {\n\t\tr0 = rf(_a0, _a1)\n\t} else {\n\t\tr0 = ret.Get(0).(core.ContainerStatus)\n\t}\n\n\treturn r0\n}\n\n// WaitForCompletion provides a mock function with given fields: ctx, r\nfunc (_m *DockerRunner) WaitForCompletion(ctx context.Context, r *core.RunnerOptions) error {\n\tret := _m.Called(ctx, r)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *core.RunnerOptions) error); ok {\n\t\tr0 = rf(ctx, r)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\ntype mockConstructorTestingTNewDockerRunner interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewDockerRunner creates a new instance of DockerRunner. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewDockerRunner(t mockConstructorTestingTNewDockerRunner) *DockerRunner {\n\tmock := &DockerRunner{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/Driver.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\n\tcore \"github.com/LambdaTest/test-at-scale/pkg/core\"\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// Driver is an autogenerated mock type for the Driver type\ntype Driver struct {\n\tmock.Mock\n}\n\n// RunDiscovery provides a mock function with given fields: ctx, payload, taskPayload, oauth, coverageDir, secretMap\nfunc (_m *Driver) RunDiscovery(ctx context.Context, payload *core.Payload, taskPayload *core.TaskPayload, oauth *core.Oauth, coverageDir string, secretMap map[string]string) error {\n\tret := _m.Called(ctx, payload, taskPayload, oauth, coverageDir, secretMap)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *core.Payload, *core.TaskPayload, *core.Oauth, string, map[string]string) error); ok {\n\t\tr0 = rf(ctx, payload, taskPayload, oauth, coverageDir, secretMap)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\n// RunExecution provides a mock function with given fields: ctx, payload, taskPayload, oauth, coverageDir, secretMap\nfunc (_m *Driver) RunExecution(ctx context.Context, payload *core.Payload, taskPayload *core.TaskPayload, oauth *core.Oauth, coverageDir string, secretMap map[string]string) error {\n\tret := _m.Called(ctx, payload, taskPayload, oauth, coverageDir, secretMap)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *core.Payload, *core.TaskPayload, *core.Oauth, string, map[string]string) error); ok {\n\t\tr0 = rf(ctx, payload, taskPayload, oauth, coverageDir, secretMap)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\ntype mockConstructorTestingTNewDriver interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewDriver creates a new instance of Driver. 
It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewDriver(t mockConstructorTestingTNewDriver) *Driver {\n\tmock := &Driver{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/ExecutionManager.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\n\tcore \"github.com/LambdaTest/test-at-scale/pkg/core\"\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// ExecutionManager is an autogenerated mock type for the ExecutionManager type\ntype ExecutionManager struct {\n\tmock.Mock\n}\n\n// ExecuteInternalCommands provides a mock function with given fields: ctx, commandType, commands, cwd, envMap, secretData\nfunc (_m *ExecutionManager) ExecuteInternalCommands(ctx context.Context, commandType core.CommandType, commands []string, cwd string, envMap map[string]string, secretData map[string]string) error {\n\tret := _m.Called(ctx, commandType, commands, cwd, envMap, secretData)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, core.CommandType, []string, string, map[string]string, map[string]string) error); ok {\n\t\tr0 = rf(ctx, commandType, commands, cwd, envMap, secretData)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\n// ExecuteUserCommands provides a mock function with given fields: ctx, commandType, payload, runConfig, secretData, logwriter, cwd\nfunc (_m *ExecutionManager) ExecuteUserCommands(ctx context.Context, commandType core.CommandType, payload *core.Payload, runConfig *core.Run, secretData map[string]string, logwriter core.LogWriterStrategy, cwd string) error {\n\tret := _m.Called(ctx, commandType, payload, runConfig, secretData, logwriter, cwd)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, core.CommandType, *core.Payload, *core.Run, map[string]string, core.LogWriterStrategy, string) error); ok {\n\t\tr0 = rf(ctx, commandType, payload, runConfig, secretData, logwriter, cwd)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\n// GetEnvVariables provides a mock function with given fields: envMap, secretData\nfunc (_m *ExecutionManager) GetEnvVariables(envMap map[string]string, secretData map[string]string) ([]string, error) 
{\n\tret := _m.Called(envMap, secretData)\n\n\tvar r0 []string\n\tif rf, ok := ret.Get(0).(func(map[string]string, map[string]string) []string); ok {\n\t\tr0 = rf(envMap, secretData)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]string)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(map[string]string, map[string]string) error); ok {\n\t\tr1 = rf(envMap, secretData)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\ntype mockConstructorTestingTNewExecutionManager interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewExecutionManager creates a new instance of ExecutionManager. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewExecutionManager(t mockConstructorTestingTNewExecutionManager) *ExecutionManager {\n\tmock := &ExecutionManager{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/GitManager.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\n\tcore \"github.com/LambdaTest/test-at-scale/pkg/core\"\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// GitManager is an autogenerated mock type for the GitManager type\ntype GitManager struct {\n\tmock.Mock\n}\n\n// Clone provides a mock function with given fields: ctx, payload, oauth\nfunc (_m *GitManager) Clone(ctx context.Context, payload *core.Payload, oauth *core.Oauth) error {\n\tret := _m.Called(ctx, payload, oauth)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *core.Payload, *core.Oauth) error); ok {\n\t\tr0 = rf(ctx, payload, oauth)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\ntype mockConstructorTestingTNewGitManager interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewGitManager creates a new instance of GitManager. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewGitManager(t mockConstructorTestingTNewGitManager) *GitManager {\n\tmock := &GitManager{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/ListSubModuleService.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// ListSubModuleService is an autogenerated mock type for the ListSubModuleService type\ntype ListSubModuleService struct {\n\tmock.Mock\n}\n\n// Send provides a mock function with given fields: ctx, buildID, totalSubmodule\nfunc (_m *ListSubModuleService) Send(ctx context.Context, buildID string, totalSubmodule int) error {\n\tret := _m.Called(ctx, buildID, totalSubmodule)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, int) error); ok {\n\t\tr0 = rf(ctx, buildID, totalSubmodule)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\ntype mockConstructorTestingTNewListSubModuleService interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewListSubModuleService creates a new instance of ListSubModuleService. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewListSubModuleService(t mockConstructorTestingTNewListSubModuleService) *ListSubModuleService {\n\tmock := &ListSubModuleService{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/LogWriterStrategy.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\n\tio \"io\"\n\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// LogWriterStrategy is an autogenerated mock type for the LogWriterStrategy type\ntype LogWriterStrategy struct {\n\tmock.Mock\n}\n\n// Write provides a mock function with given fields: ctx, reader\nfunc (_m *LogWriterStrategy) Write(ctx context.Context, reader io.Reader) <-chan error {\n\tret := _m.Called(ctx, reader)\n\n\tvar r0 <-chan error\n\tif rf, ok := ret.Get(0).(func(context.Context, io.Reader) <-chan error); ok {\n\t\tr0 = rf(ctx, reader)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(<-chan error)\n\t\t}\n\t}\n\n\treturn r0\n}\n\ntype mockConstructorTestingTNewLogWriterStrategy interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewLogWriterStrategy creates a new instance of LogWriterStrategy. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewLogWriterStrategy(t mockConstructorTestingTNewLogWriterStrategy) *LogWriterStrategy {\n\tmock := &LogWriterStrategy{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/Logger.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tlumber \"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// Logger is an autogenerated mock type for the Logger type\ntype Logger struct {\n\tmock.Mock\n}\n\n// Debugf provides a mock function with given fields: format, args\nfunc (_m *Logger) Debugf(format string, args ...interface{}) {\n\tvar _ca []interface{}\n\t_ca = append(_ca, format)\n\t_ca = append(_ca, args...)\n\t_m.Called(_ca...)\n}\n\n// Errorf provides a mock function with given fields: format, args\nfunc (_m *Logger) Errorf(format string, args ...interface{}) {\n\tvar _ca []interface{}\n\t_ca = append(_ca, format)\n\t_ca = append(_ca, args...)\n\t_m.Called(_ca...)\n}\n\n// Fatalf provides a mock function with given fields: format, args\nfunc (_m *Logger) Fatalf(format string, args ...interface{}) {\n\tvar _ca []interface{}\n\t_ca = append(_ca, format)\n\t_ca = append(_ca, args...)\n\t_m.Called(_ca...)\n}\n\n// Infof provides a mock function with given fields: format, args\nfunc (_m *Logger) Infof(format string, args ...interface{}) {\n\tvar _ca []interface{}\n\t_ca = append(_ca, format)\n\t_ca = append(_ca, args...)\n\t_m.Called(_ca...)\n}\n\n// Panicf provides a mock function with given fields: format, args\nfunc (_m *Logger) Panicf(format string, args ...interface{}) {\n\tvar _ca []interface{}\n\t_ca = append(_ca, format)\n\t_ca = append(_ca, args...)\n\t_m.Called(_ca...)\n}\n\n// Warnf provides a mock function with given fields: format, args\nfunc (_m *Logger) Warnf(format string, args ...interface{}) {\n\tvar _ca []interface{}\n\t_ca = append(_ca, format)\n\t_ca = append(_ca, args...)\n\t_m.Called(_ca...)\n}\n\n// WithFields provides a mock function with given fields: keyValues\nfunc (_m *Logger) WithFields(keyValues lumber.Fields) lumber.Logger {\n\tret := _m.Called(keyValues)\n\n\tvar r0 lumber.Logger\n\tif rf, ok := 
ret.Get(0).(func(lumber.Fields) lumber.Logger); ok {\n\t\tr0 = rf(keyValues)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(lumber.Logger)\n\t\t}\n\t}\n\n\treturn r0\n}\n\ntype mockConstructorTestingTNewLogger interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewLogger creates a new instance of Logger. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewLogger(t mockConstructorTestingTNewLogger) *Logger {\n\tmock := &Logger{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/PayloadManager.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\n\tcore \"github.com/LambdaTest/test-at-scale/pkg/core\"\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// PayloadManager is an autogenerated mock type for the PayloadManager type\ntype PayloadManager struct {\n\tmock.Mock\n}\n\n// FetchPayload provides a mock function with given fields: ctx, payloadAddress\nfunc (_m *PayloadManager) FetchPayload(ctx context.Context, payloadAddress string) (*core.Payload, error) {\n\tret := _m.Called(ctx, payloadAddress)\n\n\tvar r0 *core.Payload\n\tif rf, ok := ret.Get(0).(func(context.Context, string) *core.Payload); ok {\n\t\tr0 = rf(ctx, payloadAddress)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*core.Payload)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, string) error); ok {\n\t\tr1 = rf(ctx, payloadAddress)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\n// ValidatePayload provides a mock function with given fields: ctx, payload\nfunc (_m *PayloadManager) ValidatePayload(ctx context.Context, payload *core.Payload) error {\n\tret := _m.Called(ctx, payload)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *core.Payload) error); ok {\n\t\tr0 = rf(ctx, payload)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\ntype mockConstructorTestingTNewPayloadManager interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewPayloadManager creates a new instance of PayloadManager. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewPayloadManager(t mockConstructorTestingTNewPayloadManager) *PayloadManager {\n\tmock := &PayloadManager{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/Requests.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// Requests is an autogenerated mock type for the Requests type\ntype Requests struct {\n\tmock.Mock\n}\n\n// MakeAPIRequest provides a mock function with given fields: ctx, httpMethod, endpoint, body, params, headers\nfunc (_m *Requests) MakeAPIRequest(ctx context.Context, httpMethod string, endpoint string, body []byte, params map[string]interface{}, headers map[string]string) ([]byte, int, error) {\n\tret := _m.Called(ctx, httpMethod, endpoint, body, params, headers)\n\n\tvar r0 []byte\n\tif rf, ok := ret.Get(0).(func(context.Context, string, string, []byte, map[string]interface{}, map[string]string) []byte); ok {\n\t\tr0 = rf(ctx, httpMethod, endpoint, body, params, headers)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]byte)\n\t\t}\n\t}\n\n\tvar r1 int\n\tif rf, ok := ret.Get(1).(func(context.Context, string, string, []byte, map[string]interface{}, map[string]string) int); ok {\n\t\tr1 = rf(ctx, httpMethod, endpoint, body, params, headers)\n\t} else {\n\t\tr1 = ret.Get(1).(int)\n\t}\n\n\tvar r2 error\n\tif rf, ok := ret.Get(2).(func(context.Context, string, string, []byte, map[string]interface{}, map[string]string) error); ok {\n\t\tr2 = rf(ctx, httpMethod, endpoint, body, params, headers)\n\t} else {\n\t\tr2 = ret.Error(2)\n\t}\n\n\treturn r0, r1, r2\n}\n\ntype mockConstructorTestingTNewRequests interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewRequests creates a new instance of Requests. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewRequests(t mockConstructorTestingTNewRequests) *Requests {\n\tmock := &Requests{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/SecretParser.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcore \"github.com/LambdaTest/test-at-scale/pkg/core\"\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// SecretParser is an autogenerated mock type for the SecretParser type\ntype SecretParser struct {\n\tmock.Mock\n}\n\n// Expired provides a mock function with given fields: token\nfunc (_m *SecretParser) Expired(token *core.Oauth) bool {\n\tret := _m.Called(token)\n\n\tvar r0 bool\n\tif rf, ok := ret.Get(0).(func(*core.Oauth) bool); ok {\n\t\tr0 = rf(token)\n\t} else {\n\t\tr0 = ret.Get(0).(bool)\n\t}\n\n\treturn r0\n}\n\n// GetOauthSecret provides a mock function with given fields: filepath\nfunc (_m *SecretParser) GetOauthSecret(filepath string) (*core.Oauth, error) {\n\tret := _m.Called(filepath)\n\n\tvar r0 *core.Oauth\n\tif rf, ok := ret.Get(0).(func(string) *core.Oauth); ok {\n\t\tr0 = rf(filepath)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*core.Oauth)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string) error); ok {\n\t\tr1 = rf(filepath)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\n// GetRepoSecret provides a mock function with given fields: _a0\nfunc (_m *SecretParser) GetRepoSecret(_a0 string) (map[string]string, error) {\n\tret := _m.Called(_a0)\n\n\tvar r0 map[string]string\n\tif rf, ok := ret.Get(0).(func(string) map[string]string); ok {\n\t\tr0 = rf(_a0)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(map[string]string)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string) error); ok {\n\t\tr1 = rf(_a0)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\n// SubstituteSecret provides a mock function with given fields: command, secretData\nfunc (_m *SecretParser) SubstituteSecret(command string, secretData map[string]string) (string, error) {\n\tret := _m.Called(command, secretData)\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func(string, 
map[string]string) string); ok {\n\t\tr0 = rf(command, secretData)\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string, map[string]string) error); ok {\n\t\tr1 = rf(command, secretData)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\ntype mockConstructorTestingTNewSecretParser interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewSecretParser creates a new instance of SecretParser. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewSecretParser(t mockConstructorTestingTNewSecretParser) *SecretParser {\n\tmock := &SecretParser{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/SecretsManager.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tconfig \"github.com/LambdaTest/test-at-scale/config\"\n\tcore \"github.com/LambdaTest/test-at-scale/pkg/core\"\n\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// SecretsManager is an autogenerated mock type for the SecretsManager type\ntype SecretsManager struct {\n\tmock.Mock\n}\n\n// GetDockerSecrets provides a mock function with given fields: r\nfunc (_m *SecretsManager) GetDockerSecrets(r *core.RunnerOptions) (core.ContainerImageConfig, error) {\n\tret := _m.Called(r)\n\n\tvar r0 core.ContainerImageConfig\n\tif rf, ok := ret.Get(0).(func(*core.RunnerOptions) core.ContainerImageConfig); ok {\n\t\tr0 = rf(r)\n\t} else {\n\t\tr0 = ret.Get(0).(core.ContainerImageConfig)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(*core.RunnerOptions) error); ok {\n\t\tr1 = rf(r)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\n// GetLambdatestSecrets provides a mock function with given fields:\nfunc (_m *SecretsManager) GetLambdatestSecrets() *config.LambdatestConfig {\n\tret := _m.Called()\n\n\tvar r0 *config.LambdatestConfig\n\tif rf, ok := ret.Get(0).(func() *config.LambdatestConfig); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*config.LambdatestConfig)\n\t\t}\n\t}\n\n\treturn r0\n}\n\n// GetSynapseName provides a mock function with given fields:\nfunc (_m *SecretsManager) GetSynapseName() string {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\treturn r0\n}\n\n// WriteGitSecrets provides a mock function with given fields: path\nfunc (_m *SecretsManager) WriteGitSecrets(path string) error {\n\tret := _m.Called(path)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string) error); ok {\n\t\tr0 = rf(path)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\n// WriteRepoSecrets provides a mock 
function with given fields: repo, path\nfunc (_m *SecretsManager) WriteRepoSecrets(repo string, path string) error {\n\tret := _m.Called(repo, path)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(string, string) error); ok {\n\t\tr0 = rf(repo, path)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\ntype mockConstructorTestingTNewSecretsManager interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewSecretsManager creates a new instance of SecretsManager. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewSecretsManager(t mockConstructorTestingTNewSecretsManager) *SecretsManager {\n\tmock := &SecretsManager{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/SynapseManager.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\n\tmock \"github.com/stretchr/testify/mock\"\n\n\tsync \"sync\"\n)\n\n// SynapseManager is an autogenerated mock type for the SynapseManager type\ntype SynapseManager struct {\n\tmock.Mock\n}\n\n// InitiateConnection provides a mock function with given fields: ctx, wg, connectionFailed\nfunc (_m *SynapseManager) InitiateConnection(ctx context.Context, wg *sync.WaitGroup, connectionFailed chan struct{}) {\n\t_m.Called(ctx, wg, connectionFailed)\n}\n\ntype mockConstructorTestingTNewSynapseManager interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewSynapseManager creates a new instance of SynapseManager. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewSynapseManager(t mockConstructorTestingTNewSynapseManager) *SynapseManager {\n\tmock := &SynapseManager{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/TASConfigManager.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\n\tcore \"github.com/LambdaTest/test-at-scale/pkg/core\"\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// TASConfigManager is an autogenerated mock type for the TASConfigManager type\ntype TASConfigManager struct {\n\tmock.Mock\n}\n\n// GetVersion provides a mock function with given fields: path\nfunc (_m *TASConfigManager) GetVersion(path string) (int, error) {\n\tret := _m.Called(path)\n\n\tvar r0 int\n\tif rf, ok := ret.Get(0).(func(string) int); ok {\n\t\tr0 = rf(path)\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(string) error); ok {\n\t\tr1 = rf(path)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\n// LoadAndValidate provides a mock function with given fields: ctx, version, path, eventType, licenseTier\nfunc (_m *TASConfigManager) LoadAndValidate(ctx context.Context, version int, path string, eventType core.EventType, licenseTier core.Tier) (interface{}, error) {\n\tret := _m.Called(ctx, version, path, eventType, licenseTier)\n\n\tvar r0 interface{}\n\tif rf, ok := ret.Get(0).(func(context.Context, int, string, core.EventType, core.Tier) interface{}); ok {\n\t\tr0 = rf(ctx, version, path, eventType, licenseTier)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(interface{})\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, int, string, core.EventType, core.Tier) error); ok {\n\t\tr1 = rf(ctx, version, path, eventType, licenseTier)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\ntype mockConstructorTestingTNewTASConfigManager interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewTASConfigManager creates a new instance of TASConfigManager. 
It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewTASConfigManager(t mockConstructorTestingTNewTASConfigManager) *TASConfigManager {\n\tmock := &TASConfigManager{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/Task.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\n\tcore \"github.com/LambdaTest/test-at-scale/pkg/core\"\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// Task is an autogenerated mock type for the Task type\ntype Task struct {\n\tmock.Mock\n}\n\n// UpdateStatus provides a mock function with given fields: ctx, payload\nfunc (_m *Task) UpdateStatus(ctx context.Context, payload *core.TaskPayload) error {\n\tret := _m.Called(ctx, payload)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *core.TaskPayload) error); ok {\n\t\tr0 = rf(ctx, payload)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\ntype mockConstructorTestingTNewTask interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewTask creates a new instance of Task. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewTask(t mockConstructorTestingTNewTask) *Task {\n\tmock := &Task{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/TestDiscoveryService.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\n\tcore \"github.com/LambdaTest/test-at-scale/pkg/core\"\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// TestDiscoveryService is an autogenerated mock type for the TestDiscoveryService type\ntype TestDiscoveryService struct {\n\tmock.Mock\n}\n\n// Discover provides a mock function with given fields: ctx, args\nfunc (_m *TestDiscoveryService) Discover(ctx context.Context, args *core.DiscoveyArgs) (*core.DiscoveryResult, error) {\n\tret := _m.Called(ctx, args)\n\n\tvar r0 *core.DiscoveryResult\n\tif rf, ok := ret.Get(0).(func(context.Context, *core.DiscoveyArgs) *core.DiscoveryResult); ok {\n\t\tr0 = rf(ctx, args)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*core.DiscoveryResult)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, *core.DiscoveyArgs) error); ok {\n\t\tr1 = rf(ctx, args)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\n// SendResult provides a mock function with given fields: ctx, testDiscoveryResult\nfunc (_m *TestDiscoveryService) SendResult(ctx context.Context, testDiscoveryResult *core.DiscoveryResult) error {\n\tret := _m.Called(ctx, testDiscoveryResult)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, *core.DiscoveryResult) error); ok {\n\t\tr0 = rf(ctx, testDiscoveryResult)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\ntype mockConstructorTestingTNewTestDiscoveryService interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewTestDiscoveryService creates a new instance of TestDiscoveryService. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewTestDiscoveryService(t mockConstructorTestingTNewTestDiscoveryService) *TestDiscoveryService {\n\tmock := &TestDiscoveryService{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/TestExecutionService.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\n\tcore \"github.com/LambdaTest/test-at-scale/pkg/core\"\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// TestExecutionService is an autogenerated mock type for the TestExecutionService type\ntype TestExecutionService struct {\n\tmock.Mock\n}\n\n// Run provides a mock function with given fields: ctx, testExecutionArgs\nfunc (_m *TestExecutionService) Run(ctx context.Context, testExecutionArgs *core.TestExecutionArgs) (*core.ExecutionResults, error) {\n\tret := _m.Called(ctx, testExecutionArgs)\n\n\tvar r0 *core.ExecutionResults\n\tif rf, ok := ret.Get(0).(func(context.Context, *core.TestExecutionArgs) *core.ExecutionResults); ok {\n\t\tr0 = rf(ctx, testExecutionArgs)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*core.ExecutionResults)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, *core.TestExecutionArgs) error); ok {\n\t\tr1 = rf(ctx, testExecutionArgs)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\n// SendResults provides a mock function with given fields: ctx, payload\nfunc (_m *TestExecutionService) SendResults(ctx context.Context, payload *core.ExecutionResults) (*core.TestReportResponsePayload, error) {\n\tret := _m.Called(ctx, payload)\n\n\tvar r0 *core.TestReportResponsePayload\n\tif rf, ok := ret.Get(0).(func(context.Context, *core.ExecutionResults) *core.TestReportResponsePayload); ok {\n\t\tr0 = rf(ctx, payload)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(*core.TestReportResponsePayload)\n\t\t}\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func(context.Context, *core.ExecutionResults) error); ok {\n\t\tr1 = rf(ctx, payload)\n\t} else {\n\t\tr1 = ret.Error(1)\n\t}\n\n\treturn r0, r1\n}\n\ntype mockConstructorTestingTNewTestExecutionService interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewTestExecutionService creates a new instance of TestExecutionService. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewTestExecutionService(t mockConstructorTestingTNewTestExecutionService) *TestExecutionService {\n\tmock := &TestExecutionService{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/TestStats.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport mock \"github.com/stretchr/testify/mock\"\n\n// TestStats is an autogenerated mock type for the TestStats type\ntype TestStats struct {\n\tmock.Mock\n}\n\n// CaptureTestStats provides a mock function with given fields: pid, collectStats\nfunc (_m *TestStats) CaptureTestStats(pid int32, collectStats bool) error {\n\tret := _m.Called(pid, collectStats)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(int32, bool) error); ok {\n\t\tr0 = rf(pid, collectStats)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\ntype mockConstructorTestingTNewTestStats interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewTestStats creates a new instance of TestStats. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewTestStats(t mockConstructorTestingTNewTestStats) *TestStats {\n\tmock := &TestStats{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "mocks/ZstdCompressor.go",
    "content": "// Code generated by mockery v2.14.0. DO NOT EDIT.\n\npackage mocks\n\nimport (\n\tcontext \"context\"\n\n\tmock \"github.com/stretchr/testify/mock\"\n)\n\n// ZstdCompressor is an autogenerated mock type for the ZstdCompressor type\ntype ZstdCompressor struct {\n\tmock.Mock\n}\n\n// Compress provides a mock function with given fields: ctx, compressedFileName, preservePath, workingDirectory, filesToCompress\nfunc (_m *ZstdCompressor) Compress(ctx context.Context, compressedFileName string, preservePath bool, workingDirectory string, filesToCompress ...string) error {\n\t_va := make([]interface{}, len(filesToCompress))\n\tfor _i := range filesToCompress {\n\t\t_va[_i] = filesToCompress[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, ctx, compressedFileName, preservePath, workingDirectory)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, bool, string, ...string) error); ok {\n\t\tr0 = rf(ctx, compressedFileName, preservePath, workingDirectory, filesToCompress...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\n// Decompress provides a mock function with given fields: ctx, filePath, preservePath, workingDirectory\nfunc (_m *ZstdCompressor) Decompress(ctx context.Context, filePath string, preservePath bool, workingDirectory string) error {\n\tret := _m.Called(ctx, filePath, preservePath, workingDirectory)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, string, bool, string) error); ok {\n\t\tr0 = rf(ctx, filePath, preservePath, workingDirectory)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}\n\ntype mockConstructorTestingTNewZstdCompressor interface {\n\tmock.TestingT\n\tCleanup(func())\n}\n\n// NewZstdCompressor creates a new instance of ZstdCompressor. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.\nfunc NewZstdCompressor(t mockConstructorTestingTNewZstdCompressor) *ZstdCompressor {\n\tmock := &ZstdCompressor{}\n\tmock.Mock.Test(t)\n\n\tt.Cleanup(func() { mock.AssertExpectations(t) })\n\n\treturn mock\n}\n"
  },
  {
    "path": "pkg/api/health/health.go",
    "content": "package health\n\nimport (\n\t\"net/http\"\n\n\t\"github.com/gin-gonic/gin\"\n)\n\n// Handler for health API\nfunc Handler(c *gin.Context) {\n\tc.Data(http.StatusOK, gin.MIMEPlain, []byte(http.StatusText(http.StatusOK)))\n}\n"
  },
  {
    "path": "pkg/api/health/health_test.go",
    "content": "package health\n\nimport (\n\t\"fmt\"\n\t\"net/http\"\n\t\"net/http/httptest\"\n\t\"reflect\"\n\t\"testing\"\n\n\t\"github.com/gin-gonic/gin\"\n)\n\nfunc TestHandler(t *testing.T) {\n\ttests := []struct {\n\t\tname             string\n\t\thttpRequest      *http.Request\n\t\twantResponseCode int\n\t\twantStatusText   string\n\t}{\n\n\t\t{\"Test handler health route for success\", httptest.NewRequest(http.MethodGet, \"/health\", nil), 200, http.StatusText(http.StatusOK)},\n\t}\n\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tresp := httptest.NewRecorder()\n\t\t\tgin.SetMode(gin.TestMode)\n\t\t\tc, _ := gin.CreateTestContext(resp)\n\n\t\t\tc.Request = tt.httpRequest\n\n\t\t\trouter := gin.Default()\n\t\t\trouter.GET(\"/health\", Handler)\n\t\t\trouter.ServeHTTP(resp, c.Request)\n\n\t\t\tfmt.Printf(\"Responsecode: %v\\n\", resp.Code)\n\t\t\tif resp.Code != tt.wantResponseCode {\n\t\t\t\tt.Errorf(\"Router.Handler() responseCode = %v, want = %v\\n\", resp.Code, tt.wantResponseCode)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tif !reflect.DeepEqual(resp.Body.String(), tt.wantStatusText) {\n\t\t\t\tt.Errorf(\"Router.Handler() statusText = %v, want = %v\\n\", resp.Body.String(), tt.wantStatusText)\n\t\t\t}\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "pkg/api/results/results.go",
    "content": "package results\n\nimport (\n\t\"net/http\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/service/teststats\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/gin-gonic/gin\"\n)\n\n//Handler captures the test execution results from nucleus\nfunc Handler(logger lumber.Logger, ts *teststats.ProcStats) gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\trequest := core.ExecutionResults{}\n\t\tif err := c.ShouldBindJSON(&request); err != nil {\n\t\t\tlogger.Errorf(\"error while binding json %v\", err)\n\t\t\tc.JSON(http.StatusBadRequest, gin.H{\"message\": err.Error()})\n\t\t\treturn\n\t\t}\n\n\t\tgo func() {\n\t\t\tts.ExecutionResultInputChannel <- request\n\t\t}()\n\t\tc.Data(http.StatusOK, gin.MIMEPlain, []byte(http.StatusText(http.StatusOK)))\n\t}\n}\n"
  },
  {
    "path": "pkg/api/results/results_test.go",
    "content": "package results\n\nimport (\n\t\"bytes\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"net/http/httptest\"\n\t\"reflect\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/service/teststats\"\n\t\"github.com/LambdaTest/test-at-scale/testutils\"\n\t\"github.com/gin-gonic/gin\"\n)\n\n// NOTE: Tests in this package are meant to be run in a Linux environment\n\nfunc TestHandler(t *testing.T) {\n\tlogger, _ := testutils.GetLogger()\n\tcfg, _ := testutils.GetConfig()\n\n\tts, err := teststats.New(cfg, logger)\n\tif err != nil {\n\t\tt.Errorf(\"Error creating teststats service: %v\", err)\n\t}\n\n\ttests := []struct {\n\t\tname             string\n\t\thttpRequest      *http.Request\n\t\twantResponseCode int\n\t\twantStatusText   string\n\t}{\n\n\t\t{\n\t\t\t\"Test handler result route\",\n\t\t\thttptest.NewRequest(http.MethodPost, \"/results\", bytes.NewBuffer([]byte(`{\"TaskID\" : \"123\"}`))),\n\t\t\t200,\n\t\t\thttp.StatusText(http.StatusOK),\n\t\t},\n\n\t\t{\n\t\t\t\"Test handler result route for error in jsonBinding and hence http.StatusBadRequest\",\n\t\t\thttptest.NewRequest(http.MethodPost, \"/results\", nil),\n\t\t\thttp.StatusBadRequest,\n\t\t\t`{\"message\":\"EOF\"}`,\n\t\t},\n\t}\n\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tresp := httptest.NewRecorder()\n\t\t\tgin.SetMode(gin.TestMode)\n\t\t\tc, _ := gin.CreateTestContext(resp)\n\n\t\t\tc.Request = tt.httpRequest\n\n\t\t\trouter := gin.Default()\n\t\t\trouter.POST(\"/results\", Handler(logger, ts))\n\t\t\trouter.ServeHTTP(resp, c.Request)\n\n\t\t\tfmt.Printf(\"Responsecode: %v\\n\", resp.Code)\n\t\t\tif resp.Code != tt.wantResponseCode {\n\t\t\t\tt.Errorf(\"Router.Handler() responseCode = %v, want = %v\\n\", resp.Code, tt.wantResponseCode)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tif !reflect.DeepEqual(resp.Body.String(), tt.wantStatusText) {\n\t\t\t\tt.Errorf(\"Router.Handler() statusText = %v, want = %v\\n\", resp.Body.String(), tt.wantStatusText)\n\t\t\t}\n\t\t})\n\t}\n\n}\n"
  },
  {
    "path": "pkg/api/router.go",
    "content": "package api\n\nimport (\n\t\"github.com/LambdaTest/test-at-scale/pkg/api/health\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/api/results\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/api/testlist\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/service/teststats\"\n\t\"github.com/gin-gonic/gin\"\n)\n\n// Router for nucleus\ntype Router struct {\n\tlogger           lumber.Logger\n\ttestStatsService *teststats.ProcStats\n\ttdResChan        chan core.DiscoveryResult\n}\n\n// NewRouter returns instance of Router\nfunc NewRouter(logger lumber.Logger, ts *teststats.ProcStats, tdResChan chan core.DiscoveryResult) Router {\n\treturn Router{\n\t\tlogger:           logger,\n\t\ttestStatsService: ts,\n\t\ttdResChan:        tdResChan,\n\t}\n}\n\n//Handler function will perform all route operations\nfunc (r Router) Handler() *gin.Engine {\n\n\tr.logger.Infof(\"Setting up routes\")\n\trouter := gin.Default()\n\t// corsConfig := cors.DefaultConfig()\n\t// corsConfig.AllowAllOrigins = true\n\t// corsConfig.AddAllowHeaders(\"authorization\", \"cache-control\", \"pragma\")\n\t// router.Use(cors.New(corsConfig))\n\trouter.GET(\"/health\", health.Handler)\n\trouter.POST(\"/results\", results.Handler(r.logger, r.testStatsService))\n\trouter.POST(\"/test-list\", testlist.Handler(r.logger, r.tdResChan))\n\n\treturn router\n\n}\n"
  },
  {
    "path": "pkg/api/router_test.go",
    "content": "package api\n\nimport (\n\t\"bytes\"\n\t\"net/http\"\n\t\"net/http/httptest\"\n\t\"reflect\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/service/teststats\"\n\t\"github.com/LambdaTest/test-at-scale/testutils\"\n\t\"github.com/gin-gonic/gin\"\n)\n\n// NOTE: Tests in this package are meant to be run in a Linux environment\n\nfunc TestNewRouter(t *testing.T) {\n\tlogger, _ := testutils.GetLogger()\n\tcfg, _ := testutils.GetConfig()\n\tts, err := teststats.New(cfg, logger)\n\ttdResChan := make(chan core.DiscoveryResult)\n\tif err != nil {\n\t\tt.Errorf(\"Error creating teststats service: %v\", err)\n\t}\n\ttype args struct {\n\t\tlogger    lumber.Logger\n\t\tts        *teststats.ProcStats\n\t\ttdResChan chan core.DiscoveryResult\n\t}\n\ttests := []struct {\n\t\tname string\n\t\targs args\n\t\twant Router\n\t}{\n\t\t{\"TestNewRouter\", args{logger, ts, tdResChan}, Router{logger, ts, tdResChan}},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tif got := NewRouter(tt.args.logger, tt.args.ts, tt.args.tdResChan); !reflect.DeepEqual(got, tt.want) {\n\t\t\t\tt.Errorf(\"NewRouter() = %v, want %v\", got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\nfunc TestRouter_Handler(t *testing.T) {\n\tlogger, _ := testutils.GetLogger()\n\tcfg, _ := testutils.GetConfig()\n\tts, err := teststats.New(cfg, logger)\n\ttdResChan := make(chan core.DiscoveryResult)\n\tif err != nil {\n\t\tt.Errorf(\"Error creating teststats service: %v\", err)\n\t}\n\ttests := []struct {\n\t\tname             string\n\t\thttpRequest      *http.Request\n\t\twantResponseCode int\n\t\twantStatusText   string\n\t}{\n\t\t{\"Test handler health route for success\", httptest.NewRequest(http.MethodGet, \"/health\", nil), 200, http.StatusText(http.StatusOK)},\n\t\t{\"Test handler result route\", httptest.NewRequest(http.MethodPost, \"/results\", bytes.NewBuffer([]byte(`{\"TaskID\" : \"123\"}`))), 200, http.StatusText(http.StatusOK)},\n\t\t{\"Test handler result route for error in jsonBinding and hence http.StatusBadRequest\", httptest.NewRequest(http.MethodPost, \"/results\", nil), http.StatusBadRequest, `{\"message\":\"EOF\"}`},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tnewRouter := NewRouter(logger, ts, tdResChan)\n\t\t\tresp := httptest.NewRecorder()\n\t\t\tgin.SetMode(gin.TestMode)\n\t\t\tc, _ := gin.CreateTestContext(resp)\n\t\t\tc.Request = tt.httpRequest\n\t\t\tnewRouter.Handler().ServeHTTP(resp, c.Request)\n\t\t\tif resp.Code != tt.wantResponseCode {\n\t\t\t\tt.Errorf(\"Router.Handler() responseCode = %v, want = %v\\n\", resp.Code, tt.wantResponseCode)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(resp.Body.String(), tt.wantStatusText) {\n\t\t\t\tt.Errorf(\"Router.Handler() statusText = %v, want = %v\\n\", resp.Body.String(), tt.wantStatusText)\n\t\t\t}\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "pkg/api/testlist/testlist.go",
    "content": "package testlist\n\nimport (\n\t\"net/http\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/gin-gonic/gin\"\n)\n\n// Handler captures the test execution results from nucleus\nfunc Handler(logger lumber.Logger, tdResChan chan core.DiscoveryResult) gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\trequest := core.DiscoveryResult{}\n\t\tif err := c.ShouldBindJSON(&request); err != nil {\n\t\t\tlogger.Errorf(\"error while binding json %v\", err)\n\t\t\tc.JSON(http.StatusBadRequest, gin.H{\"message\": err.Error()})\n\t\t\treturn\n\t\t}\n\n\t\tgo func() {\n\t\t\ttdResChan <- request\n\t\t}()\n\t\tc.Data(http.StatusOK, gin.MIMEPlain, []byte(http.StatusText(http.StatusOK)))\n\t}\n}\n"
  },
  {
    "path": "pkg/azure/client.go",
    "content": "package azure\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io\"\n\t\"net/http\"\n\t\"net/url\"\n\n\t\"github.com/Azure/azure-sdk-for-go/sdk/azcore/policy\"\n\t\"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/utils\"\n)\n\nvar (\n\tdefaultBufferSize     = 3 * 1024 * 1024\n\tdefaultMaxBuffers     = 4\n\tcoverageContainerName = \"coverage\"\n\tmaxRetry              = 10\n)\n\n// store represents the azure storage\ntype store struct {\n\trequests        core.Requests\n\tcontainerClient azblob.ContainerClient\n\tlogger          lumber.Logger\n\tendpoint        string\n}\n\n// request body for getting SAS URL API.\ntype request struct {\n\tPurpose core.SASURLPurpose `json:\"purpose\" validate:\"oneof=cache workspace_cache pre_run_logs post_run_logs execution_logs\"`\n}\n\n//  response body for  get SAS URL API.\ntype response struct {\n\tSASURL string `json:\"sas_url\"`\n}\n\n// NewAzureBlobEnv returns a new Azure blob store.\nfunc NewAzureBlobEnv(cfg *config.NucleusConfig, requests core.Requests, logger lumber.Logger) (core.AzureClient, error) {\n\t// if non coverage mode then use Azure SAS Token\n\tif !cfg.CoverageMode {\n\t\treturn &store{\n\t\t\trequests: requests,\n\t\t\tlogger:   logger,\n\t\t\tendpoint: global.NeuronHost + \"/internal/sas-token\",\n\t\t}, nil\n\t}\n\t// FIXME: Hack for synapse\n\tif cfg.LocalRunner {\n\t\tcfg.Azure.StorageAccountName = \"dummy-account\"\n\t\tcfg.Azure.StorageAccessKey = \"dummy-access-key\"\n\t}\n\tif cfg.Azure.StorageAccountName == \"\" || cfg.Azure.StorageAccessKey == \"\" {\n\t\treturn nil, errors.New(\"either the storage account or storage access key environment variable is not set\")\n\t}\n\tcredential, err := azblob.NewSharedKeyCredential(cfg.Azure.StorageAccountName, cfg.Azure.StorageAccessKey)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tu, err := url.Parse(fmt.Sprintf(\"https://%s.blob.core.windows.net/%s\", cfg.Azure.StorageAccountName, cfg.Azure.ContainerName))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tserviceClient, err := azblob.NewServiceClientWithSharedKey(u.String(), credential, getClientOptions())\n\tif err != nil {\n\t\tlogger.Errorf(\"Failed to create azure service client, error: %v\", err)\n\t\treturn nil, err\n\t}\n\n\treturn &store{\n\t\trequests:        requests,\n\t\tlogger:          logger,\n\t\tendpoint:        global.NeuronHost + \"/internal/sas-token\",\n\t\tcontainerClient: serviceClient.NewContainerClient(coverageContainerName),\n\t}, nil\n}\n\n// FindUsingSASUrl download object based on sasURL\nfunc (s *store) FindUsingSASUrl(ctx context.Context, sasURL string) (io.ReadCloser, error) {\n\tu, err := url.Parse(sasURL)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tblobClient, err := azblob.NewBlockBlobClientWithNoCredential(u.String(), &azblob.ClientOptions{})\n\tif err != nil {\n\t\ts.logger.Errorf(\"failed to create blob client, error: %v\", err)\n\t\treturn nil, err\n\t}\n\ts.logger.Debugf(\"Downloading blob from %s\", blobClient.URL())\n\tout, err := blobClient.Download(ctx, &azblob.DownloadBlobOptions{})\n\tif err != nil {\n\t\treturn nil, handleError(err)\n\t}\n\n\treturn out.Body(&azblob.RetryReaderOptions{MaxRetryRequests: 5}), nil\n}\n\n// CreateUsingSASURL creates object using sasURL\nfunc (s *store) CreateUsingSASURL(ctx context.Context, sasURL string, reader io.Reader, mimeType string) (string, error) {\n\tu, err := url.Parse(sasURL)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tblobClient, err := azblob.NewBlockBlobClientWithNoCredential(u.String(), getClientOptions())\n\tif err != nil {\n\t\ts.logger.Errorf(\"failed to create blob client, error: %v\", err)\n\t\treturn \"\", err\n\t}\n\ts.logger.Debugf(\"Uploading blob to %s\", blobClient.URL())\n\n\t_, err = blobClient.UploadStreamToBlockBlob(ctx, reader, azblob.UploadStreamToBlockBlobOptions{\n\t\tHTTPHeaders: &azblob.BlobHTTPHeaders{BlobContentType: &mimeType},\n\t\tBufferSize:  defaultBufferSize,\n\t\tMaxBuffers:  defaultMaxBuffers,\n\t})\n\n\treturn blobClient.URL(), err\n}\n\n// Find function downloads blob based on URI\nfunc (s *store) Find(ctx context.Context, path string) (io.ReadCloser, error) {\n\tblobClient := s.containerClient.NewBlockBlobClient(path)\n\tout, err := blobClient.Download(ctx, &azblob.DownloadBlobOptions{})\n\tif err != nil {\n\t\treturn nil, handleError(err)\n\t}\n\tdefer out.RawResponse.Body.Close()\n\n\treturn out.Body(&azblob.RetryReaderOptions{MaxRetryRequests: 5}), nil\n}\n\n// Create function ulploads blob to URI\nfunc (s *store) Create(ctx context.Context, path string, reader io.Reader, mimeType string) (string, error) {\n\tblobClient := s.containerClient.NewBlockBlobClient(path)\n\t_, err := blobClient.UploadStreamToBlockBlob(ctx, reader, azblob.UploadStreamToBlockBlobOptions{\n\t\tHTTPHeaders: &azblob.BlobHTTPHeaders{BlobContentType: &mimeType},\n\t\tBufferSize:  defaultBufferSize,\n\t\tMaxBuffers:  defaultMaxBuffers,\n\t})\n\n\treturn blobClient.URL(), err\n}\n\n// GetSASURL calls request neuron to get the SAS url\nfunc (s *store) GetSASURL(ctx context.Context, purpose core.SASURLPurpose, query map[string]interface{}) (string, error) {\n\treqPayload := &request{Purpose: purpose}\n\treqBody, err := json.Marshal(reqPayload)\n\tif err != nil {\n\t\ts.logger.Errorf(\"failed to marshal request body %v\", err)\n\t\treturn \"\", err\n\t}\n\tdefaultQuery, headers := utils.GetDefaultQueryAndHeaders()\n\tfor key, val := range defaultQuery {\n\t\tif query == nil {\n\t\t\tquery = make(map[string]interface{})\n\t\t}\n\t\tquery[key] = val\n\t}\n\trawBytes, _, err := s.requests.MakeAPIRequest(ctx, http.MethodPost, s.endpoint, reqBody, query, headers)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tpayload := new(response)\n\terr = json.Unmarshal(rawBytes, payload)\n\tif err != nil {\n\t\ts.logger.Errorf(\"Error while unmarshalling json, error %v\", err)\n\t\treturn \"\", err\n\t}\n\treturn payload.SASURL, nil\n}\n\n// Exists checks the blob if exists\nfunc (s *store) Exists(ctx context.Context, path string) (bool, error) {\n\tblobClient := s.containerClient.NewBlockBlobClient(path)\n\tget, err := blobClient.GetProperties(ctx, &azblob.GetBlobPropertiesOptions{})\n\tif err != nil {\n\t\treturn false, fmt.Errorf(\"check if object exists, %w\", err)\n\t}\n\tstatusCode := get.RawResponse.StatusCode\n\tdefer get.RawResponse.Body.Close()\n\treturn statusCode == http.StatusOK, nil\n}\n\nfunc handleError(err error) error {\n\tif err == nil {\n\t\treturn nil\n\t}\n\tvar errResp *azblob.StorageError\n\tif internalErr, ok := err.(*azblob.InternalError); ok && internalErr.As(&errResp) {\n\t\tif errResp.ErrorCode == azblob.StorageErrorCodeBlobNotFound {\n\t\t\treturn errs.ErrNotFound\n\t\t}\n\t}\n\treturn err\n}\n\nfunc getClientOptions() *azblob.ClientOptions {\n\treturn &azblob.ClientOptions{\n\t\tRetry: policy.RetryOptions{\n\t\t\tMaxRetries: int32(maxRetry),\n\t\t\tTryTimeout: global.DefaultAPITimeout,\n\t\t},\n\t}\n}\n"
  },
  {
    "path": "pkg/blocktestservice/setup.go",
    "content": "// Package blocktestservice is used for creating the blocklist file\npackage blocktestservice\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"io/ioutil\"\n\t\"net/http\"\n\t\"strings\"\n\t\"sync\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/utils\"\n)\n\nconst (\n\tdelimiter = \"##\"\n)\n\n// blocktest represents the blocked test suites and test cases.\ntype blocktest struct {\n\tSource  string `json:\"source\"`\n\tLocator string `json:\"locator\"`\n\tStatus  string `json:\"status\"`\n}\n\n// blocktestAPIResponse fetch blocked test cases from neuron API\ntype blocktestAPIResponse struct {\n\tName        string `json:\"test_name\"`\n\tTestLocator string `json:\"test_locator\"`\n\tStatus      string `json:\"status\"`\n}\n\n// blocktestLocator stores locator and its status info\ntype blocktestLocator struct {\n\tLocator string `json:\"locator\"`\n\tStatus  string `json:\"status\"`\n}\n\n// TestBlockTestService represents an instance of ConfManager instance\ntype TestBlockTestService struct {\n\tcfg               *config.NucleusConfig\n\trequests          core.Requests\n\tlogger            lumber.Logger\n\tendpoint          string\n\tblockTestEntities map[string][]blocktest\n\tonce              sync.Once\n\terrChan           chan error\n}\n\n// NewTestBlockTestService creates and returns a new TestBlockTestService instance\nfunc NewTestBlockTestService(cfg *config.NucleusConfig, requests core.Requests, logger lumber.Logger) *TestBlockTestService {\n\treturn &TestBlockTestService{\n\t\tcfg:               cfg,\n\t\tlogger:            logger,\n\t\trequests:          requests,\n\t\tendpoint:          global.NeuronHost + \"/blocktest\",\n\t\tblockTestEntities: make(map[string][]blocktest),\n\t\terrChan:           make(chan error, 1),\n\t}\n}\n\nfunc (tbs *TestBlockTestService) fetchBlockListFromNeuron(ctx context.Context, branch string) error {\n\tvar inp []blocktestAPIResponse\n\tquery, headers := utils.GetDefaultQueryAndHeaders()\n\tquery[\"branch\"] = branch\n\n\trawBytes, statusCode, err := tbs.requests.MakeAPIRequest(ctx, http.MethodGet, tbs.endpoint, nil, query, headers)\n\tif statusCode == http.StatusNotFound {\n\t\treturn nil\n\t}\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif jsonErr := json.Unmarshal(rawBytes, &inp); jsonErr != nil {\n\t\ttbs.logger.Errorf(\"Unable to fetch blocklist response: %v\", jsonErr)\n\t\treturn jsonErr\n\t}\n\t// populate bl\n\n\tblocktestLocators := make([]*blocktestLocator, 0, len(inp))\n\tfor i := range inp {\n\t\tblockLocator := new(blocktestLocator)\n\t\tblockLocator.Locator = inp[i].TestLocator\n\t\tblockLocator.Status = inp[i].Status\n\t\tblocktestLocators = append(blocktestLocators, blockLocator)\n\t}\n\ttbs.populateBlockList(\"api\", blocktestLocators)\n\treturn nil\n}\n\n// GetBlockTests provides list of blocked test cases\nfunc (tbs *TestBlockTestService) GetBlockTests(ctx context.Context, blocklistYAML []string, branch string) error {\n\ttbs.once.Do(func() {\n\n\t\tblocktestLocators := make([]*blocktestLocator, 0, len(blocklistYAML))\n\t\tfor _, locator := range blocklistYAML {\n\t\t\tblockLocator := new(blocktestLocator)\n\t\t\tblockLocator.Locator = locator\n\t\t\tblockLocator.Status = string(core.Blocklisted)\n\t\t\tblocktestLocators = append(blocktestLocators, blockLocator)\n\t\t}\n\n\t\ttbs.populateBlockList(\"yml\", blocktestLocators)\n\n\t\tif err := tbs.fetchBlockListFromNeuron(ctx, branch); err != nil {\n\t\t\ttbs.logger.Errorf(\"Unable to fetch remote blocklist: %v. Ignoring remote response\", err)\n\t\t\ttbs.errChan <- err\n\t\t\treturn\n\t\t}\n\t\ttbs.logger.Infof(\"Block tests: %+v\", tbs.blockTestEntities)\n\n\t\t// write blocklistest tests on disk\n\t\tmarshalledBlocklist, err := json.Marshal(tbs.blockTestEntities)\n\t\tif err != nil {\n\t\t\ttbs.logger.Errorf(\"Unable to json marshal blocklist: %+v\", err)\n\t\t\ttbs.errChan <- err\n\t\t\treturn\n\t\t}\n\n\t\tif err = ioutil.WriteFile(global.BlockTestFileLocation, marshalledBlocklist, 0644); err != nil {\n\t\t\ttbs.logger.Errorf(\"Unable to write blocklist file: %+v\", err)\n\t\t\ttbs.errChan <- err\n\t\t\treturn\n\t\t}\n\t\ttbs.blockTestEntities = nil\n\t})\n\tselect {\n\tcase err := <-tbs.errChan:\n\t\treturn err\n\tdefault:\n\t\treturn nil\n\t}\n}\n\nfunc (tbs *TestBlockTestService) populateBlockList(blocktestSource string, blocktestLocators []*blocktestLocator) {\n\ti := 0\n\tfor _, test := range blocktestLocators {\n\t\t// locators must end with delimiter\n\t\tif !strings.HasSuffix(test.Locator, delimiter) {\n\t\t\ttest.Locator += delimiter\n\t\t}\n\t\ti = strings.Index(test.Locator, delimiter)\n\t\t// TODO: handle duplicate entries and ignore its individual suites or testcases in blocklist if file is blocklisted\n\n\t\tentity := blocktest{Source: blocktestSource, Locator: test.Locator, Status: test.Status}\n\t\tif val, ok := tbs.blockTestEntities[test.Locator[:i]]; ok {\n\t\t\ttbs.blockTestEntities[test.Locator[:i]] = append(val, entity)\n\t\t} else {\n\t\t\ttbs.blockTestEntities[test.Locator[:i]] = append([]blocktest{},\n\t\t\t\tblocktest{Source: blocktestSource, Locator: test.Locator, Status: test.Status})\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "pkg/blocktestservice/setup_test.go",
    "content": "// Package blocktestservice is used for creating the blocklist file\npackage blocktestservice\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"net/http/httptest\"\n\t\"reflect\"\n\t\"sync\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/requestutils\"\n\t\"github.com/LambdaTest/test-at-scale/testutils\"\n\t\"github.com/cenkalti/backoff/v4\"\n)\n\nconst buildID = \"buildID\"\n\nfunc TestBlockListService_fetchBlockListFromNeuron(t *testing.T) {\n\tserver := httptest.NewServer( // mock server\n\t\thttp.FileServer(http.Dir(\"../../testutils/testdata/testblocklistdata/\")), // mock data stored at testutils/testdata\n\t)\n\tdefer server.Close()\n\n\tserver2 := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tif r.URL.Path != \"/non200\" {\n\t\t\tt.Errorf(\"Expected to request '/non200', got: %v\", r.URL)\n\t\t\treturn\n\t\t}\n\t\tw.WriteHeader(503)\n\t\t_, err := w.Write([]byte(`{\"value\":\"fixed\"}`))\n\t\tif err != nil {\n\t\t\tfmt.Printf(\"Could not write data in httptest server, error: %v\", err)\n\t\t}\n\t}))\n\tdefer server2.Close()\n\n\tcfg := new(config.NucleusConfig)\n\tcfg.BuildID = buildID\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't initialize logger, error: %v\", err)\n\t}\n\tblocklistedEntities := make(map[string][]blocktest)\n\n\ttype args struct {\n\t\tctx      context.Context\n\t\tendpoint string\n\t\trepoID   string\n\t\tbranch   string\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\targs    args\n\t\twantErr bool\n\t}{\n\t\t{\"Test fetchBlocklistFromNeuron\",\n\t\t\targs{\n\t\t\t\tctx:      context.TODO(),\n\t\t\t\tendpoint: server.URL + \"/testBlocklist.json\",\n\t\t\t\trepoID:   \"repoID\",\n\t\t\t\tbranch:   \"branch\",\n\t\t\t},\n\t\t\tfalse,\n\t\t},\n\n\t\t{\"Test fetchBlocklistFromNeuron for wrong request endpoint\",\n\t\t\targs{\n\t\t\t\tctx:      context.TODO(),\n\t\t\t\tendpoint: \"/dne.json\",\n\t\t\t\trepoID:   \"repoID\",\n\t\t\t\tbranch:   \"branch\",\n\t\t\t},\n\t\t\ttrue,\n\t\t},\n\n\t\t{\"Test fetchBlocklistFromNeuron for non 200 response\",\n\t\t\targs{\n\t\t\t\tctx:      context.TODO(),\n\t\t\t\tendpoint: server2.URL + \"/non200\",\n\t\t\t\trepoID:   \"repoID\",\n\t\t\t\tbranch:   \"branch\",\n\t\t\t},\n\t\t\ttrue,\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\ttbs := &TestBlockTestService{\n\t\t\t\tcfg:               cfg,\n\t\t\t\tlogger:            logger,\n\t\t\t\tendpoint:          tt.args.endpoint,\n\t\t\t\tblockTestEntities: blocklistedEntities,\n\t\t\t\tonce:              sync.Once{},\n\t\t\t\terrChan:           make(chan error, 1),\n\t\t\t\trequests:          requestutils.New(logger, global.DefaultAPITimeout, &backoff.StopBackOff{}),\n\t\t\t}\n\t\t\tif err := tbs.fetchBlockListFromNeuron(tt.args.ctx, tt.args.branch); (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"TestBlockListService.fetchBlockListFromNeuron() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc TestBlockListService_GetBlockListedTests(t *testing.T) {\n\tserver := httptest.NewServer( // mock server\n\t\thttp.FileServer(http.Dir(\"../../testutils/testdata/testblocklistdata/\")), // mock data stored at testutils/testdata\n\t)\n\tdefer server.Close()\n\n\tcfg := new(config.NucleusConfig)\n\tcfg.BuildID = buildID\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't initialize logger, error: %v\", err)\n\t}\n\trequests := requestutils.New(logger, global.DefaultAPITimeout, &backoff.StopBackOff{})\n\ttbs := NewTestBlockTestService(cfg, requests, logger)\n\n\ttbs.endpoint = server.URL + \"/testBlocklist.json\"\n\n\ttype args struct {\n\t\tctx       context.Context\n\t\ttasConfig *core.TASConfig\n\t\tbranch    string\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\targs    args\n\t\twantErr bool\n\t}{\n\t\t{\"Test GetBlockListedTests\",\n\t\t\targs{\n\t\t\t\tctx: context.TODO(),\n\t\t\t\ttasConfig: &core.TASConfig{\n\t\t\t\t\tSmartRun:  false,\n\t\t\t\t\tFramework: \"jest\",\n\t\t\t\t\tBlocklist: []string{\"src/test/f1.spec.js\", \"src/test/f2.spec.js\"},\n\t\t\t\t\tSplitMode: core.TestSplit,\n\t\t\t\t\tTier:      \"small\"},\n\t\t\t},\n\t\t\tfalse},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tblYML := tt.args.tasConfig.Blocklist\n\t\t\tif err := tbs.GetBlockTests(tt.args.ctx, blYML, tt.args.branch); (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"TestBlockListService.GetBlockListedTests() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc TestBlockListService_populateBlockList(t *testing.T) {\n\tcfg := config.GlobalNucleusConfig\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't initialize logger, error: %v\", err)\n\t}\n\tblocklistLocators := []*blocktestLocator{}\n\tfirstLocator := &blocktestLocator{\n\t\tLocator: \"src/test/api1.js\",\n\t\tStatus:  \"quarantined\",\n\t}\n\n\tsecondLocator := &blocktestLocator{\n\t\tLocator: \"src/test/api2.js\",\n\t\tStatus:  \"blocklisted\",\n\t}\n\tblocklistLocators = append(blocklistLocators, firstLocator, secondLocator)\n\n\ttype fields struct {\n\t\tcfg                 *config.NucleusConfig\n\t\tlogger              lumber.Logger\n\t\tendpoint            string\n\t\tblocklistedEntities map[string][]blocktest\n\t\terrChan             chan error\n\t}\n\ttype args struct {\n\t\tblocklistSource   string\n\t\tblocktestLocators []*blocktestLocator\n\t}\n\ttests := []struct {\n\t\tname   string\n\t\tfields fields\n\t\targs   args\n\t}{\n\t\t{\"Test populateBlockList\",\n\t\t\tfields{\n\t\t\t\tcfg:      cfg,\n\t\t\t\tlogger:   logger,\n\t\t\t\tendpoint: \"/blocktest\",\n\t\t\t\tblocklistedEntities: 
map[string][]blocktest{\n\t\t\t\t\t\"src/test/api1.js\": {\n\t\t\t\t\t\tblocktest{\n\t\t\t\t\t\t\tSource:  \"src\",\n\t\t\t\t\t\t\tLocator: \"loc\",\n\t\t\t\t\t\t\tStatus:  \"blocklisted\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\terrChan: make(chan error, 1)},\n\t\t\targs{\n\t\t\t\tblocklistSource:   \"./\",\n\t\t\t\tblocktestLocators: blocklistLocators,\n\t\t\t},\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\ttbs := &TestBlockTestService{\n\t\t\t\tcfg:               tt.fields.cfg,\n\t\t\t\tlogger:            tt.fields.logger,\n\t\t\t\tendpoint:          tt.fields.endpoint,\n\t\t\t\tblockTestEntities: tt.fields.blocklistedEntities,\n\t\t\t\tonce:              sync.Once{},\n\t\t\t\terrChan:           tt.fields.errChan,\n\t\t\t}\n\t\t\ttbs.populateBlockList(tt.args.blocklistSource, tt.args.blocktestLocators)\n\t\t\texpected := map[string][]blocktest{\"src/test/api1.js\": {{\"src\", \"loc\", \"blocklisted\"}, {\"./\", \"src/test/api1.js##\", \"quarantined\"}},\n\t\t\t\t\"src/test/api2.js\": {{\"./\", \"src/test/api2.js##\", \"blocklisted\"}}}\n\t\t\tgot := tbs.blockTestEntities\n\t\t\tif !reflect.DeepEqual(expected, got) {\n\t\t\t\tt.Errorf(\"\\nexpected: %v\\ngot: %v\", expected, got)\n\t\t\t}\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "pkg/cachemanager/cachemanager.go",
    "content": "package cachemanager\n\nimport (\n\t\"context\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io\"\n\t\"os\"\n\t\"path/filepath\"\n\t\"sync\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/fileutils\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n)\n\nconst (\n\tpnpmLock                      = \"pnpm-lock.yaml\"\n\tyarnLock                      = \"yarn.lock\"\n\tpackageLock                   = \"package-lock.json\"\n\tnpmShrinkwrap                 = \"npm-shrinkwrap.json\"\n\tnodeModules                   = \"node_modules\"\n\tdefaultCompressedFileName     = \"cache.tzst\"\n\tworkspaceCompressedFilenameV1 = \"workspace.tzst\"\n\tworkspaceCompressedFilenameV2 = \"workspace-%s.tzst\"\n)\n\n// cache represents the files/dirs that will be cached\ntype cache struct {\n\tazureClient core.AzureClient\n\tlogger      lumber.Logger\n\tonce        sync.Once\n\tzstd        core.ZstdCompressor\n\tskipUpload  bool\n\thomeDir     string\n}\n\nvar cacheBlobURL string\nvar apiErr error\n\n// New returns a new CacheStore\nfunc New(z core.ZstdCompressor, azureClient core.AzureClient, logger lumber.Logger) (core.CacheStore, error) {\n\thomeDir, err := os.UserHomeDir()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &cache{\n\t\tazureClient: azureClient,\n\t\tzstd:        z,\n\t\tlogger:      logger,\n\t\thomeDir:     homeDir,\n\t}, nil\n}\n\nfunc (c *cache) getCacheSASURL(ctx context.Context, cacheKey string) (string, error) {\n\tc.once.Do(func() {\n\t\tquery := map[string]interface{}{\"key\": cacheKey}\n\t\tcacheBlobURL, apiErr = c.azureClient.GetSASURL(ctx, core.PurposeCache, query)\n\t})\n\treturn cacheBlobURL, apiErr\n}\n\nfunc (c *cache) Download(ctx context.Context, cacheKey string) error {\n\tsasURL, err := c.getCacheSASURL(ctx, cacheKey)\n\tif err != nil {\n\t\tc.logger.Errorf(\"Error while generating SAS 
Token, error %v\", err)\n\t\treturn err\n\t}\n\tresp, err := c.azureClient.FindUsingSASUrl(ctx, sasURL)\n\tif err != nil {\n\t\tif errors.Is(err, errs.ErrNotFound) {\n\t\t\tc.logger.Infof(\"Cache not found for key: %s\", cacheKey)\n\t\t\treturn nil\n\t\t}\n\t\tc.logger.Errorf(\"Error while downloading cache for key: %s, error %v\", cacheKey, err)\n\t\treturn err\n\t}\n\tc.skipUpload = true\n\tdefer resp.Close()\n\n\tcachedFilePath := filepath.Join(os.TempDir(), defaultCompressedFileName)\n\tout, err := os.Create(cachedFilePath)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer out.Close()\n\n\tif _, err := io.Copy(out, resp); err != nil {\n\t\treturn err\n\t}\n\treturn c.zstd.Decompress(ctx, cachedFilePath, true, global.RepoDir)\n}\n\nfunc (c *cache) Upload(ctx context.Context, cacheKey string, itemsToCompress ...string) error {\n\tif c.skipUpload {\n\t\tc.logger.Infof(\"Cache hit occurred on the key %s, not saving cache.\", cacheKey)\n\t\treturn nil\n\t}\n\n\tvalidatedItems := make([]string, 0, len(itemsToCompress))\n\tif len(itemsToCompress) == 0 {\n\t\tdirs, err := c.getDefaultDirs()\n\t\tc.logger.Debugf(\"Dirs: %+v\", dirs)\n\t\tif err != nil {\n\t\t\tc.logger.Errorf(\"failed to get default cache directories, error %v\", err)\n\t\t\treturn nil\n\t\t}\n\t\titemsToCompress = append(itemsToCompress, dirs...)\n\t}\n\t// validate the file or dir paths if it exists.\n\tfor _, item := range itemsToCompress {\n\t\texists, err := fileutils.CheckIfExists(item)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif exists {\n\t\t\tvalidatedItems = append(validatedItems, item)\n\t\t} else {\n\t\t\tc.logger.Debugf(\"%s does not exist, skipping upload\", item)\n\t\t}\n\t}\n\tif len(validatedItems) == 0 {\n\t\tc.logger.Debugf(\"No valid files/dirs found to cache\")\n\t\treturn nil\n\t}\n\n\terr := c.zstd.Compress(ctx, defaultCompressedFileName, true, global.RepoDir, validatedItems...)\n\tif err != nil {\n\t\tc.logger.Errorf(\"error while compressing files with key %s, error: 
%v\", cacheKey, err)\n\t\treturn err\n\t}\n\n\tf, err := os.Open(filepath.Join(global.RepoDir, defaultCompressedFileName))\n\tif err != nil {\n\t\tc.logger.Errorf(\"error while opening compressed file with key %s, error: %v\", cacheKey, err)\n\t\treturn err\n\t}\n\n\tdefer f.Close()\n\tsasURL, err := c.getCacheSASURL(ctx, cacheKey)\n\tif err != nil {\n\t\tc.logger.Errorf(\"Error while generating SAS Token, error %v\", err)\n\t\treturn err\n\t}\n\t_, err = c.azureClient.CreateUsingSASURL(ctx, sasURL, f, \"application/zstd\")\n\tif err != nil {\n\t\tc.logger.Errorf(\"error while uploading cached file %s with key %s, error: %v\", defaultCompressedFileName, cacheKey, err)\n\t\treturn err\n\t}\n\treturn nil\n}\n\nfunc (c *cache) CacheWorkspace(ctx context.Context, subModule string) error {\n\ttmpDir := os.TempDir()\n\tworkspaceCompressedFilename := workspaceCompressedFilenameV1\n\tif subModule != \"\" {\n\t\tworkspaceCompressedFilename = fmt.Sprintf(workspaceCompressedFilenameV2, subModule)\n\t}\n\tif err := c.zstd.Compress(ctx, workspaceCompressedFilename, true, tmpDir, global.HomeDir); err != nil {\n\t\treturn err\n\t}\n\tsrc := filepath.Join(tmpDir, workspaceCompressedFilename)\n\tdst := filepath.Join(global.WorkspaceCacheDir, workspaceCompressedFilename)\n\tif err := fileutils.CopyFile(src, dst, false); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}\n\nfunc (c *cache) ExtractWorkspace(ctx context.Context, subModule string) error {\n\ttmpDir := os.TempDir()\n\tworkspaceCompressedFilename := workspaceCompressedFilenameV1\n\tif subModule != \"\" {\n\t\tworkspaceCompressedFilename = fmt.Sprintf(workspaceCompressedFilenameV2, subModule)\n\t}\n\tsrc := filepath.Join(global.WorkspaceCacheDir, workspaceCompressedFilename)\n\tdst := filepath.Join(tmpDir, workspaceCompressedFilename)\n\tif err := fileutils.CopyFile(src, dst, false); err != nil {\n\t\treturn err\n\t}\n\tif err := c.zstd.Decompress(ctx, filepath.Join(tmpDir, workspaceCompressedFilename), true, 
global.HomeDir); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}\n\nfunc (c *cache) getDefaultDirs() ([]string, error) {\n\tdefaultDirs := []string{}\n\tf, err := os.Open(global.RepoDir)\n\tif err != nil {\n\t\treturn defaultDirs, err\n\t}\n\n\tdirs, err := f.ReadDir(-1)\n\tif err != nil {\n\t\treturn defaultDirs, err\n\t}\n\n\tdefaultDirs = append(defaultDirs, global.RepoCacheDir)\n\tfor _, d := range dirs {\n\t\t// if yarn.lock present cache yarn folder\n\t\tif d.Name() == yarnLock {\n\t\t\tdefaultDirs = append(defaultDirs, filepath.Join(c.homeDir, \".cache\", \"yarn\"))\n\t\t\treturn defaultDirs, nil\n\t\t}\n\t\t// if package-lock.json or npm-shrinkwrap.json cache .npm cache\n\t\tif d.Name() == packageLock || d.Name() == npmShrinkwrap {\n\t\t\tdefaultDirs = append(defaultDirs, filepath.Join(c.homeDir, \".npm\"))\n\t\t\treturn defaultDirs, nil\n\t\t}\n\t\t// if pnmpm-lock.yaml is present, cache .pnpm-store cache\n\t\tif d.Name() == pnpmLock {\n\t\t\tdefaultDirs = append(defaultDirs, filepath.Join(c.homeDir, \".local\", \"share\", \"pnpm\", \"store\"))\n\t\t\treturn defaultDirs, nil\n\t\t}\n\t}\n\t// If none present cache node_modules\n\tdefaultDirs = append(defaultDirs, nodeModules)\n\treturn defaultDirs, nil\n}\n"
  },
  {
    "path": "pkg/command/run.go",
    "content": "package command\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"io\"\n\t\"os\"\n\t\"os/exec\"\n\t\"strings\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/logstream\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n)\n\ntype manager struct {\n\tlogger       lumber.Logger\n\tsecretParser core.SecretParser\n\tazureClient  core.AzureClient\n}\n\n// NewExecutionManager returns new instance of manger\nfunc NewExecutionManager(secretParser core.SecretParser,\n\tazureClient core.AzureClient,\n\tlogger lumber.Logger) core.ExecutionManager {\n\treturn &manager{logger: logger,\n\t\tsecretParser: secretParser,\n\t\tazureClient:  azureClient}\n}\n\n// ExecuteUserCommands executes user commands\nfunc (m *manager) ExecuteUserCommands(ctx context.Context,\n\tcommandType core.CommandType,\n\tpayload *core.Payload,\n\trunConfig *core.Run,\n\tsecretData map[string]string,\n\tlogwriter core.LogWriterStrategy,\n\tcwd string) error {\n\tscript, err := m.createScript(runConfig.Commands, secretData)\n\tif err != nil {\n\t\treturn err\n\t}\n\tenvVars, err := m.GetEnvVariables(runConfig.EnvMap, secretData)\n\tif err != nil {\n\t\treturn err\n\t}\n\tazureReader, azureWriter := io.Pipe()\n\tdefer azureWriter.Close()\n\n\terrChan := logwriter.Write(ctx, azureReader)\n\tdefer m.closeAndWriteLog(azureWriter, errChan, commandType)\n\tlogWriter := lumber.NewWriter(m.logger)\n\tdefer logWriter.Close()\n\tmultiWriter := io.MultiWriter(logWriter, azureWriter)\n\tmaskWriter := logstream.NewMasker(multiWriter, secretData)\n\n\tcmd := exec.CommandContext(ctx, \"/bin/bash\", \"-c\", script)\n\tcmd.Dir = cwd\n\tcmd.Env = envVars\n\tcmd.Stdout = maskWriter\n\tcmd.Stderr = maskWriter\n\n\tif startErr := cmd.Start(); startErr != nil {\n\t\tm.logger.Errorf(\"failed to start command: %s, error: %v\", commandType, startErr)\n\t\treturn startErr\n\t}\n\tif execErr := cmd.Wait(); execErr != nil {\n\t\tm.logger.Errorf(\"command %s, exited with 
error: %v\", commandType, execErr)\n\t\treturn execErr\n\t}\n\tazureWriter.Close()\n\tif uploadErr := <-errChan; uploadErr != nil {\n\t\tm.logger.Errorf(\"failed to upload logs for command %s, error: %v\", commandType, uploadErr)\n\t\treturn uploadErr\n\t}\n\treturn nil\n}\n\n// ExecuteInternalCommands executes internal commands\nfunc (m *manager) ExecuteInternalCommands(ctx context.Context,\n\tcommandType core.CommandType,\n\tcommands []string,\n\tcwd string,\n\tenvMap, secretData map[string]string) error {\n\tbashCommands := strings.Join(commands, \" && \")\n\tcmd := exec.CommandContext(ctx, \"/bin/bash\", \"-c\", bashCommands)\n\tif cwd != \"\" {\n\t\tcmd.Dir = cwd\n\t}\n\tlogWriter := lumber.NewWriter(m.logger)\n\tdefer logWriter.Close()\n\tcmd.Stderr = logWriter\n\tcmd.Stdout = logWriter\n\tm.logger.Debugf(\"Executing command of type %s\", commandType)\n\tif err := cmd.Run(); err != nil {\n\t\tm.logger.Errorf(\"command of type %s failed with error: %v\", commandType, err)\n\t\treturn err\n\t}\n\treturn nil\n}\n\n// GetEnvVariables gives set environment variable\nfunc (m *manager) GetEnvVariables(envMap, secretData map[string]string) ([]string, error) {\n\tenvVars := os.Environ()\n\tfor k, v := range envMap {\n\t\tval, err := m.secretParser.SubstituteSecret(v, secretData)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tenvVars = append(envVars, fmt.Sprintf(\"%s=%s\", k, val))\n\t}\n\treturn envVars, nil\n}\n\nfunc (m *manager) closeAndWriteLog(azureWriter *io.PipeWriter, errChan <-chan error, commandType core.CommandType) {\n\tazureWriter.Close()\n\tif uploadErr := <-errChan; uploadErr != nil {\n\t\tm.logger.Errorf(\"failed to upload logs for command %s, error: %v\", commandType, uploadErr)\n\t}\n}\n"
  },
  {
    "path": "pkg/command/run_test.go",
    "content": "package command\n\nimport (\n\t\"fmt\"\n\t\"os\"\n\t\"reflect\"\n\t\"sort\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/mocks\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/secret\"\n\t\"github.com/LambdaTest/test-at-scale/testutils\"\n)\n\nfunc TestNewExecutionManager(t *testing.T) {\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't initialize logger, error: %v\", err)\n\t}\n\tazureClient := new(mocks.AzureClient)\n\tsecretParser := secret.New(logger)\n\ttype args struct {\n\t\tsecretParser core.SecretParser\n\t\tazureClient  core.AzureClient\n\t\tlogger       lumber.Logger\n\t}\n\ttests := []struct {\n\t\tname string\n\t\targs args\n\t\twant core.ExecutionManager\n\t}{\n\t\t{\"Test initialisation func\",\n\t\t\targs{secretParser: secretParser,\n\t\t\t\tazureClient: azureClient,\n\t\t\t\tlogger:      logger,\n\t\t\t},\n\t\t\t&manager{\n\t\t\t\tlogger:       logger,\n\t\t\t\tsecretParser: secretParser,\n\t\t\t\tazureClient:  azureClient,\n\t\t\t},\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tif got := NewExecutionManager(tt.args.secretParser, tt.args.azureClient, tt.args.logger); !reflect.DeepEqual(got, tt.want) {\n\t\t\t\tt.Errorf(\"NewExecutionManager() = %v, want %v\", got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc Test_manager_GetEnvVariables(t *testing.T) {\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't initialize logger, error: %v\", err)\n\t}\n\n\tsecretParser := secret.New(logger)\n\tazureClient := new(mocks.AzureClient)\n\tenvVars := os.Environ()\n\n\ttype fields struct {\n\t\tlogger       lumber.Logger\n\t\tsecretParser core.SecretParser\n\t\tazureClient  core.AzureClient\n\t}\n\ttype args struct {\n\t\tenvMap     map[string]string\n\t\tsecretData map[string]string\n\t}\n\ttests := []struct {\n\t\tname    
string\n\t\tfields  fields\n\t\targs    args\n\t\twant    []string\n\t\twantErr bool\n\t}{\n\t\t{\"Test GetEnvVariables for success\",\n\t\t\tfields{\n\t\t\t\tlogger:       logger,\n\t\t\t\tsecretParser: secretParser,\n\t\t\t\tazureClient:  azureClient,\n\t\t\t},\n\t\t\targs{\n\t\t\t\tenvMap:     map[string]string{\"os\": \"linux\", \"arch\": \"amd64\", \"ver\": \"1.15\"},\n\t\t\t\tsecretData: map[string]string{\"key1\": \"abc\", \"key2\": \"xyz\", \"key3\": \"123\"},\n\t\t\t},\n\t\t\tappend(envVars, \"arch=amd64 os=linux ver=1.15\"),\n\t\t\tfalse,\n\t\t},\n\t}\n\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tm := &manager{\n\t\t\t\tlogger:       tt.fields.logger,\n\t\t\t\tsecretParser: tt.fields.secretParser,\n\t\t\t\tazureClient:  tt.fields.azureClient,\n\t\t\t}\n\t\t\tgot, err := m.GetEnvVariables(tt.args.envMap, tt.args.secretData)\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"manager.GetEnvVariables() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tsort.Strings(got)\n\t\t\tsort.Strings(tt.want)\n\t\t\treceived := fmt.Sprintf(\"%v\", got)\n\t\t\twant := fmt.Sprintf(\"%v\", tt.want)\n\t\t\tif len(received) != len(want) || received != want {\n\t\t\t\tt.Errorf(\"manager.GetEnvVariables() = \\n%v, \\nwant \\n%v\", got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "pkg/command/script.go",
    "content": "package command\n\nimport (\n\t\"bytes\"\n\t\"fmt\"\n\t\"strings\"\n)\n\n// CreateScript converts a slice of individual shell commands to\n// a shell script.\nfunc (m *manager) createScript(commands []string, secretData map[string]string) (string, error) {\n\tbuf := new(bytes.Buffer)\n\tfmt.Fprintln(buf)\n\tfmt.Fprint(buf, optionScript)\n\tfmt.Fprintln(buf)\n\tvar err error\n\tfor _, command := range commands {\n\t\tescaped := fmt.Sprintf(\"%q\", command)\n\t\tescaped = strings.Replace(escaped, \"$\", `\\$`, -1)\n\t\tif len(secretData) > 0 {\n\t\t\tcommand, err = m.secretParser.SubstituteSecret(command, secretData)\n\t\t\tif err != nil {\n\t\t\t\treturn \"\", err\n\t\t\t}\n\t\t}\n\t\tbuf.WriteString(fmt.Sprintf(\n\t\t\ttraceScript,\n\t\t\tescaped,\n\t\t\tcommand,\n\t\t))\n\t}\n\treturn buf.String(), nil\n}\n\n// optionScript is a helper script this is added to the build\n// to set shell options, in this case, to exit on error.\nconst optionScript = `\nset -e\n`\n\n// traceScript is a helper script that is added to\n// the build script to trace a command.\nconst traceScript = `\necho + %s\n%s\n`\n"
  },
  {
    "path": "pkg/command/script_test.go",
    "content": "package command\n\nimport (\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/mocks\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/testutils\"\n\t\"github.com/stretchr/testify/mock\"\n)\n\nfunc Test_manager_createScript(t *testing.T) {\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't initialize logger, error: %v\", err)\n\t}\n\n\tvar azureClient core.AzureClient\n\tcommands := []string{\"cmd1\", \"cmd2\", \"cmd3\"}\n\tsecretData := map[string]string{\"secret1\": \"s1\", \"secret2\": \"s2\", \"secret3\": \"s3\"}\n\tsecretParser := new(mocks.SecretParser)\n\tsecretParserErr := new(mocks.SecretParser)\n\twant := `\n\nset -e\n\n\necho + \"cmd1\"\nfakecommand\n\necho + \"cmd2\"\nfakecommand\n\necho + \"cmd3\"\nfakecommand\n`\n\ttype fields struct {\n\t\tlogger       lumber.Logger\n\t\tsecretParser core.SecretParser\n\t\tazureClient  core.AzureClient\n\t}\n\ttype args struct {\n\t\tcommands   []string\n\t\tsecretData map[string]string\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\tfields  fields\n\t\targs    args\n\t\twant    string\n\t\twantErr bool\n\t}{\n\t\t{\n\t\t\t\"Test for success\",\n\t\t\tfields{logger: logger, secretParser: secretParser, azureClient: azureClient},\n\t\t\targs{commands: commands, secretData: secretData},\n\t\t\twant,\n\t\t\tfalse,\n\t\t},\n\t\t{\n\t\t\t\"This should throw an error\",\n\t\t\tfields{logger: logger, secretParser: secretParserErr, azureClient: azureClient},\n\t\t\targs{commands: commands, secretData: secretData},\n\t\t\t\"\",\n\t\t\ttrue,\n\t\t},\n\t}\n\n\tsecretParser.On(\"SubstituteSecret\", mock.AnythingOfType(\"string\"), secretData).Return(\n\t\tfunc(command string, secretData map[string]string) string {\n\t\t\treturn \"fakecommand\"\n\t\t},\n\t\tfunc(command string, secretData map[string]string) error 
{\n\t\t\treturn nil\n\t\t})\n\n\tsecretParserErr.On(\"SubstituteSecret\", mock.AnythingOfType(\"string\"), secretData).Return(\n\t\tfunc(command string, secretData map[string]string) string {\n\t\t\treturn \"\"\n\t\t},\n\t\tfunc(command string, secretData map[string]string) error {\n\t\t\treturn errs.New(\"error from mocked interface\")\n\t\t})\n\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tm := &manager{\n\t\t\t\tlogger:       tt.fields.logger,\n\t\t\t\tsecretParser: tt.fields.secretParser,\n\t\t\t\tazureClient:  tt.fields.azureClient,\n\t\t\t}\n\t\t\tgot, err := m.createScript(tt.args.commands, tt.args.secretData)\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"manager.createScript() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif got != tt.want {\n\t\t\t\tt.Errorf(\"manager.createScript() = %v, want %v\", got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "pkg/core/interfaces.go",
    "content": "package core\n\nimport (\n\t\"context\"\n\t\"io\"\n)\n\n// PayloadManager defines operations for payload\ntype PayloadManager interface {\n\t// ValidatePayload validates the nucleus payload\n\tValidatePayload(ctx context.Context, payload *Payload) error\n\t// FetchPayload used for fetching the payload used for running nucleus\n\tFetchPayload(ctx context.Context, payloadAddress string) (*Payload, error)\n}\n\n// TASConfigManager defines operations for tas config\ntype TASConfigManager interface {\n\t// LoadAndValidate loads and returns the tas config\n\tLoadAndValidate(ctx context.Context, version int, path string, eventType EventType, licenseTier Tier,\n\t\ttasFilePathInRepo string) (interface{}, error)\n\n\t// GetVersion returns TAS yml version\n\tGetVersion(path string) (int, error)\n\n\t// GetTasConfigFilePath returns file path of tas config\n\tGetTasConfigFilePath(payload *Payload) (string, error)\n}\n\n// GitManager manages the cloning of git repositories\ntype GitManager interface {\n\t// Clone repository from TAS config\n\tClone(ctx context.Context, payload *Payload, oauth *Oauth) error\n\t// DownloadFileByCommit download file from repo for given commit\n\tDownloadFileByCommit(ctx context.Context, gitProvider, repoSlug, commitID, filePath string, oauth *Oauth) (string, error)\n}\n\n// DiffManager manages the diff findings for the given payload\ntype DiffManager interface {\n\tGetChangedFiles(ctx context.Context, payload *Payload, oauth *Oauth) (map[string]int, error)\n}\n\n// TestDiscoveryService services discovery of tests\ntype TestDiscoveryService interface {\n\t// Discover executes the test discovery scripts.\n\tDiscover(ctx context.Context, args *DiscoveyArgs) (*DiscoveryResult, error)\n\n\t// SendResult sends discovery result to TAS server\n\tSendResult(ctx context.Context, testDiscoveryResult *DiscoveryResult) error\n}\n\n// BlockTestService is used for fetching blocklisted tests\ntype BlockTestService interface {\n\tGetBlockTests(ctx 
context.Context, blocklistYAML []string, branch string) error\n}\n\n// TestExecutionService services execution of tests\ntype TestExecutionService interface {\n\t// Run executes the test execution scripts\n\tRun(ctx context.Context, testExecutionArgs *TestExecutionArgs) (results *ExecutionResults, err error)\n\t// SendResults sends the test execution results to the TAS server.\n\tSendResults(ctx context.Context, payload *ExecutionResults) (resp *TestReportResponsePayload, err error)\n}\n\n// CoverageService services coverage of tests\ntype CoverageService interface {\n\tMergeAndUpload(ctx context.Context, payload *Payload) error\n}\n\n// TestStats is used for servicing stat collection\ntype TestStats interface {\n\tCaptureTestStats(pid int32, collectStats bool) error\n}\n\n// Task is a service to update task status at neuron\ntype Task interface {\n\t// UpdateStatus updates status of the task\n\tUpdateStatus(ctx context.Context, payload *TaskPayload) error\n}\n\n// NotifMessage  defines struct for notification message\ntype NotifMessage struct {\n\tType   string\n\tValue  string\n\tStatus string\n\tError  string\n}\n\n// AzureClient defines operation for working with azure store\ntype AzureClient interface {\n\tFindUsingSASUrl(ctx context.Context, sasURL string) (io.ReadCloser, error)\n\tFind(ctx context.Context, path string) (io.ReadCloser, error)\n\tCreate(ctx context.Context, path string, reader io.Reader, mimeType string) (string, error)\n\tCreateUsingSASURL(ctx context.Context, sasURL string, reader io.Reader, mimeType string) (string, error)\n\tGetSASURL(ctx context.Context, purpose SASURLPurpose, query map[string]interface{}) (string, error)\n\tExists(ctx context.Context, path string) (bool, error)\n}\n\n// ZstdCompressor performs zstd compression and decompression\ntype ZstdCompressor interface {\n\tCompress(ctx context.Context, compressedFileName string, preservePath bool, workingDirectory string, filesToCompress ...string) error\n\tDecompress(ctx 
context.Context, filePath string, preservePath bool, workingDirectory string) error\n}\n\n// CacheStore defines operation for working with the cache\n//go:generate mockery  --name  CacheStore  --keeptree  --output  ../mocks/CacheStore.go\ntype CacheStore interface {\n\t// Download downloads cache present at cacheKey\n\tDownload(ctx context.Context, cacheKey string) error\n\t// Upload creates, compresses and uploads cache at cacheKey\n\tUpload(ctx context.Context, cacheKey string, itemsToCompress ...string) error\n\t// CacheWorkspace caches the workspace onto a mounted volume\n\tCacheWorkspace(ctx context.Context, subModule string) error\n\t// ExtractWorkspace extracts the workspace cache from mounted volume\n\tExtractWorkspace(ctx context.Context, subModule string) error\n}\n\n// SecretParser defines operation for parsing the vault secrets in given path\ntype SecretParser interface {\n\t// GetOauthSecret parses the oauth secret for given path\n\tGetOauthSecret(filepath string) (*Oauth, error)\n\t// GetRepoSecret parses the repo secret for given path\n\tGetRepoSecret(string) (map[string]string, error)\n\t// SubstituteSecret replace secret placeholders with their respective values\n\tSubstituteSecret(command string, secretData map[string]string) (string, error)\n\t// Expired reports whether the token is expired.\n\tExpired(token *Oauth) bool\n}\n\n// ExecutionManager has responsibility for executing the preRun, postRun and internal commands\ntype ExecutionManager interface {\n\t// ExecuteUserCommands executes the preRun or postRun commands given by user in his yaml.\n\tExecuteUserCommands(ctx context.Context,\n\t\tcommandType CommandType,\n\t\tpayload *Payload,\n\t\trunConfig *Run,\n\t\tsecretData map[string]string,\n\t\tlogwriter LogWriterStrategy,\n\t\tcwd string) error\n\n\t// ExecuteInternalCommands executes the commands like installing runners and test discovery.\n\tExecuteInternalCommands(ctx context.Context,\n\t\tcommandType CommandType,\n\t\tcommands 
[]string,\n\t\tcwd string, envMap,\n\t\tsecretData map[string]string) error\n\t// GetEnvVariables get the environment variables from the env map given by user.\n\tGetEnvVariables(envMap, secretData map[string]string) ([]string, error)\n}\n\n// Requests is a util interface for making API Requests\ntype Requests interface {\n\t// MakeAPIRequest makes an HTTP request with auth\n\tMakeAPIRequest(ctx context.Context, httpMethod, endpoint string, body []byte, params map[string]interface{},\n\t\theaders map[string]string) (rawbody []byte, statusCode int, err error)\n}\n\n// ListSubModuleService will sends the submodule count to TAS server\ntype ListSubModuleService interface {\n\t// Send sends count of submodules to TAS server\n\tSend(ctx context.Context, buildID string, totalSubmodule int) error\n}\n\n// Driver has the responsibility to run discovery and test execution\ntype Driver interface {\n\t// RunDiscovery runs the test discovery\n\tRunDiscovery(ctx context.Context, payload *Payload,\n\t\ttaskPayload *TaskPayload, oauth *Oauth, coverageDir string, secretMap map[string]string) error\n\t// RunExecution runs the test execution\n\tRunExecution(ctx context.Context, payload *Payload,\n\t\ttaskPayload *TaskPayload, oauth *Oauth, coverageDir string, secretMap map[string]string) error\n}\n\n// LogWriterStrategy interface is used to tag all log writing strategy\ntype LogWriterStrategy interface {\n\t// Write reads data from io.Reader and write it to various data stream\n\tWrite(ctx context.Context, reader io.Reader) <-chan error\n}\n\n// Builder builds the driver for given tas yml version\ntype Builder interface {\n\t// GetDriver returns driver for use\n\tGetDriver(version int, ymlFilePath string) (Driver, error)\n}\n"
  },
  {
    "path": "pkg/core/lifecycle.go",
    "content": "package core\n\nimport (\n\t\"context\"\n\t\"errors\"\n\t\"fmt\"\n\t\"os\"\n\t\"path/filepath\"\n\t\"runtime/debug\"\n\t\"time\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/fileutils\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n)\n\nconst (\n\tendpointPostTestResults = \"http://localhost:9876/results\"\n\tendpointPostTestList    = \"http://localhost:9876/test-list\"\n\tlanguageJs              = \"javascript\"\n)\n\n// NewPipeline creates and returns a new Pipeline instance\nfunc NewPipeline(cfg *config.NucleusConfig, logger lumber.Logger) (*Pipeline, error) {\n\treturn &Pipeline{\n\t\tCfg:    cfg,\n\t\tLogger: logger,\n\t}, nil\n}\n\n// Start starts pipeline lifecycle\nfunc (pl *Pipeline) Start(ctx context.Context) (err error) {\n\tctx, cancel := context.WithCancel(ctx)\n\tdefer cancel()\n\tstartTime := time.Now()\n\n\tpl.Logger.Debugf(\"Starting pipeline.....\")\n\tpl.Logger.Debugf(\"Fetching config\")\n\n\t// fetch configuration\n\tpayload, err := pl.PayloadManager.FetchPayload(ctx, pl.Cfg.PayloadAddress)\n\tif err != nil {\n\t\tpl.Logger.Fatalf(\"error while fetching payload: %v\", err)\n\t}\n\n\terr = pl.PayloadManager.ValidatePayload(ctx, payload)\n\tif err != nil {\n\t\tpl.Logger.Fatalf(\"error while validating payload %v\", err)\n\t}\n\n\tpl.Logger.Debugf(\"Payload for current task: %+v \\n\", *payload)\n\n\tif pl.Cfg.CoverageMode {\n\t\tif err = pl.CoverageService.MergeAndUpload(ctx, payload); err != nil {\n\t\t\tpl.Logger.Fatalf(\"error while merge and upload coverage files %v\", err)\n\t\t}\n\t\tos.Exit(0)\n\t}\n\n\t// set payload on pipeline object\n\tpl.Payload = payload\n\n\ttaskPayload := pl.getTaskPayload(payload, startTime)\n\tpayload.TaskType = taskPayload.Type\n\tpl.Logger.Infof(\"Running nucleus in %s mode\", taskPayload.Type)\n\n\tgo func() {\n\t\t// marking 
task to running state\n\t\tif err = pl.Task.UpdateStatus(context.Background(), taskPayload); err != nil {\n\t\t\tpl.Logger.Fatalf(\"failed to update task status %v\", err)\n\t\t}\n\t}()\n\n\t// update task status when pipeline exits\n\tdefer func() {\n\t\ttaskPayload.EndTime = time.Now()\n\t\tif p := recover(); p != nil {\n\t\t\tpl.Logger.Errorf(\"panic stack trace: %v\\n%s\", p, string(debug.Stack()))\n\t\t\ttaskPayload.Status = Error\n\t\t\ttaskPayload.Remark = errs.GenericErrRemark.Error()\n\t\t} else if err != nil {\n\t\t\tif errors.Is(err, context.Canceled) {\n\t\t\t\ttaskPayload.Status = Aborted\n\t\t\t\ttaskPayload.Remark = \"Task aborted\"\n\t\t\t} else {\n\t\t\t\tif _, ok := err.(*errs.StatusFailed); ok {\n\t\t\t\t\ttaskPayload.Status = Failed\n\t\t\t\t} else {\n\t\t\t\t\ttaskPayload.Status = Error\n\t\t\t\t}\n\t\t\t\ttaskPayload.Remark = err.Error()\n\t\t\t}\n\t\t}\n\t\tif err = pl.Task.UpdateStatus(context.Background(), taskPayload); err != nil {\n\t\t\tpl.Logger.Fatalf(\"failed to update task status %v\", err)\n\t\t}\n\t}()\n\n\toauth, err := pl.SecretParser.GetOauthSecret(global.OauthSecretPath)\n\tif err != nil {\n\t\tpl.Logger.Errorf(\"failed to get oauth secret %v\", err)\n\t\treturn err\n\t}\n\t// read secrets\n\tsecretMap, err := pl.SecretParser.GetRepoSecret(global.RepoSecretPath)\n\tif err != nil {\n\t\tpl.Logger.Errorf(\"Error in fetching Repo secrets %v\", err)\n\t\terr = errs.New(errs.GenericErrRemark.Error())\n\t\treturn err\n\t}\n\tif pl.Cfg.DiscoverMode {\n\t\tpl.Logger.Infof(\"Cloning repo ...\")\n\t\terr = pl.GitManager.Clone(ctx, pl.Payload, oauth)\n\t\tif err != nil {\n\t\t\tpl.Logger.Errorf(\"Unable to clone repo '%s': %s\", payload.RepoLink, err)\n\t\t\terr = &errs.StatusFailed{Remark: fmt.Sprintf(\"Unable to clone repo: %s\", payload.RepoLink)}\n\t\t\treturn err\n\t\t}\n\t} else {\n\t\tpl.Logger.Debugf(\"Extracting workspace\")\n\t\t// Replicate workspace\n\t\t// TODO this will be changed after parallel discovery support\n\t\tif err 
= pl.CacheStore.ExtractWorkspace(ctx, \"\"); err != nil {\n\t\t\tpl.Logger.Errorf(\"Error replicating workspace: %+v\", err)\n\t\t\terr = errs.New(errs.GenericErrRemark.Error())\n\t\t\treturn err\n\t\t}\n\t}\n\tcoverageDir := filepath.Join(global.CodeCoverageDir, payload.OrgID, payload.RepoID, payload.BuildTargetCommit)\n\tif payload.CollectCoverage {\n\t\tif err = fileutils.CreateIfNotExists(coverageDir, true); err != nil {\n\t\t\tpl.Logger.Errorf(\"failed to create coverage directory %v\", err)\n\t\t\terr = errs.New(errs.GenericErrRemark.Error())\n\t\t\treturn err\n\t\t}\n\t}\n\tfilePath, err := pl.TASConfigManager.GetTasConfigFilePath(pl.Payload)\n\tif err != nil {\n\t\treturn err\n\t}\n\tversion, err := pl.TASConfigManager.GetVersion(filePath)\n\tif err != nil {\n\t\tpl.Logger.Errorf(\"Unable to load tas yaml file, error: %v\", err)\n\t\terr = &errs.StatusFailed{Remark: err.Error()}\n\t\treturn err\n\t}\n\tpl.Logger.Infof(\"TAS Version %f\", version)\n\tpl.setEnv(payload, coverageDir)\n\tnewDriver, err := pl.Builder.GetDriver(version, filePath)\n\tif err != nil {\n\t\tpl.Logger.Errorf(\"error crearing driver, error %v\", err)\n\t\treturn err\n\t}\n\tif pl.Cfg.DiscoverMode {\n\t\terr = newDriver.RunDiscovery(ctx, payload, taskPayload, oauth, coverageDir, secretMap)\n\t} else {\n\t\terr = newDriver.RunExecution(ctx, payload, taskPayload, oauth, coverageDir, secretMap)\n\t}\n\n\treturn err\n}\n\nfunc (pl *Pipeline) getTaskPayload(payload *Payload, startTime time.Time) *TaskPayload {\n\ttaskPayload := &TaskPayload{\n\t\tTaskID:      payload.TaskID,\n\t\tBuildID:     payload.BuildID,\n\t\tRepoSlug:    payload.RepoSlug,\n\t\tRepoLink:    payload.RepoLink,\n\t\tOrgID:       payload.OrgID,\n\t\tRepoID:      payload.RepoID,\n\t\tGitProvider: payload.GitProvider,\n\t\tStartTime:   startTime,\n\t\tStatus:      Running,\n\t}\n\tif pl.Cfg.DiscoverMode {\n\t\ttaskPayload.Type = DiscoveryTask\n\t} else if pl.Cfg.FlakyMode {\n\t\ttaskPayload.Type = FlakyTask\n\t} else 
{\n\t\ttaskPayload.Type = ExecutionTask\n\t}\n\treturn taskPayload\n}\n\nfunc (pl *Pipeline) setEnv(payload *Payload, coverageDir string) {\n\t// set testing taskID, orgID and buildID as environment variable\n\tos.Setenv(\"TASK_ID\", payload.TaskID)\n\tos.Setenv(\"ORG_ID\", payload.OrgID)\n\tos.Setenv(\"BUILD_ID\", payload.BuildID)\n\t// set target commit_id as environment variable\n\tos.Setenv(\"COMMIT_ID\", payload.BuildTargetCommit)\n\t// set repo_id as environment variable\n\tos.Setenv(\"REPO_ID\", payload.RepoID)\n\t// set coverage_dir as environment variable\n\tos.Setenv(\"CODE_COVERAGE_DIR\", coverageDir)\n\tos.Setenv(\"BRANCH_NAME\", payload.BranchName)\n\tos.Setenv(\"ENV\", pl.Cfg.Env)\n\tos.Setenv(\"ENDPOINT_POST_TEST_LIST\", endpointPostTestList)\n\tos.Setenv(\"ENDPOINT_POST_TEST_RESULTS\", endpointPostTestResults)\n\tos.Setenv(\"REPO_ROOT\", global.RepoDir)\n\tos.Setenv(\"BLOCK_TESTS_FILE\", global.BlockTestFileLocation)\n\tos.Setenv(global.SubModuleName, pl.Cfg.SubModule)\n\t// set MODULE_PATH to empty as env variable\n\tos.Setenv(global.ModulePath, \"\")\n}\n"
  },
  {
    "path": "pkg/core/models.go",
    "content": "// Package core is the backbone of the tunnel client,\n// it defines  the tunnel lifecycle and allows attaching hooks for functionality\n// as plugins.\npackage core\n\nimport (\n\t\"time\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n)\n\n// ExecutionID type\ntype ExecutionID string\n\n// SASURLPurpose defines reasons for which SAS Url is required\ntype SASURLPurpose string\n\n// SASURLPurpose values\nconst (\n\tPurposeCache          SASURLPurpose = \"cache\"\n\tPurposeWorkspaceCache SASURLPurpose = \"workspace_cache\"\n\tPurposePreRunLogs     SASURLPurpose = \"pre_run_logs\"\n\tPurposePostRunLogs    SASURLPurpose = \"post_run_logs\"\n\tPurposeExecutionLogs  SASURLPurpose = \"execution_logs\"\n)\n\n// Tier type of synapse\ntype Tier string\n\n// TaskTier values.\nconst (\n\tInternal Tier = \"internal\"\n\tXSmall   Tier = \"xsmall\"\n\tSmall    Tier = \"small\"\n\tMedium   Tier = \"medium\"\n\tLarge    Tier = \"large\"\n\tXLarge   Tier = \"xlarge\"\n)\n\n// PostMergeStrategyName type\ntype PostMergeStrategyName string\n\n// All  const of type PostMergeStrategyName\nconst (\n\tAfterNCommitStrategy PostMergeStrategyName = \"after_n_commits\"\n)\n\n// SplitMode is the mode for splitting tests\ntype SplitMode string\n\n// list of support test splitting modes\nconst (\n\tFileSplit SplitMode = \"file\"\n\tTestSplit SplitMode = \"test\"\n)\n\n// CommandType defines type of command\ntype CommandType string\n\n// Types of Command string\nconst (\n\tPreRun          CommandType = \"prerun\"\n\tPostRun         CommandType = \"postrun\"\n\tInstallRunners  CommandType = \"installrunners\"\n\tExecution       CommandType = \"execution\"\n\tDiscovery       CommandType = \"discovery\"\n\tZstd            CommandType = \"zstd\"\n\tCoverageMerge   CommandType = \"coveragemerge\"\n\tInstallNodeVer  CommandType = \"installnodeversion\"\n\tInitGit         CommandType = \"initgit\"\n\tRenameCloneFile CommandType 
= \"renameclonefile\"\n)\n\n// EventType represents the webhook event\ntype EventType string\n\nconst (\n\t// EventPush represents the push event.\n\tEventPush EventType = \"push\"\n\t// EventPullRequest represents the pull request event.\n\tEventPullRequest EventType = \"pull-request\"\n)\n\n// CommitChangeList defines  information related to commits\ntype CommitChangeList struct {\n\tSha      string   `json:\"Sha\"`\n\tLink     string   `json:\"Link\"`\n\tAdded    []string `json:\"added\"`\n\tRemoved  []string `json:\"removed\"`\n\tModified []string `json:\"modified\"`\n\tMessage  string   `json:\"message\"`\n}\n\n// Payload defines structure of payload\ntype Payload struct {\n\tRepoSlug                   string             `json:\"repo_slug\"`\n\tForkSlug                   string             `json:\"fork_slug\"`\n\tRepoLink                   string             `json:\"repo_link\"`\n\tBuildTargetCommit          string             `json:\"build_target_commit\"`\n\tBuildBaseCommit            string             `json:\"build_base_commit\"`\n\tTaskID                     string             `json:\"task_id\"`\n\tBranchName                 string             `json:\"branch_name\"`\n\tBuildID                    string             `json:\"build_id\"`\n\tRepoID                     string             `json:\"repo_id\"`\n\tOrgID                      string             `json:\"org_id\"`\n\tGitProvider                string             `json:\"git_provider\"`\n\tPrivateRepo                bool               `json:\"private_repo\"`\n\tEventType                  EventType          `json:\"event_type\"`\n\tDiff                       string             `json:\"diff_url\"`\n\tPullRequestNumber          int                `json:\"pull_request_number\"`\n\tCommits                    []CommitChangeList `json:\"commits\"`\n\tTasFileName                string             `json:\"tas_file_name\"`\n\tLocators                   string             `json:\"locators\"`\n\tLocatorAddress       
      string             `json:\"locator_address\"`\n\tParentCommitCoverageExists bool               `json:\"parent_commit_coverage_exists\"`\n\tLicenseTier                Tier               `json:\"license_tier\"`\n\tCollectCoverage            bool               `json:\"collect_coverage\"`\n\tTaskType                   TaskType           `json:\"-\"`\n}\n\n// Pipeline defines all attributes of Pipeline\ntype Pipeline struct {\n\tCfg                  *config.NucleusConfig\n\tPayload              *Payload\n\tLogger               lumber.Logger\n\tPayloadManager       PayloadManager\n\tTASConfigManager     TASConfigManager\n\tGitManager           GitManager\n\tExecutionManager     ExecutionManager\n\tDiffManager          DiffManager\n\tCacheStore           CacheStore\n\tTestDiscoveryService TestDiscoveryService\n\tBlockTestService     BlockTestService\n\tTestExecutionService TestExecutionService\n\tCoverageService      CoverageService\n\tTestStats            TestStats\n\tTask                 Task\n\tSecretParser         SecretParser\n\tBuilder              Builder\n}\ntype DiscoveryResult struct {\n\tTests           []TestPayload      `json:\"tests\"`\n\tImpactedTests   []string           `json:\"impactedTests\"`\n\tTestSuites      []TestSuitePayload `json:\"testSuites\"`\n\tExecuteAllTests bool               `json:\"executeAllTests\"`\n\tParallelism     int                `json:\"parallelism\"`\n\tSplitMode       SplitMode          `json:\"splitMode\"`\n\tRepoID          string             `json:\"repoID\"`\n\tBuildID         string             `json:\"buildID\"`\n\tCommitID        string             `json:\"commitID\"`\n\tTaskID          string             `json:\"taskID\"`\n\tOrgID           string             `json:\"orgID\"`\n\tBranch          string             `json:\"branch\"`\n\tSubModule       string             `json:\"subModule\"`\n}\n\n// ExecutionResult represents the request body for test and test suite execution\ntype ExecutionResult struct 
{\n\tTestPayload      []TestPayload      `json:\"testResults\"`\n\tTestSuitePayload []TestSuitePayload `json:\"testSuiteResults\"`\n}\n\n// ExecutionResults represents collection of execution results\ntype ExecutionResults struct {\n\tTaskID   string            `json:\"taskID\"`\n\tBuildID  string            `json:\"buildID\"`\n\tRepoID   string            `json:\"repoID\"`\n\tOrgID    string            `json:\"orgID\"`\n\tCommitID string            `json:\"commitID\"`\n\tTaskType TaskType          `json:\"taskType\"`\n\tResults  []ExecutionResult `json:\"results\"`\n}\n\n// TestReportResponsePayload represents the response body for test and test suite report api.\ntype TestReportResponsePayload struct {\n\tTaskID     string `json:\"taskID\"`\n\tTaskStatus Status `json:\"taskStatus\"`\n\tRemark     string `json:\"remark,omitempty\"`\n}\n\n// TestPayload represents the request body for test execution\ntype TestPayload struct {\n\tTestID          string             `json:\"testID\"`\n\tDetail          string             `json:\"_detail\"`\n\tSuiteID         string             `json:\"suiteID\"`\n\tSuites          []string           `json:\"_suites\"`\n\tTitle           string             `json:\"title\"`\n\tFullTitle       string             `json:\"fullTitle\"`\n\tName            string             `json:\"name\"`\n\tDuration        int                `json:\"duration\"`\n\tFilePath        string             `json:\"file\"`\n\tLine            string             `json:\"line\"`\n\tCol             string             `json:\"col\"`\n\tCurrentRetry    int                `json:\"currentRetry\"`\n\tStatus          string             `json:\"status\"`\n\tDAG             []string           `json:\"dependsOn\"`\n\tFilelocator     string             `json:\"locator\"`\n\tBlocklistSource string             `json:\"blocklistSource\"`\n\tBlocklisted     bool               `json:\"blocklist\"`\n\tStartTime       time.Time          `json:\"start_time\"`\n\tEndTime         
time.Time          `json:\"end_time\"`\n\tStats           []TestProcessStats `json:\"stats\"`\n\tFailureMessage  string             `json:\"failureMessage\"`\n}\n\n// TestSuitePayload represents the request body for test suite execution\ntype TestSuitePayload struct {\n\tSuiteID         string             `json:\"suiteID\"`\n\tSuiteName       string             `json:\"suiteName\"`\n\tParentSuiteID   string             `json:\"parentSuiteID\"`\n\tBlocklistSource string             `json:\"blocklistSource\"`\n\tBlocklisted     bool               `json:\"blocklist\"`\n\tStartTime       time.Time          `json:\"start_time\"`\n\tEndTime         time.Time          `json:\"end_time\"`\n\tDuration        int                `json:\"duration\"`\n\tStatus          string             `json:\"status\"`\n\tStats           []TestProcessStats `json:\"stats\"`\n\tTotalTests      int                `json:\"totalTests\"`\n}\n\n// TestProcessStats process stats associated with each test\ntype TestProcessStats struct {\n\tMemory     uint64    `json:\"memory_consumed,omitempty\"`\n\tCPU        float64   `json:\"cpu_percentage,omitempty\"`\n\tStorage    uint64    `json:\"storage,omitempty\"`\n\tRecordTime time.Time `json:\"record_time\"`\n}\n\n// Status represents the task status\ntype Status string\n\n// Const related to task status\nconst (\n\tInitiating Status = \"initiating\"\n\tRunning    Status = \"running\"\n\tFailed     Status = \"failed\"\n\tAborted    Status = \"aborted\"\n\tPassed     Status = \"passed\"\n\tError      Status = \"error\"\n)\n\n// TaskPayload repersent task response given by nucleus to neuron\ntype TaskPayload struct {\n\tTaskID      string    `json:\"task_id\"`\n\tStatus      Status    `json:\"status\"`\n\tRepoSlug    string    `json:\"repo_slug\"`\n\tRepoLink    string    `json:\"repo_link\"`\n\tRepoID      string    `json:\"repo_id\"`\n\tOrgID       string    `json:\"org_id\"`\n\tGitProvider string    `json:\"git_provider\"`\n\tCommitID    string    
`json:\"commit_id,omitempty\"`\n\tBuildID     string    `json:\"build_id\"`\n\tStartTime   time.Time `json:\"start_time\"`\n\tEndTime     time.Time `json:\"end_time,omitempty\"`\n\tRemark      string    `json:\"remark,omitempty\"`\n\tType        TaskType  `json:\"type\"`\n}\n\n// CoverageManifest for post processing coverage job\ntype CoverageManifest struct {\n\tRemovedfiles      []string           `json:\"removed_files\"`\n\tAllFilesExecuted  bool               `json:\"all_files_executed\"`\n\tCoverageThreshold *CoverageThreshold `json:\"coverage_threshold,omitempty\"`\n}\n\nconst (\n\t// FileAdded file added in commit\n\tFileAdded int = iota + 1\n\t// FileRemoved file removed in commit\n\tFileRemoved\n\t// FileModified file modified in commit\n\tFileModified\n)\n\nconst (\n\t// GitHub as git provider\n\tGitHub string = \"github\"\n\t// GitLab as git provider\n\tGitLab string = \"gitlab\"\n\t// Bitbucket as git provider\n\tBitbucket string = \"bitbucket\"\n)\n\ntype TokenType string\n\nconst (\n\t// Bearer as token type\n\tBearer TokenType = \"Bearer\"\n\t// Basic as token type\n\tBasic TokenType = \"Basic\"\n)\n\n// Oauth represents the sructure of Oauth\ntype Oauth struct {\n\tAccessToken  string    `json:\"access_token\"`\n\tExpiry       time.Time `json:\"expiry\"`\n\tRefreshToken string    `json:\"refresh_token\"`\n\tType         TokenType `json:\"token_type,omitempty\"`\n}\n\n// TASConfig represents the .tas.yml file\ntype TASConfig struct {\n\tSmartRun          bool               `yaml:\"smartRun\"`\n\tFramework         string             `yaml:\"framework\" validate:\"required,oneof=jest mocha jasmine golang junit\"`\n\tBlocklist         []string           `yaml:\"blocklist\"`\n\tPostmerge         *Merge             `yaml:\"postMerge\" validate:\"omitempty\"`\n\tPremerge          *Merge             `yaml:\"preMerge\" validate:\"omitempty\"`\n\tCache             *Cache             `yaml:\"cache\" validate:\"omitempty\"`\n\tPrerun            *Run             
  `yaml:\"preRun\" validate:\"omitempty\"`\n\tPostrun           *Run               `yaml:\"postRun\" validate:\"omitempty\"`\n\tParallelism       int                `yaml:\"parallelism\"`\n\tSplitMode         SplitMode          `yaml:\"splitMode\" validate:\"oneof=test file\"`\n\tSkipCache         bool               `yaml:\"skipCache\"`\n\tConfigFile        string             `yaml:\"configFile\" validate:\"omitempty\"`\n\tCoverageThreshold *CoverageThreshold `yaml:\"coverageThreshold\" validate:\"omitempty\"`\n\tTier              Tier               `yaml:\"tier\" validate:\"oneof=xsmall small medium large xlarge\"`\n\tNodeVersion       string             `yaml:\"nodeVersion\" validate:\"omitempty,semver\"`\n\tContainerImage    string             `yaml:\"containerImage\"`\n\tFrameworkVersion  int                `yaml:\"frameworkVersion\" validate:\"omitempty\"`\n\tVersion           string             `yaml:\"version\" validate:\"required\"`\n}\n\n// CoverageThreshold reprents the code coverage threshold\ntype CoverageThreshold struct {\n\tBranches   float64 `yaml:\"branches\" json:\"branches\" validate:\"number,min=0,max=100\"`\n\tLines      float64 `yaml:\"lines\" json:\"lines\" validate:\"number,min=0,max=100\"`\n\tFunctions  float64 `yaml:\"functions\" json:\"functions\" validate:\"number,min=0,max=100\"`\n\tStatements float64 `yaml:\"statements\" json:\"statements\" validate:\"number,min=0,max=100\"`\n\tPerFile    bool    `yaml:\"perFile\" json:\"perFile\"`\n}\n\n// Cache represents the user's cached directories\ntype Cache struct {\n\tKey   string   `yaml:\"key\" validate:\"required\"`\n\tPaths []string `yaml:\"paths\" validate:\"required\"`\n}\n\n// Modifier defines struct for modifier\ntype Modifier struct {\n\tType   string\n\tConfig string\n\tCli    string\n}\n\n// Run represents  pre and post runs\ntype Run struct {\n\tCommands []string          `yaml:\"command\" validate:\"omitempty,gt=0\"`\n\tEnvMap   map[string]string `yaml:\"env\" 
validate:\"omitempty,gt=0\"`\n}\n\n// Merge represents pre and post merge\ntype Merge struct {\n\tPatterns []string          `yaml:\"pattern\" validate:\"required,gt=0\"`\n\tEnvMap   map[string]string `yaml:\"env\" validate:\"omitempty,gt=0\"`\n}\n\n// Stability defines struct for stability\ntype Stability struct {\n\tConsecutiveRuns int `yaml:\"consecutive_runs\"`\n}\n\n// TaskType specifies the type of a Task\ntype TaskType string\n\n// Task Type values.\nconst (\n\tDiscoveryTask TaskType = \"discover\"\n\tExecutionTask TaskType = \"execute\"\n\tFlakyTask     TaskType = \"flaky\"\n)\n\n// TestStatus stores tests status\ntype TestStatus string\n\nconst (\n\tBlocklisted TestStatus = \"blocklisted\"\n\tQuarantined TestStatus = \"quarantined\"\n)\n\n// TASConfigV2 repersent TASConfig for version 2 and above\ntype TASConfigV2 struct {\n\tSmartRun          bool               `yaml:\"smartRun\"`\n\tCache             *Cache             `yaml:\"cache\" validate:\"omitempty\"`\n\tTier              Tier               `yaml:\"tier\" validate:\"oneof=xsmall small medium large xlarge\"`\n\tPostMerge         *MergeV2           `yaml:\"postMerge\" validate:\"omitempty\"`\n\tPreMerge          *MergeV2           `yaml:\"preMerge\" validate:\"omitempty\"`\n\tSkipCache         bool               `yaml:\"skipCache\"`\n\tCoverageThreshold *CoverageThreshold `yaml:\"coverageThreshold\" validate:\"omitempty\"`\n\tParallelism       int                `yaml:\"parallelism\"` // TODO: will be supported later\n\tVersion           string             `yaml:\"version\" validate:\"required\"`\n\tSplitMode         SplitMode          `yaml:\"splitMode\" validate:\"oneof=test file\"`\n\tContainerImage    string             `yaml:\"containerImage\"`\n\tNodeVersion       string             `yaml:\"nodeVersion\" validate:\"omitempty,semver\"`\n}\n\n// MergeV2 repersent MergeConfig for version 2 and above\ntype MergeV2 struct {\n\tPreRun     *Run              `yaml:\"preRun\" 
validate:\"omitempty\"`\n\tSubModules []SubModule       `yaml:\"subModules\" validate:\"required,gt=0\"`\n\tEnvMap     map[string]string `yaml:\"env\" validate:\"omitempty,gt=0\"`\n}\n\n// SubModule represent the structure of subModule yaml v2\ntype SubModule struct {\n\tName               string   `yaml:\"name\" validate:\"required\"`\n\tPath               string   `yaml:\"path\" validate:\"required\"`\n\tPatterns           []string `yaml:\"pattern\" validate:\"required,gt=0\"`\n\tFramework          string   `yaml:\"framework\" validate:\"required,oneof=jest mocha jasmine\"`\n\tBlocklist          []string `yaml:\"blocklist\"`\n\tPrerun             *Run     `yaml:\"preRun\" validate:\"omitempty\"`\n\tPostrun            *Run     `yaml:\"postRun\" validate:\"omitempty\"`\n\tRunPrerunEveryTime bool     `yaml:\"runPreRunEveryTime\"`\n\tParallelism        int      `yaml:\"parallelism\"` // TODO: will be supported later\n\tConfigFile         string   `yaml:\"configFile\" validate:\"omitempty\"`\n}\n\n// TasVersion used to identify yaml version\ntype TasVersion struct {\n\tVersion string `yaml:\"version\" validate:\"required\"`\n}\n\n// SubModuleList repersent submodule list API payload\ntype SubModuleList struct {\n\tBuildID        string `json:\"buildID\"`\n\tTotalSubModule int    `json:\"totalSubModule\"`\n}\n\n// DiscoveyArgs specify the arguments for discovery\ntype DiscoveyArgs struct {\n\tTestPattern      []string\n\tPayload          *Payload\n\tEnvMap           map[string]string\n\tSecretData       map[string]string\n\tTestConfigFile   string\n\tFrameWork        string\n\tSmartRun         bool\n\tDiff             map[string]int\n\tDiffExists       bool\n\tFrameWorkVersion int\n\tCWD              string\n}\n\n// TestExecutionArgs specify the argument for test discovery\ntype TestExecutionArgs struct {\n\tPayload           *Payload\n\tCoverageDir       string\n\tLogWriterStrategy LogWriterStrategy\n\tTestPattern       []string\n\tEnvMap            
map[string]string\n\tTestConfigFile    string\n\tFrameWork         string\n\tSecretData        map[string]string\n\tFrameWorkVersion  int\n\tCWD               string\n}\n\n// YMLParsingRequestMessage defines yml parsing request received from TAS server\ntype YMLParsingRequestMessage struct {\n\tGitProvider string    `json:\"gitProvider\"`\n\tCommitID    string    `json:\"commitID\"`\n\tEvent       EventType `json:\"eventType\"`\n\tRepoSlug    string    `json:\"repoSlug\"`\n\tTasFileName string    `json:\"tasFilePath\"`\n\tLicenseTier Tier      `json:\"license_tier\"`\n\tOrgID       string    `json:\"orgID\"`\n\tBuildID     string    `json:\"buildID\"`\n}\n\n// TASConfigDownloaderOutput repersent output return by tasconfig downloader\ntype TASConfigDownloaderOutput struct {\n\tVersion   int         `json:\"version\"`\n\tTASConfig interface{} `json:\"tasConfig\"`\n}\n\n// YMLParsingResultMessage repersent message sent to TAS server in response of yml parsing request\ntype YMLParsingResultMessage struct {\n\tErrorMsg  string                    `json:\"ErrorMsg\"`\n\tOrgID     string                    `json:\"orgID\"`\n\tBuildID   string                    `json:\"buildID\"`\n\tYMLOutput TASConfigDownloaderOutput `json:\"ymlOutput\"`\n}\n"
  },
  {
    "path": "pkg/core/runner.go",
    "content": "package core\n\nimport (\n\t\"context\"\n\t\"time\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n)\n\n// Specs denotes system specification\ntype Specs struct {\n\tCPU float32\n\tRAM int64\n}\n\n// TierOpts is const map which map each tier to specs\nvar TierOpts = map[Tier]Specs{\n\tInternal: {CPU: 0.5, RAM: 256},\n\tXSmall:   {CPU: 1, RAM: 2048},\n\tSmall:    {CPU: 2, RAM: 4096},\n\tMedium:   {CPU: 4, RAM: 8192},\n\tLarge:    {CPU: 8, RAM: 16384},\n\tXLarge:   {CPU: 16, RAM: 32768},\n}\n\n// ContainerStatus contains status of container\ntype ContainerStatus struct {\n\tDone  bool\n\tError errs.Err\n}\n\n// ContainerImageConfig contains registry config for docker\ntype ContainerImageConfig struct {\n\tAuthRegistry string\n\tImage        string\n\tMode         config.ModeType\n\tPullPolicy   config.PullPolicyType\n}\n\n// DockerRunner defines operations for docker\ntype DockerRunner interface {\n\t// Creates the execution enging\n\tCreate(context.Context, *RunnerOptions) ContainerStatus\n\n\t// Run runs the execution engine\n\tRun(context.Context, *RunnerOptions) ContainerStatus\n\n\t// WaitForRunning waits for runner to get completed\n\tWaitForCompletion(ctx context.Context, r *RunnerOptions) error\n\n\t// Destroy the execution engine\n\tDestroy(ctx context.Context, r *RunnerOptions) error\n\n\t// GetInfo will get resources details of the infra\n\tGetInfo(context.Context) (float32, int64)\n\n\t// Initiate runs docker containers\n\tInitiate(context.Context, *RunnerOptions, chan ContainerStatus)\n\n\t// PullImage will pull image from remote\n\tPullImage(containerImageConfig *ContainerImageConfig, r *RunnerOptions) error\n\n\t// KillRunningDocker kills  container spawn by synapse\n\tKillRunningDocker(ctx context.Context)\n\n\t// KillContainerForBuildID kills synapse container which is running for given buildID\n\tKillContainerForBuildID(buildID string) error\n\n\tCreateVolume(ctx 
context.Context, r *RunnerOptions) error\n\n\t// RemoveOldVolumes removes volumes that are older than X hours\n\tRemoveOldVolumes(ctx context.Context)\n\n\t// CopyFileToContainer copies content to container in file\n\tCopyFileToContainer(ctx context.Context, path, fileName, containerID string, content []byte) error\n\n\t// FindVolumes checks if docker volume is available\n\tFindVolumes(volumeName string) (bool, error)\n\n\t// RemoveVolume removes volume\n\tRemoveVolume(ctx context.Context, volumeName string) error\n}\n\n// VolumeDetails docker volume options\ntype VolumeDetails struct {\n\tCreatedAt  time.Time              `json:\"CreatedAt,omitempty\"`\n\tDriver     string                 `json:\"Driver\"`\n\tLabels     map[string]string      `json:\"Labels\"`\n\tMountpoint string                 `json:\"Mountpoint\"`\n\tName       string                 `json:\"Name\"`\n\tOptions    map[string]string      `json:\"Options\"`\n\tScope      string                 `json:\"Scope\"`\n\tStatus     map[string]interface{} `json:\"Status,omitempty\"`\n}\n\n// RunnerOptions provides the the required instructions for execution engine.\ntype RunnerOptions struct {\n\tContainerID       string            `json:\"container_id\"`\n\tDockerImage       string            `json:\"docker_image\"`\n\tContainerPort     int               `json:\"container_port\"`\n\tHostPort          int               `json:\"host_port\"`\n\tLabel             map[string]string `json:\"label\"`\n\tNameSpace         string            `json:\"name_space\"`\n\tServiceAccount    string            `json:\"service_account\"`\n\tPodName           string            `json:\"pod_name\"`\n\tContainerName     string            `json:\"container_name\"`\n\tContainerArgs     []string          `json:\"container_args\"`\n\tContainerCommands []string          `json:\"container_commands\"`\n\tHostVolumePath    string            `json:\"host_volume_path\"`\n\tEnv               []string          `json:\"env\"`\n\tOrgID       
      string            `json:\"org_id\"`\n\tVault             *VaultOpts        `json:\"vault\"`\n\tLogfilePath       string            `json:\"logfile_path\"`\n\tPodType           PodType           `json:\"pod_type\"`\n\tTier              Tier              `json:\"tier\"`\n}\n\n// VaultOpts provides the vault path options\ntype VaultOpts struct {\n\t// SecretPath path of the repo secrets.\n\tSecretPath string\n\t// TokenPath path of the user token.\n\tTokenPath string\n\t// RoleName vault role name\n\tRoleName string\n\t// Namespace is the default vault namespace\n\tNamespace string\n}\n\n// PodType specifies the type of pod\ntype PodType string\n\n// Values that PodType can take\nconst (\n\tNucleusPod  PodType = \"nucleus\"\n\tCoveragePod PodType = \"coverage\"\n)\n"
  },
  {
    "path": "pkg/core/secrets.go",
    "content": "package core\n\nimport \"github.com/LambdaTest/test-at-scale/config\"\n\n// Secret struct for holding secret data\ntype Secret map[string]string\n\n// VaultSecret holds secrets in vault format\ntype VaultSecret struct {\n\tSecrets Secret `json:\"data\"`\n}\n\n// SecretsManager defines operation for secrets\ntype SecretsManager interface {\n\t// GetLambdatestSecrets returns lambdatest config\n\tGetLambdatestSecrets() *config.LambdatestConfig\n\n\t// GetDockerSecrets returns Mode , RegistryAuth, and URL for pulling remote docker image\n\tGetDockerSecrets(r *RunnerOptions) (ContainerImageConfig, error)\n\n\t// GetSynapseName returns synapse name mentioned in config\n\tGetSynapseName() string\n\t// GetOauthToken returns oauth token\n\tGetOauthToken() *Oauth\n\n\t// GetGitSecretBytes get git secrets in bytes\n\tGetGitSecretBytes() ([]byte, error)\n\n\t// GetRepoSecretBytes get repo secrets in bytes\n\tGetRepoSecretBytes(repo string) ([]byte, error)\n}\n"
  },
  {
    "path": "pkg/core/synapse.go",
    "content": "package core\n\nimport (\n\t\"context\"\n\t\"sync\"\n)\n\n// SynapseManager denfines operations for synapse client\ntype SynapseManager interface {\n\t// InitiateConnection initiates the connection with LT cloud\n\tInitiateConnection(ctx context.Context, wg *sync.WaitGroup, connectionFailed chan struct{})\n}\n"
  },
  {
    "path": "pkg/core/wsproto.go",
    "content": "package core\n\n// MessageType defines type of message\ntype MessageType string\n\n// StatusType defines type job status\ntype StatusType string\n\n// StatType defines type of resource status\ntype StatType string\n\n// types of messages\nconst (\n\tMsgLogin             MessageType = \"login\"\n\tMsgLogout            MessageType = \"logout\"\n\tMsgTask              MessageType = \"task\"\n\tMsgInfo              MessageType = \"info\"\n\tMsgError             MessageType = \"error\"\n\tMsgResourceStats     MessageType = \"resourcestats\"\n\tMsgJobInfo           MessageType = \"jobinfo\"\n\tMsgBuildAbort        MessageType = \"build_abort\"\n\tMsgYMLParsingRequest MessageType = \"yml_parsing_request\"\n\tMsgYMLParsingResult  MessageType = \"yml_parsing_result\"\n)\n\n// JobInfo types\nconst (\n\tJobCompleted StatusType = \"complete\"\n\tJobStarted   StatusType = \"started\"\n\tJobFailed    StatusType = \"failed\"\n\tJobAborted   StatusType = \"aborted\"\n)\n\n// ResourceStats types\nconst (\n\tResourceRelease StatType = \"release\"\n\tResourceCapture StatType = \"capture\"\n)\n\n// Message struct\ntype Message struct {\n\tType    MessageType `json:\"type\"`\n\tContent []byte      `json:\"content\"`\n\tSuccess bool        `json:\"success\"`\n}\n\n// LoginDetails struct\ntype LoginDetails struct {\n\tName           string  `json:\"name\"`\n\tSynapseID      string  `json:\"synapse_id\"`\n\tSecretKey      string  `json:\"secret_key\"`\n\tCPU            float32 `json:\"cpu\"`\n\tRAM            int64   `json:\"ram\"`\n\tSynapseVersion string  `json:\"synapse_version\"`\n}\n\n// ResourceStats struct for CPU, RAM details\ntype ResourceStats struct {\n\tStatus StatType `json:\"status\"`\n\tCPU    float32  `json:\"cpu\"`\n\tRAM    int64    `json:\"ram\"`\n}\n\n// JobInfo stuct for job updates info\ntype JobInfo struct {\n\tStatus  StatusType `json:\"status\"`\n\tJobID   string     `json:\"job_id\"`\n\tID      string     `json:\"id\"`\n\tMode    string     
`json:\"mode\"`\n\tBuildID string     `json:\"build_id\"`\n\tMessage string     `json:\"message\"`\n}\n\n// BuildAbortMsg struct defines message for aborting a build\ntype BuildAbortMsg struct {\n\tBuildID string `json:\"build_id\"`\n}\n"
  },
  {
    "path": "pkg/cron/setup.go",
    "content": "package cron\n\nimport (\n\t\"context\"\n\t\"sync\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/robfig/cron/v3\"\n)\n\n// Setup initializes all crons on service startup\nfunc Setup(ctx context.Context, wg *sync.WaitGroup, logger lumber.Logger, runner core.DockerRunner) {\n\tdefer wg.Done()\n\n\tc := cron.New()\n\tif _, err := c.AddFunc(\"@every 5m\", func() { cleanupBuildCache(runner) }); err != nil {\n\t\tlogger.Errorf(\"error setting up cron\")\n\t\treturn\n\t}\n\tc.Start()\n\n\t<-ctx.Done()\n\tc.Stop()\n\tlogger.Infof(\"Caller has requested graceful shutdown. Returning.....\")\n}\n\nfunc cleanupBuildCache(runner core.DockerRunner) {\n\trunner.RemoveOldVolumes(context.Background())\n}\n"
  },
  {
    "path": "pkg/diffmanager/setup.go",
    "content": "// Package diffmanager is used for cloning repo\npackage diffmanager\n\nimport (\n\t\"bufio\"\n\t\"context\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io/ioutil\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/urlmanager\"\n)\n\n//TODO: add logger\n\ntype diffManager struct {\n\tcfg    *config.NucleusConfig\n\tclient http.Client\n\tlogger lumber.Logger\n}\n\ntype gitLabDiffList struct {\n\tCommitDiff []gitLabDiff `json:\"diffs\"`\n\tPRDiff     []gitLabDiff `json:\"changes\"`\n}\ntype gitLabDiff struct {\n\tOldPath     string `json:\"old_path\"`\n\tNewPath     string `json:\"new_path\"`\n\tNewFile     bool   `json:\"new_file\"`\n\tRenamedFile bool   `json:\"renamed_file\"`\n\tDeletedFile bool   `json:\"deleted_file\"`\n}\n\n// NewDiffManager Instantiate DiffManager\nfunc NewDiffManager(cfg *config.NucleusConfig, logger lumber.Logger) *diffManager {\n\treturn &diffManager{\n\t\tcfg:    cfg,\n\t\tlogger: logger,\n\t\tclient: http.Client{\n\t\t\tTimeout: 30 * time.Second,\n\t\t\tTransport: &http.Transport{\n\t\t\t\tDisableKeepAlives: true,\n\t\t\t},\n\t\t},\n\t}\n}\n\n// Updated values with \"or\" operation\nfunc (dm *diffManager) updateWithOr(m map[string]int, key string, value int) {\n\tif _, exists := m[key]; !exists {\n\t\tm[key] = 0\n\t}\n\tm[key] = m[key] | value\n}\n\nfunc (dm *diffManager) getCommitDiff(gitprovider, repoURL string, oauth *core.Oauth, baseCommit, targetCommit, forkSlug string) ([]byte, error) {\n\tif baseCommit == \"\" {\n\t\tdm.logger.Debugf(\"basecommit is empty for gitprovider %v error %v\", gitprovider, errs.ErrGitDiffNotFound)\n\t\treturn nil, errs.ErrGitDiffNotFound\n\t}\n\turl, err := url.Parse(repoURL)\n\tif err != nil {\n\t\treturn nil, 
err\n\t}\n\n\tapiURLString, err := urlmanager.GetCommitDiffURL(gitprovider, url.Path, baseCommit, targetCommit, forkSlug)\n\tif err != nil {\n\t\tdm.logger.Errorf(\"failed to get api url for gitprovider: %v error: %v\", gitprovider, err)\n\t\treturn nil, err\n\t}\n\tapiURL, err := url.Parse(apiURLString)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treq, err := http.NewRequest(http.MethodGet, apiURL.String(), nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif oauth.AccessToken != \"\" {\n\t\treq.Header.Add(\"Authorization\", fmt.Sprintf(\"%s %s\", oauth.Type, oauth.AccessToken))\n\t}\n\treq.Header.Add(\"Accept\", \"application/vnd.github.v3.diff\")\n\tresp, err := dm.client.Do(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer resp.Body.Close()\n\n\t//TODO: Handle initial commit case\n\tif resp.StatusCode != http.StatusOK {\n\t\treturn nil, errs.ErrGitDiffNotFound\n\t}\n\treturn ioutil.ReadAll(resp.Body)\n}\n\nfunc (dm *diffManager) getPRDiff(gitprovider, repoURL string, prNumber int, oauth *core.Oauth) ([]byte, error) {\n\tparsedUrl, err := url.Parse(repoURL)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdiffURL, err := urlmanager.GetPullRequestDiffURL(gitprovider, parsedUrl.Path, prNumber)\n\tif err != nil {\n\t\tdm.logger.Errorf(\"failed to get diff url error: %v\", err)\n\t\treturn nil, err\n\t}\n\tchangeListURL, err := url.Parse(diffURL)\n\tif err != nil {\n\t\tdm.logger.Errorf(\"failed to get changelist url error: %v\", err)\n\t\treturn nil, err\n\t}\n\n\treq, err := http.NewRequest(http.MethodGet, changeListURL.String(), nil)\n\tif err != nil {\n\t\tdm.logger.Errorf(\"failed to create http request for changelist url error: %v\", err)\n\t\treturn nil, err\n\t}\n\treq.Header.Add(\"Authorization\", fmt.Sprintf(\"%s %s\", oauth.Type, oauth.AccessToken))\n\treq.Header.Set(\"Accept\", \"application/vnd.github.v3.diff\")\n\n\tresp, err := dm.client.Do(req)\n\n\tif err != nil {\n\t\tdm.logger.Errorf(\"failed to get changedlist url api error: %v\", 
err)\n\t\treturn nil, err\n\t}\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode != http.StatusOK {\n\t\treturn nil, errors.New(\"non 200 response\")\n\t}\n\n\treturn ioutil.ReadAll(resp.Body)\n\n}\n\nfunc (dm *diffManager) parseDiff(diff string) map[string]int {\n\tm := make(map[string]int)\n\tscanner := bufio.NewScanner(strings.NewReader(diff))\n\tfor scanner.Scan() {\n\t\tline := scanner.Text()\n\t\tif strings.HasPrefix(line, \"--- a/\") {\n\t\t\t// removed\n\t\t\tdm.updateWithOr(m, line[6:], core.FileRemoved)\n\t\t} else if strings.HasPrefix(line, \"+++ b/\") {\n\t\t\t// added or updated\n\t\t\tdm.updateWithOr(m, line[6:], core.FileAdded)\n\t\t}\n\t}\n\treturn m\n}\n\nfunc (dm *diffManager) parseGitLabDiff(eventType core.EventType, diff []byte) (map[string]int, error) {\n\tm := make(map[string]int)\n\tvar diffList gitLabDiffList\n\terr := json.Unmarshal(diff, &diffList)\n\tif err != nil {\n\t\tdm.logger.Errorf(\"failed to unmarshall diff %v error %v\", string(diff), err)\n\t\treturn nil, err\n\t}\n\tdiffs := diffList.PRDiff\n\tif eventType == core.EventPush {\n\t\tdiffs = diffList.CommitDiff\n\t}\n\tfor _, diff := range diffs {\n\t\tif diff.DeletedFile {\n\t\t\t// removed\n\t\t\tdm.updateWithOr(m, diff.OldPath, core.FileRemoved)\n\t\t} else if diff.NewFile {\n\t\t\t// added\n\t\t\tdm.updateWithOr(m, diff.NewPath, core.FileAdded)\n\t\t} else {\n\t\t\t// updated\n\t\t\tdm.updateWithOr(m, diff.NewPath, core.FileModified)\n\t\t}\n\t}\n\treturn m, nil\n}\n\nfunc (dm *diffManager) parseGitDiff(gitprovider string, eventType core.EventType, diff []byte) (map[string]int, error) {\n\tswitch gitprovider {\n\tcase core.GitHub, core.Bitbucket:\n\t\treturn dm.parseDiff(string(diff)), nil\n\tcase core.GitLab:\n\t\treturn dm.parseGitLabDiff(eventType, diff)\n\tdefault:\n\t\treturn nil, errs.ErrUnsupportedGitProvider\n\t}\n}\n\n// GetChangedFiles Figure out changed files\nfunc (dm *diffManager) GetChangedFiles(ctx context.Context, payload *core.Payload, oauth *core.Oauth) 
(map[string]int, error) {\n\t// map to store file and type of change (added, removed, modified)\n\tvar m map[string]int\n\n\tvar diff []byte\n\tvar err error\n\tif payload.EventType == core.EventPullRequest {\n\t\tdiff, err = dm.getPRDiff(payload.GitProvider, payload.RepoLink, payload.PullRequestNumber, oauth)\n\t\tif err != nil {\n\t\t\tdm.logger.Errorf(\"failed to parse pr diff for gitprovider: %s error: %v\", payload.GitProvider, err)\n\t\t\treturn nil, err\n\t\t}\n\t} else {\n\t\tdiff, err = dm.getCommitDiff(payload.GitProvider, payload.RepoLink, oauth, payload.BuildBaseCommit, payload.BuildTargetCommit, payload.ForkSlug)\n\t\tif err != nil {\n\t\t\tdm.logger.Errorf(\"failed to get commit diff for gitprovider: %s error: %v\", payload.GitProvider, err)\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\tm, err = dm.parseGitDiff(payload.GitProvider, payload.EventType, diff)\n\tif err != nil {\n\t\tdm.logger.Errorf(\"failed to parse gitdiff for gitprovider: %s error: %v\", payload.GitProvider, err)\n\t\treturn nil, err\n\t}\n\treturn m, nil\n}\n"
  },
  {
    "path": "pkg/diffmanager/setup_test.go",
    "content": "// Package diffmanager is used for cloning repo\npackage diffmanager\n\nimport (\n\t\"context\"\n\t\"math/rand\"\n\t\"net/http\"\n\t\"net/http/httptest\"\n\t\"reflect\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/testutils\"\n)\n\nfunc Test_updateWithOr(t *testing.T) {\n\tcheck := func(t *testing.T) {\n\t\tdm := &diffManager{}\n\t\tm := make(map[string]int)\n\t\tkey := \"key\"\n\t\tval := rand.Intn(1000) // nolint:gosec\n\n\t\tdm.updateWithOr(m, key, val)\n\t\tif ans, exists := m[key]; !exists || ans != val {\n\t\t\tt.Errorf(\"Expected: %v, received: %v\", val, m[key])\n\t\t}\n\n\t\tnewVal := rand.Intn(1000) // nolint:gosec\n\t\tdm.updateWithOr(m, key, newVal)\n\t\tif ans, exists := m[key]; !exists || ans != (val|newVal) {\n\t\t\tt.Errorf(\"Expected: %v, received: %v\", val|newVal, m[key])\n\t\t}\n\t}\n\tt.Run(\"Test_updateWithOr\", func(t *testing.T) {\n\t\tcheck(t)\n\t})\n}\n\nfunc Test_diffManager_GetChangedFiles_PRDiff(t *testing.T) {\n\tserver := httptest.NewServer( // mock server\n\t\thttp.FileServer(http.Dir(\"../../testutils\")), // mock data stored at testutils/testdata\n\t)\n\tdefer server.Close()\n\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Can't get logger, received: %s\", err)\n\t}\n\tconfig, err := testutils.GetConfig()\n\tif err != nil {\n\t\tt.Errorf(\"Can't get logger, received: %s\", err)\n\t}\n\n\tdm := NewDiffManager(config, logger)\n\ttype args struct {\n\t\tctx     context.Context\n\t\tpayload *core.Payload\n\t\toauth   *core.Oauth\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\targs    args\n\t\twant    map[string]int\n\t\twantErr bool\n\t}{\n\t\t// expects to hit Server.URL/testdata/pulls/2\n\t\t{\"Test GetChangedFile for PRdiff for github gitprovider\",\n\t\t\targs{ctx: context.TODO(),\n\t\t\t\tpayload: &core.Payload{RepoSlug: \"/testdata\", RepoLink: server.URL + 
\"/testdata\",\n\t\t\t\t\tGitProvider: \"github\", PrivateRepo: false, EventType: \"pull-request\", Diff: \"xyz\", PullRequestNumber: 2},\n\t\t\t\toauth: &core.Oauth{}}, map[string]int{}, false},\n\n\t\t// expects to hit Server.URL/testdata/merge_requests/2/changes\n\t\t{\"Test GetChangedFile for PRdiff for gitlab gitprovider\",\n\t\t\targs{ctx: context.TODO(),\n\t\t\t\tpayload: &core.Payload{RepoSlug: \"/testdata\", RepoLink: server.URL + \"/testdata\",\n\t\t\t\t\tGitProvider: \"gitlab\", PrivateRepo: false, EventType: \"pull-request\", Diff: \"xyz\", PullRequestNumber: 2},\n\t\t\t\toauth: &core.Oauth{}},\n\t\t\tmap[string]int{}, false},\n\n\t\t{\"Test GetChangedFile for Commitdiff for unsupported gitprovider\", args{ctx: context.TODO(),\n\t\t\tpayload: &core.Payload{GitProvider: \"unsupported\"},\n\t\t\toauth:   &core.Oauth{}},\n\t\t\tmap[string]int{}, true},\n\n\t\t{\"Test GetChangedFile for PRdiff for unsupported gitprovider\", args{ctx: context.TODO(),\n\t\t\tpayload: &core.Payload{GitProvider: \"unsupported\", EventType: \"pull-request\"},\n\t\t\toauth:   &core.Oauth{}}, map[string]int{}, true},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tglobal.APIHostURLMap[tt.args.payload.GitProvider] = server.URL\n\t\t\tresp, err := dm.GetChangedFiles(tt.args.ctx, tt.args.payload, tt.args.oauth)\n\n\t\t\tif tt.wantErr {\n\t\t\t\tif err == nil {\n\t\t\t\t\tt.Errorf(\"GetChangedFiles() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\t}\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\texpResp := map[string]int{\"src/steps/resource.ts\": 3}\n\t\t\tif err != nil {\n\t\t\t\tt.Errorf(\"error in getting changed files, error %v\", err.Error())\n\t\t\t} else if tt.args.payload.GitProvider == \"github\" && !reflect.DeepEqual(resp, expResp) {\n\t\t\t\tt.Errorf(\"Expected: %+v, received: %+v\", expResp, resp)\n\t\t\t} else if tt.args.payload.GitProvider == \"gitlab\" && len(resp) != 17 {\n\t\t\t\tt.Errorf(\"Expected map entries: 17, received: %v, received 
map: %v\", len(resp), resp)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc Test_diffManager_GetChangedFiles_CommitDiff_Github(t *testing.T) {\n\tserver := httptest.NewServer( // mock server\n\t\thttp.FileServer(http.Dir(\"../../testutils\")),\n\t)\n\tdefer server.Close()\n\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Can't get logger, received: %s\", err)\n\t}\n\tconfig, err := testutils.GetConfig()\n\tif err != nil {\n\t\tt.Errorf(\"Can't get logger, received: %s\", err)\n\t}\n\n\tdm := NewDiffManager(config, logger)\n\ttype args struct {\n\t\tctx     context.Context\n\t\tpayload *core.Payload\n\t\toauth   *core.Oauth\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\targs    args\n\t\twant    map[string]int\n\t\twantErr bool\n\t}{\n\t\t// expects to hit serverURL/testdata/compare/abc...xyz\n\t\t{\"Test GetChangedFile for CommitDiff for github gitprovider\",\n\t\t\targs{ctx: context.TODO(),\n\t\t\t\tpayload: &core.Payload{RepoSlug: \"/testdata\", RepoLink: server.URL + \"/testdata\", BuildTargetCommit: \"xyz\", BuildBaseCommit: \"abc\",\n\t\t\t\t\tGitProvider: \"github\", EventType: \"push\", Diff: \"xyz\", PullRequestNumber: 2},\n\t\t\t\toauth: &core.Oauth{}},\n\t\t\tmap[string]int{}, false},\n\n\t\t{\"Test GetChangedFile for CommitDiff for github provider and empty base commit\",\n\t\t\targs{ctx: context.TODO(),\n\t\t\t\tpayload: &core.Payload{RepoSlug: \"/testdata\", RepoLink: server.URL + \"/testdata\", BuildBaseCommit: \"\",\n\t\t\t\t\tGitProvider: \"gitlab\", EventType: \"push\"}, oauth: &core.Oauth{}}, map[string]int{}, true},\n\n\t\t{\"Test GetChangedFile for CommitDiff for github provider for non 200 response\",\n\t\t\targs{ctx: context.TODO(), payload: &core.Payload{RepoLink: server.URL + \"/notfound/\", BuildTargetCommit: \"xyz\", BuildBaseCommit: \"abc\",\n\t\t\t\tGitProvider: \"gitlab\", EventType: \"push\"}, oauth: &core.Oauth{}}, map[string]int{}, true},\n\n\t\t{\"Test GetChangedFile for CommitDiff for non supported git 
provider\",\n\t\t\targs{ctx: context.TODO(),\n\t\t\t\tpayload: &core.Payload{RepoSlug: \"/notfound/\", RepoLink: server.URL + \"/notfound/\", BuildTargetCommit: \"xyz\", BuildBaseCommit: \"abc\",\n\t\t\t\t\tGitProvider: \"gittest\", EventType: \"push\"}, oauth: &core.Oauth{}}, map[string]int{}, false},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tglobal.APIHostURLMap[tt.args.payload.GitProvider] = server.URL\n\t\t\tresp, err := dm.GetChangedFiles(tt.args.ctx, tt.args.payload, tt.args.oauth)\n\t\t\t// t.Errorf(\"\")\n\t\t\tif tt.args.payload.GitProvider == \"gittest\" {\n\t\t\t\tif resp != nil || err == nil {\n\t\t\t\t\tt.Errorf(\"Expected error: 'unsupoorted git provider', received: %v\\nexpected response: nil, received: %v\", err, resp)\n\t\t\t\t}\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif tt.wantErr {\n\t\t\t\tif err == nil {\n\t\t\t\t\tt.Errorf(\"Expected error: %v, Received error: %v, response: %v\", tt.wantErr, err, resp)\n\t\t\t\t}\n\t\t\t\treturn\n\t\t\t}\n\t\t\texpResp := make(map[string]int)\n\t\t\tif err != nil {\n\t\t\t\tt.Errorf(\"error in getting changed files, error %v\", err.Error())\n\t\t\t} else if !reflect.DeepEqual(resp, expResp) {\n\t\t\t\tt.Errorf(\"Expected: %+v, received: %+v\", expResp, resp)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc Test_diffManager_GetChangedFiles_CommitDiff_Gitlab(t *testing.T) {\n\tdata, err := testutils.GetGitlabCommitDiff()\n\tif err != nil {\n\t\tt.Errorf(\"Received error in getting test gitlab commit diff, error: %v\", err)\n\t}\n\tserver := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tif r.URL.Path != \"/testdata/repository/compare\" {\n\t\t\tt.Errorf(\"Expected to request, got: %v\", r.URL.Path)\n\t\t}\n\t\tw.WriteHeader(200)\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\t_, err2 := w.Write(data)\n\t\tif err2 != nil {\n\t\t\tt.Errorf(\"Error in writing response data, error: %v\", err)\n\t\t}\n\t}))\n\tdefer server.Close()\n\n\tlogger, 
err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Can't get logger, received: %s\", err)\n\t}\n\tconfig, err := testutils.GetConfig()\n\tif err != nil {\n\t\tt.Errorf(\"Can't get logger, received: %s\", err)\n\t}\n\n\tdm := NewDiffManager(config, logger)\n\ttype args struct {\n\t\tctx     context.Context\n\t\tpayload *core.Payload\n\t\toauth   *core.Oauth\n\t}\n\ttests := []struct {\n\t\tname string\n\t\targs args\n\t\twant map[string]int\n\t}{\n\t\t// expects to hit serverURL/testdata/repository/compare?from=abc&to=abcd\n\t\t{\"Test GetChangedFile for CommitDiff for gitlab gitprovider\",\n\t\t\targs{ctx: context.TODO(),\n\t\t\t\tpayload: &core.Payload{RepoSlug: \"/testdata\", RepoLink: server.URL + \"/testdata\",\n\t\t\t\t\tBuildTargetCommit: \"abcd\", BuildBaseCommit: \"abc\", TaskID: \"taskid\", BranchName: \"branchname\", BuildID: \"buildid\", RepoID: \"repoid\",\n\t\t\t\t\tOrgID: \"orgid\", GitProvider: \"gitlab\", PrivateRepo: false, EventType: \"push\", Diff: \"xyz\", PullRequestNumber: 2},\n\t\t\t\toauth: &core.Oauth{}}, map[string]int{}},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tglobal.APIHostURLMap[tt.args.payload.GitProvider] = server.URL\n\t\t\tresp, err := dm.GetChangedFiles(tt.args.ctx, tt.args.payload, tt.args.oauth)\n\n\t\t\tif err != nil {\n\t\t\t\tt.Errorf(\"error in getting changed files, error %v\", err.Error())\n\t\t\t} else if len(resp) != 202 {\n\t\t\t\tt.Errorf(\"Expected map length: 202, received: %v\\nreceived map: %v\", len(resp), resp)\n\t\t\t}\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "pkg/driver/builder.go",
    "content": "package driver\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"os\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n)\n\nconst (\n\tfirstVersion  = 1\n\tsecondVersion = 2\n)\n\ntype (\n\tBuilder struct {\n\t\tLogger               lumber.Logger\n\t\tTestExecutionService core.TestExecutionService\n\t\tTestDiscoveryService core.TestDiscoveryService\n\t\tAzureClient          core.AzureClient\n\t\tBlockTestService     core.BlockTestService\n\t\tExecutionManager     core.ExecutionManager\n\t\tTASConfigManager     core.TASConfigManager\n\t\tCacheStore           core.CacheStore\n\t\tDiffManager          core.DiffManager\n\t\tListSubModuleService core.ListSubModuleService\n\t}\n\tNodeInstaller struct {\n\t\tlogger           lumber.Logger\n\t\tExecutionManager core.ExecutionManager\n\t}\n)\n\nfunc (b *Builder) GetDriver(version int, filePath string) (core.Driver, error) {\n\tswitch version {\n\tcase firstVersion:\n\t\treturn &driverV1{\n\t\t\tlogger:               b.Logger,\n\t\t\tTestExecutionService: b.TestExecutionService,\n\t\t\tTestDiscoveryService: b.TestDiscoveryService,\n\t\t\tAzureClient:          b.AzureClient,\n\t\t\tBlockTestService:     b.BlockTestService,\n\t\t\tExecutionManager:     b.ExecutionManager,\n\t\t\tTASConfigManager:     b.TASConfigManager,\n\t\t\tCacheStore:           b.CacheStore,\n\t\t\tDiffManager:          b.DiffManager,\n\t\t\tListSubModuleService: b.ListSubModuleService,\n\t\t\tTASVersion:           firstVersion,\n\t\t\tTASFilePath:          filePath,\n\t\t\tnodeInstaller: NodeInstaller{\n\t\t\t\tlogger:           b.Logger,\n\t\t\t\tExecutionManager: b.ExecutionManager,\n\t\t\t},\n\t\t}, nil\n\tcase secondVersion:\n\t\treturn &driverV2{\n\t\t\tlogger:               b.Logger,\n\t\t\tTestExecutionService: b.TestExecutionService,\n\t\t\tTestDiscoveryService: b.TestDiscoveryService,\n\t\t\tAzureClient:          
b.AzureClient,\n\t\t\tBlockTestService:     b.BlockTestService,\n\t\t\tExecutionManager:     b.ExecutionManager,\n\t\t\tTASConfigManager:     b.TASConfigManager,\n\t\t\tCacheStore:           b.CacheStore,\n\t\t\tDiffManager:          b.DiffManager,\n\t\t\tListSubModuleService: b.ListSubModuleService,\n\t\t\tTASVersion:           secondVersion,\n\t\t\tTASFilePath:          filePath,\n\t\t\tnodeInstaller: NodeInstaller{\n\t\t\t\tlogger:           b.Logger,\n\t\t\t\tExecutionManager: b.ExecutionManager,\n\t\t\t},\n\t\t}, nil\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"invalid version ( %d )  mentioned in yml file\", version)\n\t}\n}\n\nfunc (n *NodeInstaller) InstallNodeVersion(ctx context.Context, nodeVersion string) error {\n\t// Running the `source` commands in a directory where .nvmrc is present, exits with exitCode 3\n\t// https://github.com/nvm-sh/nvm/issues/1985\n\t// TODO [good-to-have]: Auto-read and install from .nvmrc file, if present\n\tcommands := []string{\n\t\t\"source /home/nucleus/.nvm/nvm.sh\",\n\t\tfmt.Sprintf(\"nvm install %s\", nodeVersion),\n\t}\n\tn.logger.Infof(\"Using user-defined node version: %v\", nodeVersion)\n\terr := n.ExecutionManager.ExecuteInternalCommands(ctx, core.InstallNodeVer, commands, \"\", nil, nil)\n\tif err != nil {\n\t\tn.logger.Errorf(\"Unable to install user-defined nodeversion %v\", err)\n\t\terr = errs.New(errs.GenericErrRemark.Error())\n\t\treturn err\n\t}\n\torigPath := os.Getenv(\"PATH\")\n\tos.Setenv(\"PATH\", fmt.Sprintf(\"/home/nucleus/.nvm/versions/node/v%s/bin:%s\", nodeVersion, origPath))\n\treturn nil\n}\n"
  },
  {
    "path": "pkg/driver/builder_test.go",
    "content": "package driver\n\nimport (\n\t\"fmt\"\n\t\"testing\"\n)\n\nfunc Test_driver(t *testing.T) {\n\tb := Builder{}\n\tinvalidVersion := 4\n\t_, err := b.GetDriver(invalidVersion, \"\")\n\twantErr := fmt.Sprintf(\"invalid version ( %d )  mentioned in yml file\", invalidVersion)\n\tif err.Error() != wantErr {\n\t\tt.Errorf(\"want %s , got %s\", err.Error(), wantErr)\n\t}\n}\n"
  },
  {
    "path": "pkg/driver/driver_v1.go",
    "content": "/*\nThis file implements core.Driver  with operation over TAS config (YAML) version 1\n*/\npackage driver\n\nimport (\n\t\"context\"\n\t\"errors\"\n\t\"fmt\"\n\t\"os\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/logwriter\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/utils\"\n\t\"golang.org/x/sync/errgroup\"\n)\n\nconst languageJs = \"javascript\"\n\ntype (\n\tdriverV1 struct {\n\t\tlogger               lumber.Logger\n\t\tnodeInstaller        NodeInstaller\n\t\tTestExecutionService core.TestExecutionService\n\t\tTestDiscoveryService core.TestDiscoveryService\n\t\tAzureClient          core.AzureClient\n\t\tBlockTestService     core.BlockTestService\n\t\tExecutionManager     core.ExecutionManager\n\t\tTASConfigManager     core.TASConfigManager\n\t\tCacheStore           core.CacheStore\n\t\tDiffManager          core.DiffManager\n\t\tListSubModuleService core.ListSubModuleService\n\t\tTASVersion           int\n\t\tTASFilePath          string\n\t}\n\n\tsetUpResultV1 struct {\n\t\tdiffExists bool\n\t\tdiff       map[string]int\n\t\tcacheKey   string\n\t}\n)\n\nfunc (d *driverV1) RunDiscovery(ctx context.Context, payload *core.Payload,\n\ttaskPayload *core.TaskPayload, oauth *core.Oauth, coverageDir string, secretMap map[string]string) error {\n\ttas, err := d.TASConfigManager.LoadAndValidate(ctx, d.TASVersion, d.TASFilePath, payload.EventType, payload.LicenseTier, d.TASFilePath)\n\tif err != nil {\n\t\td.logger.Errorf(\"Unable to load tas yaml file, error: %v\", err)\n\t\terr = &errs.StatusFailed{Remark: err.Error()}\n\t\treturn err\n\t}\n\ttasConfig := tas.(*core.TASConfig)\n\tlanguage := global.FrameworkLanguageMap[tasConfig.Framework]\n\tsetupResults, err := d.setUp(ctx, payload, tasConfig, oauth, language)\n\tif err != nil 
{\n\t\td.logger.Errorf(\"Error while doing common opertations error %v\", err)\n\t\treturn err\n\t}\n\n\tif postErr := d.ListSubModuleService.Send(ctx, payload.BuildID, 1); postErr != nil {\n\t\treturn postErr\n\t}\n\n\tif tasConfig.Prerun != nil {\n\t\td.logger.Infof(\"Running pre-run steps for top module\")\n\t\tazureLogWriter := logwriter.NewAzureLogWriter(d.AzureClient, core.PurposePreRunLogs, d.logger)\n\t\terr = d.ExecutionManager.ExecuteUserCommands(ctx, core.PreRun, payload, tasConfig.Prerun, secretMap, azureLogWriter, global.RepoDir)\n\t\tif err != nil {\n\t\t\td.logger.Errorf(\"Unable to run pre-run steps %v\", err)\n\t\t\terr = &errs.StatusFailed{Remark: \"Failed in running pre-run steps\"}\n\t\t\treturn err\n\t\t}\n\t}\n\n\terr = d.ExecutionManager.ExecuteInternalCommands(ctx, core.InstallRunners, global.InstallRunnerCmds, global.RepoDir, nil, nil)\n\tif err != nil {\n\t\td.logger.Errorf(\"Unable to install custom runners %v\", err)\n\t\terr = errs.New(errs.GenericErrRemark.Error())\n\t\treturn err\n\t}\n\n\td.logger.Debugf(\"Caching workspace\")\n\n\tif err = d.CacheStore.CacheWorkspace(ctx, \"\"); err != nil {\n\t\td.logger.Errorf(\"Error caching workspace: %+v\", err)\n\t\terr = errs.New(errs.GenericErrRemark.Error())\n\t\treturn err\n\t}\n\n\targs := d.buildDiscoveryArgs(payload, tasConfig, secretMap, setupResults.diffExists, setupResults.diff)\n\n\tdiscoveryResult, err := d.TestDiscoveryService.Discover(ctx, &args)\n\tif err != nil {\n\t\td.logger.Errorf(\"Unable to perform test discovery: %+v\", err)\n\t\terr = &errs.StatusFailed{Remark: \"Failed in discovering tests\"}\n\t\treturn err\n\t}\n\n\tpopulateDiscovery(discoveryResult, tasConfig)\n\tif err = d.TestDiscoveryService.SendResult(ctx, discoveryResult); err != nil {\n\t\td.logger.Errorf(\"error while sending discovery API call , error %v\", err)\n\t\treturn err\n\t}\n\tif language == languageJs {\n\t\tif err = d.CacheStore.Upload(ctx, setupResults.cacheKey, tasConfig.Cache.Paths...); err != 
nil {\n\t\t\td.logger.Errorf(\"Unable to upload cache: %v\", err)\n\t\t\terr = errs.New(errs.GenericErrRemark.Error())\n\t\t\treturn err\n\t\t}\n\t}\n\n\ttaskPayload.Status = core.Passed\n\td.logger.Debugf(\"Cache uploaded successfully\")\n\treturn nil\n\t// return nil\n}\n\nfunc (d *driverV1) RunExecution(ctx context.Context, payload *core.Payload,\n\ttaskPayload *core.TaskPayload, oauth *core.Oauth, coverageDir string, secretMap map[string]string) error {\n\ttas, err := d.TASConfigManager.LoadAndValidate(ctx, 1, d.TASFilePath, payload.EventType, payload.LicenseTier, d.TASFilePath)\n\tif err != nil {\n\t\td.logger.Errorf(\"Unable to load tas yaml file, error: %v\", err)\n\t\terr = &errs.StatusFailed{Remark: err.Error()}\n\t\treturn err\n\t}\n\ttasConfig := tas.(*core.TASConfig)\n\tif cachErr := d.setCache(tasConfig); cachErr != nil {\n\t\treturn cachErr\n\t}\n\tif errG := d.BlockTestService.GetBlockTests(ctx, tasConfig.Blocklist, payload.BranchName); errG != nil {\n\t\td.logger.Errorf(\"Unable to fetch blocklisted tests: %v\", errG)\n\t\terrG = errs.New(errs.GenericErrRemark.Error())\n\t\treturn errG\n\t}\n\tbuildArgs := d.buildTestExecutionArgs(payload, tasConfig, secretMap, coverageDir)\n\texecutionResults, err := d.TestExecutionService.Run(ctx, &buildArgs)\n\tif err != nil {\n\t\td.logger.Infof(\"Unable to perform test execution: %v\", err)\n\t\terr = &errs.StatusFailed{Remark: \"Failed in executing tests.\"}\n\t\tif executionResults == nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tresp, err := d.TestExecutionService.SendResults(ctx, executionResults)\n\tif err != nil {\n\t\td.logger.Errorf(\"error while sending test reports %v\", err)\n\t\terr = errs.New(errs.GenericErrRemark.Error())\n\t\treturn err\n\t}\n\n\ttaskPayload.Status = resp.TaskStatus\n\tlogWriter := logwriter.NewAzureLogWriter(d.AzureClient, core.PurposePostRunLogs, d.logger)\n\n\tif tasConfig.Postrun != nil {\n\t\td.logger.Infof(\"Running post-run steps\")\n\t\terr = 
d.ExecutionManager.ExecuteUserCommands(ctx, core.PostRun, payload, tasConfig.Postrun, secretMap, logWriter, global.RepoDir)\n\t\tif err != nil {\n\t\t\td.logger.Errorf(\"Unable to run post-run steps %v\", err)\n\t\t\terr = &errs.StatusFailed{Remark: \"Failed in running post-run steps.\"}\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}\n\nfunc (d *driverV1) setUp(ctx context.Context, payload *core.Payload,\n\ttasConfig *core.TASConfig, oauth *core.Oauth, language string) (*setUpResultV1, error) {\n\td.logger.Infof(\"Tas yaml: %+v\", tasConfig)\n\tif err := d.setCache(tasConfig); err != nil {\n\t\treturn nil, err\n\t}\n\tcacheKey := \"\"\n\tif language == languageJs {\n\t\tcacheKey = tasConfig.Cache.Key\n\t}\n\n\tos.Setenv(\"REPO_CACHE_DIR\", global.RepoCacheDir)\n\tif tasConfig.NodeVersion != \"\" && language == languageJs {\n\t\tnodeVersion := tasConfig.NodeVersion\n\t\tif nodeErr := d.nodeInstaller.InstallNodeVersion(ctx, nodeVersion); nodeErr != nil {\n\t\t\treturn nil, nodeErr\n\t\t}\n\t}\n\tblYml := tasConfig.Blocklist\n\tif errG := d.BlockTestService.GetBlockTests(ctx, blYml, payload.BranchName); errG != nil {\n\t\td.logger.Errorf(\"Unable to fetch blocklisted tests: %v\", errG)\n\t\terrG = errs.New(errs.GenericErrRemark.Error())\n\t\treturn nil, errG\n\t}\n\n\tg, errCtx := errgroup.WithContext(ctx)\n\tif language == languageJs {\n\t\tg.Go(func() error {\n\t\t\tif errG := d.CacheStore.Download(errCtx, cacheKey); errG != nil {\n\t\t\t\td.logger.Errorf(\"Unable to download cache: %v\", errG)\n\t\t\t\terrG = errs.New(errs.GenericErrRemark.Error())\n\t\t\t\treturn errG\n\t\t\t}\n\t\t\treturn nil\n\t\t})\n\t}\n\n\td.logger.Infof(\"Identifying changed files ...\")\n\tdiffExists := true\n\tdiff := map[string]int{}\n\tg.Go(func() error {\n\t\tdiffC, errG := d.DiffManager.GetChangedFiles(errCtx, payload, oauth)\n\t\tif errG != nil {\n\t\t\tif errors.Is(errG, errs.ErrGitDiffNotFound) {\n\t\t\t\tdiffExists = false\n\t\t\t} else {\n\t\t\t\td.logger.Errorf(\"Unable to 
identify changed files %s\", errG)\n\t\t\t\terrG = errs.New(\"Error occurred in fetching diff from GitHub\")\n\t\t\t\treturn errG\n\t\t\t}\n\t\t}\n\t\tdiff = diffC\n\t\treturn nil\n\t})\n\n\tif err := g.Wait(); err != nil {\n\t\treturn nil, err\n\t}\n\treturn &setUpResultV1{\n\t\tdiffExists: diffExists,\n\t\tdiff:       diff,\n\t\tcacheKey:   cacheKey,\n\t}, nil\n}\n\nfunc (d *driverV1) buildDiscoveryArgs(payload *core.Payload, tasConfig *core.TASConfig,\n\tsecretMap map[string]string,\n\tdiffExists bool,\n\tdiff map[string]int) core.DiscoveyArgs {\n\ttestPattern, envMap := d.getEnvAndPattern(payload, tasConfig)\n\treturn core.DiscoveyArgs{\n\t\tTestPattern:      testPattern,\n\t\tPayload:          payload,\n\t\tEnvMap:           envMap,\n\t\tSecretData:       secretMap,\n\t\tTestConfigFile:   tasConfig.ConfigFile,\n\t\tFrameWork:        tasConfig.Framework,\n\t\tSmartRun:         tasConfig.SmartRun,\n\t\tDiff:             diff,\n\t\tDiffExists:       diffExists,\n\t\tFrameWorkVersion: tasConfig.FrameworkVersion,\n\t\tCWD:              global.RepoDir,\n\t}\n}\n\nfunc (d *driverV1) buildTestExecutionArgs(payload *core.Payload, tasConfig *core.TASConfig,\n\tsecretMap map[string]string,\n\tcoverageDir string) core.TestExecutionArgs {\n\ttestPattern, envMap := d.getEnvAndPattern(payload, tasConfig)\n\tlogWriter := logwriter.NewAzureLogWriter(d.AzureClient, core.PurposeExecutionLogs, d.logger)\n\treturn core.TestExecutionArgs{\n\t\tPayload:           payload,\n\t\tCoverageDir:       coverageDir,\n\t\tLogWriterStrategy: logWriter,\n\t\tTestPattern:       testPattern,\n\t\tEnvMap:            envMap,\n\t\tTestConfigFile:    tasConfig.ConfigFile,\n\t\tFrameWork:         tasConfig.Framework,\n\t\tSecretData:        secretMap,\n\t\tFrameWorkVersion:  tasConfig.FrameworkVersion,\n\t\tCWD:               global.RepoDir,\n\t}\n}\n\nfunc (d *driverV1) getEnvAndPattern(payload *core.Payload, tasConfig *core.TASConfig) (target []string, envMap map[string]string) {\n\tif 
payload.EventType == core.EventPullRequest {\n\t\treturn tasConfig.Premerge.Patterns, tasConfig.Premerge.EnvMap\n\t}\n\treturn tasConfig.Postmerge.Patterns, tasConfig.Postmerge.EnvMap\n}\n\nfunc populateDiscovery(testDiscoveryResult *core.DiscoveryResult, tasConfig *core.TASConfig) {\n\ttestDiscoveryResult.Parallelism = tasConfig.Parallelism\n\ttestDiscoveryResult.SplitMode = tasConfig.SplitMode\n}\n\nfunc (d *driverV1) setCache(tasConfig *core.TASConfig) error {\n\tlanguage := global.FrameworkLanguageMap[tasConfig.Framework]\n\tif tasConfig.Cache == nil && language == \"javascript\" {\n\t\tchecksum, err := utils.ComputeChecksum(fmt.Sprintf(\"%s/%s\", global.RepoDir, global.PackageJSON))\n\t\tif err != nil {\n\t\t\td.logger.Errorf(\"Error while computing checksum, error %v\", err)\n\t\t\treturn err\n\t\t}\n\t\ttasConfig.Cache = &core.Cache{\n\t\t\tKey:   checksum,\n\t\t\tPaths: []string{},\n\t\t}\n\t}\n\treturn nil\n}\n"
  },
  {
    "path": "pkg/driver/driver_v2.go",
    "content": "/*\nThis file implements core.Driver with operation over TAS config (YAML) version 2\n*/\npackage driver\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"errors\"\n\t\"fmt\"\n\t\"os\"\n\t\"path\"\n\t\"strings\"\n\t\"sync\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/logwriter\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/utils\"\n\t\"golang.org/x/sync/errgroup\"\n)\n\nconst preRunLog = \"Running Pre Run on Top level\"\n\ntype (\n\tdriverV2 struct {\n\t\tlogger               lumber.Logger\n\t\tTestExecutionService core.TestExecutionService\n\t\tAzureClient          core.AzureClient\n\t\tBlockTestService     core.BlockTestService\n\t\tExecutionManager     core.ExecutionManager\n\t\tTASConfigManager     core.TASConfigManager\n\t\tCacheStore           core.CacheStore\n\t\tDiffManager          core.DiffManager\n\t\tListSubModuleService core.ListSubModuleService\n\t\tnodeInstaller        NodeInstaller\n\t\tTestDiscoveryService core.TestDiscoveryService\n\t\tTASVersion           int\n\t\tTASFilePath          string\n\t}\n\n\tsetUpResultV2 struct {\n\t\tdiffExists bool\n\t\tdiff       map[string]int\n\t\tcacheKey   string\n\t}\n)\n\nfunc (d *driverV2) RunDiscovery(ctx context.Context, payload *core.Payload,\n\ttaskPayload *core.TaskPayload, oauth *core.Oauth, coverageDir string, secretMap map[string]string) error {\n\t// do something\n\td.logger.Debugf(\"Running in %d version\", d.TASVersion)\n\ttas, err := d.TASConfigManager.LoadAndValidate(ctx, d.TASVersion, d.TASFilePath, payload.EventType, payload.LicenseTier, d.TASFilePath)\n\tif err != nil {\n\t\td.logger.Errorf(\"Unable to load tas yaml file, error: %v\", err)\n\t\terr = &errs.StatusFailed{Remark: err.Error()}\n\t\treturn err\n\t}\n\ttasConfig := tas.(*core.TASConfigV2)\n\ttaskPayload.Status 
= core.Passed\n\tsetUpResult, err := d.setUpDiscovery(ctx, payload, tasConfig, oauth)\n\tif err != nil {\n\t\treturn err\n\t}\n\tmainBuffer := new(bytes.Buffer)\n\tazureLogWriter := logwriter.NewAzureLogWriter(d.AzureClient, core.PurposePreRunLogs, d.logger)\n\n\tdefer func() {\n\t\tif writeErr := <-azureLogWriter.Write(ctx, mainBuffer); writeErr != nil {\n\t\t\t// error in writing log should not fail the build\n\t\t\td.logger.Errorf(\"error in writing pre run log, error %v\", writeErr)\n\t\t}\n\t}()\n\n\tif payload.EventType == core.EventPush {\n\t\tif discoveryErr := d.runDiscoveryHelper(ctx, tasConfig.PostMerge.PreRun,\n\t\t\ttasConfig.PostMerge.SubModules, payload, tasConfig,\n\t\t\ttaskPayload, setUpResult.diff, setUpResult.diffExists, mainBuffer, secretMap); discoveryErr != nil {\n\t\t\treturn discoveryErr\n\t\t}\n\t} else {\n\t\tif discoveryErr := d.runDiscoveryHelper(ctx, tasConfig.PreMerge.PreRun, tasConfig.PreMerge.SubModules,\n\t\t\tpayload, tasConfig, taskPayload, setUpResult.diff, setUpResult.diffExists, mainBuffer, secretMap); discoveryErr != nil {\n\t\t\treturn discoveryErr\n\t\t}\n\t}\n\tif err = d.CacheStore.Upload(ctx, setUpResult.cacheKey, tasConfig.Cache.Paths...); err != nil {\n\t\t// cache upload failure should not fail the task\n\t\td.logger.Errorf(\"Unable to upload cache: %v\", err)\n\t}\n\td.logger.Debugf(\"Cache uploaded successfully\")\n\n\treturn nil\n}\n\nfunc (d *driverV2) RunExecution(ctx context.Context, payload *core.Payload,\n\ttaskPayload *core.TaskPayload, oauth *core.Oauth, coverageDir string, secretMap map[string]string) error {\n\ttas, err := d.TASConfigManager.LoadAndValidate(ctx, d.TASVersion, d.TASFilePath, payload.EventType, payload.LicenseTier, d.TASFilePath)\n\tif err != nil {\n\t\td.logger.Errorf(\"Unable to load tas yaml file, error: %v\", err)\n\t\terr = &errs.StatusFailed{Remark: err.Error()}\n\t\treturn err\n\t}\n\n\tsubModuleName := os.Getenv(global.SubModuleName)\n\ttasConfig := tas.(*core.TASConfigV2)\n\tif 
cachErr := d.setCache(tasConfig); cachErr != nil {\n\t\treturn cachErr\n\t}\n\tsubModule, err := d.findSubmodule(tasConfig, payload, subModuleName)\n\tif err != nil {\n\t\td.logger.Errorf(\"Error finding sub module %s in tas config file\", subModuleName)\n\t\treturn err\n\t}\n\t// Get blocklist data before execution\n\tblYML := subModule.Blocklist\n\tif err = d.BlockTestService.GetBlockTests(ctx, blYML, payload.BranchName); err != nil {\n\t\td.logger.Errorf(\"Unable to fetch blocklisted tests: %v\", err)\n\t\terr = errs.New(errs.GenericErrRemark.Error())\n\t\treturn err\n\t}\n\n\tmodulePath := path.Join(global.RepoDir, subModule.Path)\n\t// PRE RUN steps should be run only if RunPrerunEveryTime is set to true\n\tif subModule.Prerun != nil && subModule.RunPrerunEveryTime {\n\t\tif preErr := d.runPreRunBeforeTestExecution(ctx, tasConfig, subModule, payload, secretMap, modulePath); preErr != nil {\n\t\t\treturn preErr\n\t\t}\n\t}\n\targs := d.buildTestExecutionArgs(payload, tasConfig, subModule, secretMap, coverageDir)\n\ttestResult, err := d.TestExecutionService.Run(ctx, &args)\n\tif err != nil {\n\t\treturn err\n\t}\n\tresp, err := d.TestExecutionService.SendResults(ctx, testResult)\n\tif err != nil {\n\t\td.logger.Errorf(\"error while sending test reports %v\", err)\n\t\terr = errs.New(errs.GenericErrRemark.Error())\n\t\treturn err\n\t}\n\ttaskPayload.Status = resp.TaskStatus\n\n\tif subModule.Postrun != nil {\n\t\td.logger.Infof(\"Running post-run steps\")\n\t\tazureLogwriter := logwriter.NewAzureLogWriter(d.AzureClient, core.PurposePostRunLogs, d.logger)\n\n\t\terr = d.ExecutionManager.ExecuteUserCommands(ctx, core.PostRun, payload, subModule.Postrun, secretMap, azureLogwriter, modulePath)\n\t\tif err != nil {\n\t\t\td.logger.Errorf(\"Unable to run post-run steps %v\", err)\n\t\t\terr = &errs.StatusFailed{Remark: \"Failed in running post-run steps.\"}\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}\n\nfunc (d *driverV2) runPreRunBeforeTestExecution(ctx 
context.Context,\n\ttasConfig *core.TASConfigV2,\n\tsubModule *core.SubModule,\n\tpayload *core.Payload,\n\tsecretMap map[string]string,\n\tmodulePath string) error {\n\tif tasConfig.NodeVersion != \"\" {\n\t\t// install node version before preRuns\n\t\tif err := d.nodeInstaller.InstallNodeVersion(ctx, tasConfig.NodeVersion); err != nil {\n\t\t\td.logger.Debugf(\"error while installing node of version %s, error %v \", tasConfig.NodeVersion, err)\n\t\t\treturn err\n\t\t}\n\t}\n\n\td.logger.Infof(\"Running pre-run steps for submodule %s\", subModule.Name)\n\tazureLogwriter := logwriter.NewAzureLogWriter(d.AzureClient, core.PurposePreRunLogs, d.logger)\n\terr := d.ExecutionManager.ExecuteUserCommands(ctx, core.PreRun, payload, subModule.Prerun, secretMap, azureLogwriter, modulePath)\n\tif err != nil {\n\t\td.logger.Errorf(\"Unable to run pre-run steps %v\", err)\n\t\terr = &errs.StatusFailed{Remark: \"Failed in running pre-run steps\"}\n\t\treturn err\n\t}\n\td.logger.Debugf(\"installing runners at path %s\", modulePath)\n\tif err = d.ExecutionManager.ExecuteInternalCommands(ctx, core.InstallRunners, global.InstallRunnerCmds,\n\t\tmodulePath, nil, nil); err != nil {\n\t\td.logger.Errorf(\"Unable to install custom runners %v\", err)\n\t\terr = errs.New(errs.GenericErrRemark.Error())\n\t\treturn err\n\t}\n\treturn nil\n}\n\nfunc (d *driverV2) runDiscoveryHelper(ctx context.Context,\n\ttopPreRun *core.Run,\n\tsubModuleList []core.SubModule,\n\tpayload *core.Payload,\n\ttasConfig *core.TASConfigV2,\n\ttaskPayload *core.TaskPayload,\n\tdiff map[string]int,\n\tdiffExists bool,\n\tmainBuffer *bytes.Buffer,\n\tsecretMap map[string]string) error {\n\ttotalSubmoduleCount := len(subModuleList)\n\tif apiErr := d.ListSubModuleService.Send(ctx, payload.BuildID, totalSubmoduleCount); apiErr != nil {\n\t\treturn apiErr\n\t}\n\n\tif tasConfig.NodeVersion != \"\" {\n\t\tif err := d.nodeInstaller.InstallNodeVersion(ctx, tasConfig.NodeVersion); err != nil {\n\t\t\treturn 
err\n\t\t}\n\t}\n\n\tif err := d.runPreRunCommand(ctx, topPreRun, mainBuffer, payload, secretMap, taskPayload, subModuleList); err != nil {\n\t\treturn err\n\t}\n\td.logger.Debugf(\"Caching workspace\")\n\t// TODO: this will be change after we move to parallel pod executuon\n\tif err := d.CacheStore.CacheWorkspace(ctx, \"\"); err != nil {\n\t\td.logger.Errorf(\"Error caching workspace: %+v\", err)\n\t\terr = errs.New(errs.GenericErrRemark.Error())\n\t\treturn err\n\t}\n\n\terrChannelDiscovery := make(chan error, totalSubmoduleCount)\n\tdiscoveryWaitGroup := sync.WaitGroup{}\n\tfor i := 0; i < totalSubmoduleCount; i++ {\n\t\tdiscoveryWaitGroup.Add(1)\n\t\tgo func(subModule *core.SubModule) {\n\t\t\tdefer discoveryWaitGroup.Done()\n\t\t\terr := d.runDiscoveryForEachSubModule(ctx, payload, subModule, tasConfig, diff, diffExists, secretMap)\n\t\t\terrChannelDiscovery <- err\n\t\t}(&subModuleList[i])\n\t}\n\tdiscoveryWaitGroup.Wait()\n\tfor i := 0; i < totalSubmoduleCount; i++ {\n\t\te := <-errChannelDiscovery\n\t\tif e != nil {\n\t\t\treturn e\n\t\t}\n\t}\n\treturn nil\n}\n\nfunc (d *driverV2) runPreRunCommand(ctx context.Context,\n\ttopPreRun *core.Run,\n\tmainBuffer *bytes.Buffer, payload *core.Payload,\n\tsecretMap map[string]string, taskPayload *core.TaskPayload,\n\tsubModuleList []core.SubModule) error {\n\ttotalSubmoduleCount := len(subModuleList)\n\n\terrChannelPreRun := make(chan error, totalSubmoduleCount)\n\n\tpreRunWaitGroup := sync.WaitGroup{}\n\n\tif topPreRun != nil {\n\t\td.logger.Debugf(\"Running Pre Run on top level\")\n\t\tif _, err := mainBuffer.WriteString(preRunLog); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tbufferWirter := logwriter.NewBufferLogWriter(\"TOP-LEVEL\", mainBuffer, d.logger)\n\t\tif err := d.ExecutionManager.ExecuteUserCommands(ctx, core.PreRun, payload,\n\t\t\ttopPreRun, secretMap, bufferWirter, global.RepoDir); err != nil {\n\t\t\td.logger.Errorf(\"Error occurred running top level PreRun , err %v\", err)\n\t\t\treturn 
err\n\t\t}\n\t}\n\n\tbufferList := []*bytes.Buffer{}\n\n\td.logger.Debugf(\"pre run on top level ended\")\n\tfor i := 0; i < totalSubmoduleCount; i++ {\n\t\tpreRunWaitGroup.Add(1)\n\n\t\tnewBuffer := new(bytes.Buffer)\n\n\t\tbufferList = append(bufferList, newBuffer)\n\n\t\tgo func(subModule *core.SubModule) {\n\t\t\tdefer preRunWaitGroup.Done()\n\t\t\tbufferWirterSubmodule := logwriter.NewBufferLogWriter(subModule.Name, newBuffer, d.logger)\n\t\t\tdicoveryErr := d.runPreRunForEachSubModule(ctx, payload, subModule, secretMap, bufferWirterSubmodule)\n\t\t\tif dicoveryErr != nil {\n\t\t\t\ttaskPayload.Status = core.Error\n\t\t\t\td.logger.Errorf(\"error while running discovery for sub module %s, error %v\", subModule.Name, dicoveryErr)\n\t\t\t}\n\t\t\terrChannelPreRun <- dicoveryErr\n\t\t}(&subModuleList[i])\n\t}\n\n\tpreRunWaitGroup.Wait()\n\n\tfor i := 0; i < totalSubmoduleCount; i++ {\n\t\tmainBuffer.WriteString(bufferList[i].String())\n\t}\n\tfor i := 0; i < totalSubmoduleCount; i++ {\n\t\te := <-errChannelPreRun\n\t\tif e != nil {\n\t\t\td.logger.Debugf(\"pre run failed with error %v\", e)\n\t\t\treturn e\n\t\t}\n\t}\n\treturn nil\n}\n\nfunc (d *driverV2) runDiscoveryForEachSubModule(ctx context.Context,\n\tpayload *core.Payload,\n\tsubModule *core.SubModule,\n\ttasConfig *core.TASConfigV2,\n\tdiff map[string]int,\n\tdiffExists bool,\n\tsecretMap map[string]string) error {\n\targs := d.buildDiscoveryArgs(payload, tasConfig, subModule, secretMap, diffExists, diff)\n\n\tdiscoveryResult, err := d.TestDiscoveryService.Discover(ctx, &args)\n\tif err != nil {\n\t\td.logger.Errorf(\"Unable to perform test discovery: %+v\", err)\n\t\terr = &errs.StatusFailed{Remark: \"Failed in discovering tests\"}\n\t\treturn err\n\t}\n\tpopulateTestDiscoveryV2(discoveryResult, subModule, tasConfig)\n\tif err := d.TestDiscoveryService.SendResult(ctx, discoveryResult); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}\n\nfunc (d *driverV2) runPreRunForEachSubModule(ctx 
context.Context,\n\tpayload *core.Payload,\n\tsubModule *core.SubModule,\n\tsecretMap map[string]string,\n\tbufferWirterSubmodule core.LogWriterStrategy) error {\n\td.logger.Debugf(\"Running discovery for sub module %s\", subModule.Name)\n\tblYML := subModule.Blocklist\n\tif err := d.BlockTestService.GetBlockTests(ctx, blYML, payload.BranchName); err != nil {\n\t\td.logger.Errorf(\"Unable to fetch blocklisted tests: %v\", err)\n\t\terr = errs.New(errs.GenericErrRemark.Error())\n\t\treturn err\n\t}\n\tmodulePath := path.Join(global.RepoDir, subModule.Path)\n\t// PRE RUN steps\n\tif subModule.Prerun != nil {\n\t\td.logger.Infof(\"Running pre-run steps for submodule %s\", subModule.Name)\n\t\terr := d.ExecutionManager.ExecuteUserCommands(ctx, core.PreRun, payload, subModule.Prerun,\n\t\t\tsecretMap, bufferWirterSubmodule, modulePath)\n\t\tif err != nil {\n\t\t\td.logger.Errorf(\"Unable to run pre-run steps %v\", err)\n\t\t\terr = &errs.StatusFailed{Remark: \"Failed in running pre-run steps\"}\n\t\t\treturn err\n\t\t}\n\t\td.logger.Debugf(\"error checks end\")\n\t}\n\terr := d.ExecutionManager.ExecuteInternalCommands(ctx, core.InstallRunners, global.InstallRunnerCmds, modulePath, nil, nil)\n\tif err != nil {\n\t\td.logger.Errorf(\"Unable to install custom runners %v\", err)\n\t\terr = errs.New(errs.GenericErrRemark.Error())\n\t\treturn err\n\t}\n\n\treturn nil\n}\n\nfunc (d *driverV2) setUpDiscovery(ctx context.Context,\n\tpayload *core.Payload,\n\ttasConfig *core.TASConfigV2,\n\toauth *core.Oauth) (*setUpResultV2, error) {\n\tif err := d.setCache(tasConfig); err != nil {\n\t\treturn nil, err\n\t}\n\tcacheKey := tasConfig.Cache.Key\n\n\tg, errCtx := errgroup.WithContext(ctx)\n\tg.Go(func() error {\n\t\tif errG := d.CacheStore.Download(errCtx, cacheKey); errG != nil {\n\t\t\td.logger.Errorf(\"Unable to download cache: %v\", errG)\n\t\t\terrG = errs.New(errs.GenericErrRemark.Error())\n\t\t\treturn errG\n\t\t}\n\t\treturn nil\n\t})\n\tdiffExists := true\n\tdiff := 
map[string]int{}\n\tg.Go(func() error {\n\t\tdiffC, errG := d.DiffManager.GetChangedFiles(errCtx, payload, oauth)\n\t\tif errG != nil {\n\t\t\tif errors.Is(errG, errs.ErrGitDiffNotFound) {\n\t\t\t\tdiffExists = false\n\t\t\t} else {\n\t\t\t\td.logger.Errorf(\"Unable to identify changed files %s\", errG)\n\t\t\t\terrG = errs.New(\"Error occurred in fetching diff from GitHub\")\n\t\t\t\treturn errG\n\t\t\t}\n\t\t}\n\t\tdiff = diffC\n\t\treturn nil\n\t})\n\terr := g.Wait()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &setUpResultV2{\n\t\tcacheKey:   cacheKey,\n\t\tdiffExists: diffExists,\n\t\tdiff:       diff,\n\t}, nil\n}\n\nfunc (d *driverV2) buildDiscoveryArgs(payload *core.Payload, tasConfig *core.TASConfigV2,\n\tsubModule *core.SubModule,\n\tsecretMap map[string]string,\n\tdiffExists bool,\n\tdiff map[string]int) core.DiscoveyArgs {\n\ttestPattern := subModule.Patterns\n\tenvMap := getEnv(payload, tasConfig, subModule)\n\tmodulePath := path.Join(global.RepoDir, subModule.Path)\n\n\treturn core.DiscoveyArgs{\n\t\tTestPattern:    testPattern,\n\t\tPayload:        payload,\n\t\tEnvMap:         envMap,\n\t\tSecretData:     secretMap,\n\t\tTestConfigFile: subModule.ConfigFile,\n\t\tFrameWork:      subModule.Framework,\n\t\tSmartRun:       tasConfig.SmartRun,\n\t\tDiff:           GetSubmoduleBasedDiff(diff, subModule.Path),\n\t\tDiffExists:     diffExists,\n\t\tCWD:            modulePath,\n\t}\n}\n\nfunc getEnv(payload *core.Payload, tasConfig *core.TASConfigV2, subModule *core.SubModule) map[string]string {\n\tvar envMap map[string]string\n\tif payload.EventType == core.EventPullRequest {\n\t\tenvMap = tasConfig.PreMerge.EnvMap\n\t} else {\n\t\tenvMap = tasConfig.PostMerge.EnvMap\n\t}\n\tif envMap == nil {\n\t\tenvMap = map[string]string{}\n\t}\n\n\t// overwrite the existing env with more specific one\n\tif subModule.Prerun != nil && subModule.Prerun.EnvMap != nil {\n\t\tfor k, v := range subModule.Prerun.EnvMap {\n\t\t\tenvMap[k] = v\n\t\t}\n\t}\n\tif 
path.Join(global.RepoDir, subModule.Path) == global.RepoDir {\n\t\tenvMap[global.ModulePath] = \"\"\n\t} else {\n\t\tenvMap[global.ModulePath] = subModule.Path\n\t}\n\treturn envMap\n}\n\nfunc populateTestDiscoveryV2(testDiscoveryResult *core.DiscoveryResult, subModule *core.SubModule, tasConfig *core.TASConfigV2) {\n\ttestDiscoveryResult.Parallelism = subModule.Parallelism\n\ttestDiscoveryResult.SplitMode = tasConfig.SplitMode\n\ttestDiscoveryResult.SubModule = subModule.Name\n}\n\nfunc (d *driverV2) findSubmodule(tasConfig *core.TASConfigV2, payload *core.Payload, subModuleName string) (*core.SubModule, error) {\n\tif payload.EventType == core.EventPullRequest {\n\t\tfor i := 0; i < len(tasConfig.PreMerge.SubModules); i++ {\n\t\t\tif tasConfig.PreMerge.SubModules[i].Name == subModuleName {\n\t\t\t\treturn &tasConfig.PreMerge.SubModules[i], nil\n\t\t\t}\n\t\t}\n\t} else {\n\t\tfor i := 0; i < len(tasConfig.PostMerge.SubModules); i++ {\n\t\t\tif tasConfig.PostMerge.SubModules[i].Name == subModuleName {\n\t\t\t\treturn &tasConfig.PostMerge.SubModules[i], nil\n\t\t\t}\n\t\t}\n\t}\n\treturn nil, errs.ErrSubModuleNotFound\n}\n\nfunc (d *driverV2) buildTestExecutionArgs(payload *core.Payload,\n\ttasConfig *core.TASConfigV2,\n\tsubModule *core.SubModule,\n\tsecretMap map[string]string,\n\tcoverageDir string) core.TestExecutionArgs {\n\ttarget := subModule.Patterns\n\tenvMap := getEnv(payload, tasConfig, subModule)\n\tmodulePath := path.Join(global.RepoDir, subModule.Path)\n\n\tazureLogWriter := logwriter.NewAzureLogWriter(d.AzureClient, core.PurposeExecutionLogs, d.logger)\n\treturn core.TestExecutionArgs{\n\t\tPayload:           payload,\n\t\tCoverageDir:       coverageDir,\n\t\tLogWriterStrategy: azureLogWriter,\n\t\tTestPattern:       target,\n\t\tEnvMap:            envMap,\n\t\tTestConfigFile:    subModule.ConfigFile,\n\t\tFrameWork:         subModule.Framework,\n\t\tSecretData:        secretMap,\n\t\tCWD:               modulePath,\n\t}\n}\n\nfunc 
GetSubmoduleBasedDiff(diff map[string]int, subModulePath string) map[string]int {\n\tnewDiff := map[string]int{}\n\tsubModulePath = strings.TrimPrefix(subModulePath, \"./\")\n\tif !strings.HasSuffix(subModulePath, \"/\") {\n\t\tsubModulePath += \"/\"\n\t}\n\n\tfor file, value := range diff {\n\t\tfilePath := strings.TrimPrefix(file, subModulePath)\n\n\t\tnewDiff[filePath] = value\n\t}\n\treturn newDiff\n}\n\nfunc (d *driverV2) setCache(tasConfig *core.TASConfigV2) error {\n\tif tasConfig.Cache == nil {\n\t\tchecksum, err := utils.ComputeChecksum(fmt.Sprintf(\"%s/%s\", global.RepoDir, global.PackageJSON))\n\t\tif err != nil {\n\t\t\td.logger.Errorf(\"Error while computing checksum, error %v\", err)\n\t\t\treturn err\n\t\t}\n\t\ttasConfig.Cache = &core.Cache{\n\t\t\tKey:   checksum,\n\t\t\tPaths: []string{},\n\t\t}\n\t}\n\treturn nil\n}\n"
  },
  {
    "path": "pkg/driver/driver_v2_test.go",
    "content": "package driver\n\nimport (\n\t\"reflect\"\n\t\"testing\"\n)\n\ntype testArgs struct {\n\tname          string\n\tsubModulePath string\n\tdiffMap       map[string]int\n\twantDiffMap   map[string]int\n}\n\nfunc TestGetSubmoduleBasedDiff(t *testing.T) {\n\ttests :=\n\t\t[]testArgs{\n\t\t\t{\n\t\t\t\tname:          \"test with subModule package included in diff 1\",\n\t\t\t\tsubModulePath: \"./package/subModule-1\",\n\t\t\t\tdiffMap: map[string]int{\n\t\t\t\t\t\"package/subModule-1/test/testFile1.js\": 1,\n\t\t\t\t\t\"package/subModule-1/test/testFile2.js\": 2,\n\t\t\t\t\t\"package/subModule-2/test/testFile1.js\": 3,\n\t\t\t\t\t\"package/subModule-2/test/testFile2.js\": 4,\n\t\t\t\t},\n\t\t\t\twantDiffMap: map[string]int{\n\t\t\t\t\t\"test/testFile1.js\":                     1,\n\t\t\t\t\t\"test/testFile2.js\":                     2,\n\t\t\t\t\t\"package/subModule-2/test/testFile1.js\": 3,\n\t\t\t\t\t\"package/subModule-2/test/testFile2.js\": 4,\n\t\t\t\t},\n\t\t\t},\n\n\t\t\t{\n\t\t\t\tname:          \"test with subModule package included in diff 2\",\n\t\t\t\tsubModulePath: \"package/subModule-1\",\n\t\t\t\tdiffMap: map[string]int{\n\t\t\t\t\t\"package/subModule-1/test/testFile1.js\": 1,\n\t\t\t\t\t\"package/subModule-1/test/testFile2.js\": 2,\n\t\t\t\t\t\"package/subModule-2/test/testFile1.js\": 3,\n\t\t\t\t\t\"package/subModule-2/test/testFile2.js\": 4,\n\t\t\t\t},\n\t\t\t\twantDiffMap: map[string]int{\n\t\t\t\t\t\"test/testFile1.js\":                     1,\n\t\t\t\t\t\"test/testFile2.js\":                     2,\n\t\t\t\t\t\"package/subModule-2/test/testFile1.js\": 3,\n\t\t\t\t\t\"package/subModule-2/test/testFile2.js\": 4,\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tname:          \"test with subModule package included in diff 3\",\n\t\t\t\tsubModulePath: \"./package/subModule-1/\",\n\t\t\t\tdiffMap: map[string]int{\n\t\t\t\t\t\"package/subModule-1/test/testFile1.js\": 1,\n\t\t\t\t\t\"package/subModule-1/test/testFile2.js\": 
2,\n\t\t\t\t\t\"package/subModule-2/test/testFile1.js\": 3,\n\t\t\t\t\t\"package/subModule-2/test/testFile2.js\": 4,\n\t\t\t\t},\n\t\t\t\twantDiffMap: map[string]int{\n\t\t\t\t\t\"test/testFile1.js\":                     1,\n\t\t\t\t\t\"test/testFile2.js\":                     2,\n\t\t\t\t\t\"package/subModule-2/test/testFile1.js\": 3,\n\t\t\t\t\t\"package/subModule-2/test/testFile2.js\": 4,\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tname:          \"test with subModule package included in diff 4\",\n\t\t\t\tsubModulePath: \"package/subModule-1/\",\n\t\t\t\tdiffMap: map[string]int{\n\t\t\t\t\t\"package/subModule-1/test/testFile1.js\": 1,\n\t\t\t\t\t\"package/subModule-1/test/testFile2.js\": 2,\n\t\t\t\t\t\"package/subModule-2/test/testFile1.js\": 3,\n\t\t\t\t\t\"package/subModule-2/test/testFile2.js\": 4,\n\t\t\t\t},\n\t\t\t\twantDiffMap: map[string]int{\n\t\t\t\t\t\"test/testFile1.js\":                     1,\n\t\t\t\t\t\"test/testFile2.js\":                     2,\n\t\t\t\t\t\"package/subModule-2/test/testFile1.js\": 3,\n\t\t\t\t\t\"package/subModule-2/test/testFile2.js\": 4,\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\n\t\t\t\tname:          \"test with subModule package not included in diff \",\n\t\t\t\tsubModulePath: \"package/subModule-1/\",\n\t\t\t\tdiffMap: map[string]int{\n\t\t\t\t\t\"package/subModule-2/test/testFile1.js\": 1,\n\t\t\t\t\t\"package/subModule-2/test/testFile2.js\": 2,\n\t\t\t\t\t\"package/subModule-2/test/testFile3.js\": 3,\n\t\t\t\t\t\"package/subModule-2/test/testFile4.js\": 4,\n\t\t\t\t},\n\t\t\t\twantDiffMap: map[string]int{\n\t\t\t\t\t\"package/subModule-2/test/testFile1.js\": 1,\n\t\t\t\t\t\"package/subModule-2/test/testFile2.js\": 2,\n\t\t\t\t\t\"package/subModule-2/test/testFile3.js\": 3,\n\t\t\t\t\t\"package/subModule-2/test/testFile4.js\": 4,\n\t\t\t\t},\n\t\t\t},\n\t\t}\n\tfor _, test := range tests {\n\t\tt.Run(test.name, func(t *testing.T) {\n\t\t\tactualMap := GetSubmoduleBasedDiff(test.diffMap, test.subModulePath)\n\t\t\tif 
!reflect.DeepEqual(actualMap, test.wantDiffMap) {\n\t\t\t\tt.Errorf(\"not equal wanted %+v , got %+v\", test.diffMap, actualMap)\n\t\t\t}\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "pkg/errs/nucleus.go",
    "content": "package errs\n\nimport (\n\t\"fmt\"\n)\n\n// Err represents structure of a custom error\ntype Err struct {\n\tCode    string\n\tMessage string\n\tURL     string\n}\n\nfunc (e Err) Error() string {\n\treturn fmt.Sprintf(\"%s : %s \", e.Code, e.Message)\n}\n\n// Error represents a json-encoded API error.\ntype Error struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc (e *Error) Error() string {\n\treturn e.Message\n}\n\n// New returns a new error message.\nfunc New(text string) error {\n\treturn &Error{Message: text}\n}\n\n// ErrInvalidPayload returns an error when the  nucleus payload is invalid.\nfunc ErrInvalidPayload(errMsg string) error {\n\treturn New(errMsg)\n}\n\n// ErrSecretNotFound represents the error when a secret is not found in map.\nfunc ErrSecretNotFound(secret string) error {\n\treturn New(fmt.Sprintf(\"secret with name %s not found\", secret))\n}\n\nvar (\n\t// ErrParseVariableName represents the error when unable to parse a\n\t// variable name within a substitution.\n\tErrParseVariableName = New(\"unable to parse variable name\")\n\t// ErrSecretRegexMatch represents the error when a regex does not match.\n\tErrSecretRegexMatch = New(\"secret regex match failed\")\n\t// ErrNotFound return when azure blob is not found.\n\tErrNotFound = New(\"blob not found\")\n\t// ErrSASToken returns when sas token is not found.\n\tErrSASToken = New(\"azure client requires SAS Token\")\n\t// ErrAzureCredentials is returned when the azure credentials are invalid.\n\tErrAzureCredentials = New(\"azure client requires credentials\")\n\t// ErrAPIStatus is returned when the api status is not 200.\n\tErrAPIStatus = New(\"non OK status\")\n\t// ErrInvalidLoggerInstance is returned when logger instance is not supported.\n\tErrInvalidLoggerInstance = New(\"Invalid logger instance\")\n\t// ErrUnsupportedGitProvider is returned when try to integrate unsupported provider repo\n\tErrUnsupportedGitProvider = New(\"unsupported gitprovider\")\n\t// 
ErrGitDiffNotFound is returned when basecommit is null or git provider returns empty diff\n\tErrGitDiffNotFound = New(\"diff not found\")\n\t// GenericErrRemark returns a generic error message for user facing errors.\n\tGenericErrRemark = New(\"Unexpected error\")\n\t// ErrMarshalJSON is returned when json marshal failed\n\tErrMarshalJSON = New(\"JSON marshal failed\")\n\t// ErrUnMarshalJSON is returned when json unmarshal failed\n\tErrUnMarshalJSON = New(\"JSON unmarshal failed\")\n\t// ErrMissingAccessToken is returned when Oauth token is missing\n\tErrMissingAccessToken = New(\"Missing OAuth access token. Please add an OAuth token\")\n\t// ErrSubModuleNotFound will be thrown if submodule is not present in yml\n\tErrSubModuleNotFound = New(\"Submodule not found in tas config file\")\n)\n\ntype StatusFailed struct {\n\tRemark string\n}\n\nfunc (e *StatusFailed) Error() string {\n\treturn e.Remark\n}\n\n// ErrInvalidConf represents field validation failures of TAS configuration\ntype ErrInvalidConf struct {\n\tMessage string\n\tFields  []string\n\tValues  []interface{}\n}\n\nfunc (e ErrInvalidConf) Error() string {\n\terrMsg := e.Message\n\tfor idx, field := range e.Fields {\n\t\terrMsg += fmt.Sprintf(\"%s: %s\\n\", field, e.Values[idx])\n\t}\n\treturn errMsg\n}\n"
  },
  {
    "path": "pkg/errs/nucleus_test.go",
    "content": "package errs\n\nimport (\n\t\"testing\"\n)\n\nfunc TestError_Error(t *testing.T) {\n\te := New(\"A secret message\")\n\tgot := e.Error()\n\twant := \"A secret message\"\n\tif got != want {\n\t\tt.Errorf(\"Received: %v, Expected: %v\", got, want)\n\t}\n}\n\nfunc TestErr_Error(t *testing.T) {\n\te := &Err{\n\t\tCode:    \"fmt.Print(error)\",\n\t\tMessage: \"This is the message\",\n\t}\n\tgot := e.Error()\n\twant := \"fmt.Print(error) : This is the message \"\n\tif got != want {\n\t\tt.Errorf(\"Received: %v, Expected: %v\", got, want)\n\t}\n}\n\nfunc Test_ErrInvalidPayload(t *testing.T) {\n\tgot := ErrInvalidPayload(\"Error for invalid nucleus payload\")\n\twant := \"Error for invalid nucleus payload\"\n\tif got.Error() != want {\n\t\tt.Errorf(\"Received: %v, Expected: %v\", got, want)\n\t}\n}\n\nfunc TestErrSecretNotFound(t *testing.T) {\n\tgot := ErrSecretNotFound(\"SECRET_STRING\")\n\twant := \"secret with name SECRET_STRING not found\"\n\tif got.Error() != want {\n\t\tt.Errorf(\"Received: %v, Expected: %v\", got, want)\n\t}\n}\n"
  },
  {
    "path": "pkg/errs/synapse.go",
    "content": "package errs\n\nimport (\n\t\"fmt\"\n\t\"strings\"\n)\n\n// ERR_DUMMY dummy error\nvar ERR_DUMMY = Err{\n\tCode:    \"ERR::DUMMY\",\n\tMessage: \"Dummy error \"}\n\n// ERR_INVALID_ENVIRONMENT  should be thorwn when invalid environment specified\"\nvar ERR_INVALID_ENVIRONMENT = Err{\n\tCode:    \"ERR::INV::ENV\",\n\tMessage: \"Invalid environment specified\"}\n\n// ERR_CTRL_CONN_MAX_ATTEMPT should be thrown when control websocket reconnection max attempt reached\nvar ERR_CTRL_CONN_MAX_ATTEMPT = Err{\n\tCode:    \"ERR::CTRL::CONN::MAX::ATTEMPT\",\n\tMessage: \"Control websocket reconnection max attempt reached\"}\n\n// ERR_SNK_PRX_MAX_ATTEMPT should be thrown when sink proxy restart max attempt reache\nvar ERR_SNK_PRX_MAX_ATTEMPT = Err{\n\tCode:    \"ERR::SNK::PRX::MAX::ATTEMPT\",\n\tMessage: \"Sink proxy restart max attempt reached\"}\n\n// ERR_INF_API_MAX_ATTEMPT should be thrown when info api server restart max attempt reached\nvar ERR_INF_API_MAX_ATTEMPT = Err{\n\tCode:    \"ERR::INF::API::MAX::ATTEMPT\",\n\tMessage: \"Info api server restart max attempt reached\"}\n\n// ERR_FS_MAX_ATTEMPT should be thrown when file server restart max attempt reached\nvar ERR_FS_MAX_ATTEMPT = Err{\n\tCode:    \"ERR::FS::MAX::ATTEMPT\",\n\tMessage: \"File server restart max attempt reached\"}\n\n// ERR_INV_WS_DAT_TYPE should be thrown when invalid data type reader received from websocket\nvar ERR_INV_WS_DAT_TYPE = Err{\n\tCode:    \"ERR::INV::WS::DAT::TYPE\",\n\tMessage: \"Invalid data type reader received from websocket\"}\n\n// ERR_BIN_UPD  function retruns err with code  \"ERR::BIN::UPD\"\nfunc ERR_BIN_UPD(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::BIN::UPD\",\n\t\tMessage: \"Unable to update binary \" + err}\n}\n\n// ERR_WS_CTRL_CONN function returns err with code  \"ERR::WS::Conn\"\nfunc ERR_WS_CTRL_CONN(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::WS::Conn\",\n\t\tMessage: \"Unable to establish control websocket connection \" + 
err}\n}\n\n// ERR_WS_CONN function returns err with code  \"ERR::WS::Conn\"\nfunc ERR_WS_CONN(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::WS::Conn\",\n\t\tMessage: \"Unable to  establish websocket connection \" + err}\n}\n\n// ERR_WS_CTRL_CONN_DWN function returns err with code \"ERR::WS::CTRL::CONN::DWN\"\nfunc ERR_WS_CTRL_CONN_DWN(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::WS::CTRL::CONN::DWN\",\n\t\tMessage: \"Control websocket connection closed \" + err}\n}\n\n// ERR_DAT_CONN_DWN function returns err with code \"ERR::DAT::CONN::DWN\"\nfunc ERR_DAT_CONN_DWN(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::DAT::CONN::DWN\",\n\t\tMessage: \"Data websocket connection closed \" + err}\n}\n\n// ERR_INVALID_WS_URL function returns err with code \"ERR::INV::WS::URL\"\nfunc ERR_INVALID_WS_URL(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::INV::WS::URL\",\n\t\tMessage: \"Invalid websocket url error \" + err}\n}\n\n// ERR_SNK_PRX function return error with code \"ERR::SNK::PRX\"\nfunc ERR_SNK_PRX(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::SNK::PRX\",\n\t\tMessage: \"Sink proxy failed :  \" + err}\n}\n\n// ERR_SNK_PRX_CONN function returns error with code \"ERR::SNK::PRX::CONN\"\nfunc ERR_SNK_PRX_CONN(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::SNK::PRX::CONN\",\n\t\tMessage: \"Unable to establish connection to local proxy :  \" + err}\n}\n\n// ERR_WS_WRT function returns error with code \"ERR::WS::WRT\"\nfunc ERR_WS_WRT(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::WS::WRT\",\n\t\tMessage: \"Unable to valid retrieve writer from ws :  \" + err}\n}\n\n// ERR_WS_RDR function returns error with code \"ERR::WS::RDR\"\nfunc ERR_WS_RDR(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::WS::RDR\",\n\t\tMessage: \"Unable to retrieve valid reader from ws :  \" + err}\n}\n\n// ERR_ATT_PRX function returns error with code \"ERR::ATT::PRX\"\nfunc ERR_ATT_PRX(reqType string, err string) Err {\n\treturn Err{\n\t\tCode:    
\"ERR::ATT::PRX\",\n\t\tMessage: fmt.Sprintf(\"Unable to attach proxy to [ %s ]request :  %s\", reqType, err)}\n}\n\n// ERR_DNS_RLV function returns error with code  \"ERR::DNS::RLV\"\nfunc ERR_DNS_RLV(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::DNS::RLV\",\n\t\tMessage: fmt.Sprintf(\"Error while resolving dns :  %s\", err)}\n}\n\n// ERR_VLD_CFG function return error with code ERR::CNF::FLD::VLD\nfunc ERR_VLD_CFG(errs []string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::CNF::FLD::VLD\",\n\t\tMessage: fmt.Sprintf(\"Validation errors :  \\n%s\", strings.Join(errs, \"\\n\"))}\n}\n\n// ERR_DAT_WS_RD function returns error with code ERR::DAT::WS::RD\nfunc ERR_DAT_WS_RD(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::DAT::WS::RD\",\n\t\tMessage: fmt.Sprintf(\"Unable to read from websocket :  \\n%s\", err)}\n}\n\n// ERR_SNK_WRT function returns error with code ERR::SNK::WRT\nfunc ERR_SNK_WRT(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::SNK::WRT\",\n\t\tMessage: fmt.Sprintf(\"Unable to read from websocket :  \\n%s\", err)}\n}\n\n// ERR_API_SRV_STR function returns error with code ERR::API::SRV::STR\nfunc ERR_API_SRV_STR(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::API::SRV::STR\",\n\t\tMessage: fmt.Sprintf(\"Unable to start api server :  \\n%s\", err)}\n}\n\n// ERR_FIL_SRV_STR function returns error with code \"ERR::FIL::SRV::STR\"\nfunc ERR_FIL_SRV_STR(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::FIL::SRV::STR\",\n\t\tMessage: fmt.Sprintf(\"Unable to start file server :  \\n%s\", err)}\n}\n\n// ERR_DIR_CRT function returns error with code \"ERR::DIR::CRT\"\nfunc ERR_DIR_CRT(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::DIR::CRT\",\n\t\tMessage: fmt.Sprintf(\"Unable to create directory :  \\n%s\", err)}\n}\n\n// ErrDirDel function returns error with code \"ERR::DIR::DEL\"\nfunc ErrDirDel(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::DIR::DEL\",\n\t\tMessage: fmt.Sprintf(\"Unable to delete directory :  \\n%s\", 
err)}\n}\n\n// ERR_FIL_CRT function returns error with code ERR::FIL::CRT\nfunc ERR_FIL_CRT(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::FIL::CRT\",\n\t\tMessage: fmt.Sprintf(\"Unable to create file :  \\n%s\", err)}\n}\n\n// ERR_API_WEB_HOK function returns error with code ERR::API::WEB::HOK\nfunc ERR_API_WEB_HOK(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::API::WEB::HOK\",\n\t\tMessage: fmt.Sprintf(\"Unable to call webhook url :  \\n%s\", err)}\n}\n\n// ERR_DOCKER_RUN function returns error with code ERR::DOCKER::RUN\nfunc ERR_DOCKER_RUN(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::DOCKER::RUN\",\n\t\tMessage: fmt.Sprintf(\"Docker run failed with error:  \\n%s\", err)}\n}\n\n// ERR_DOCKER_CRT function returns error with code ERR::DOCKER::CRT\nfunc ERR_DOCKER_CRT(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::DOCKER::CRT\",\n\t\tMessage: fmt.Sprintf(\"Docker create failed with error:  \\n%s\", err)}\n}\n\n// ERR_DOCKER_STRT function returns error with code \"ERR::DOCKER::STRT\"\nfunc ERR_DOCKER_STRT(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::DOCKER::STRT\",\n\t\tMessage: fmt.Sprintf(\"Docker start failed with error:  \\n%s\", err)}\n}\n\n// ErrDockerVolCrt function returns error with code \"ERR::DOCKER::VOL::CRT\"\nfunc ErrDockerVolCrt(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::DOCKER::VOL::CRT\",\n\t\tMessage: fmt.Sprintf(\"Docker volume create failed with error:  \\n%s\", err)}\n}\n\n// ErrDockerCP function returns error with code \"ERR::DOCKER::CP\"\nfunc ErrDockerCP(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::DOCKER::CP\",\n\t\tMessage: fmt.Sprintf(\"Error copying file to docker:  \\n%s\", err)}\n}\n\n// ErrSecretLoad function returns error with code \"ERR::SECRET::LOAD\"\nfunc ErrSecretLoad(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::SECRET::LOAD\",\n\t\tMessage: fmt.Sprintf(\"Error in loading secrets:  \\n%s\", err)}\n}\n\n// ERR_JSON_MAR function returns error with code 
\"ERR::JSON::MAR\"\nfunc ERR_JSON_MAR(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::JSON::MAR\",\n\t\tMessage: fmt.Sprintf(\"Error marshaling JSON:  \\n%s\", err)}\n}\n\n// ERR_JSON_UNMAR function returns error with code \"ERR::JSON::UNMAR\"\nfunc ERR_JSON_UNMAR(err string) Err {\n\treturn Err{\n\t\tCode:    \"ERR::JSON::UNMAR\",\n\t\tMessage: fmt.Sprintf(\"Error unmarshaling JSON:  \\n%s\", err)}\n}\n\n// ERR_LT_CRDS functio returns error with code \"ERR::LT::CRDS\"\nfunc ERR_LT_CRDS() Err {\n\treturn Err{\n\t\tCode:    \"ERR::LT::CRDS\",\n\t\tMessage: \"No lambdatest config provided\"}\n}\n\n// ERR_SNK_RD_WRT_MSM should be raise when there is read write mismatch in sink proxy\nvar ERR_SNK_RD_WRT_MSM = Err{\n\tCode:    \"ERR::SNK::RD::WRT::MSM\",\n\tMessage: \"Read write mismatch in sink proxy \"}\n\n// CR_AUTH_NF should be raise when container registry auth are not present for private repo\nvar CR_AUTH_NF = Err{\n\tCode:    \"CR::AUTH:NF\",\n\tMessage: \"Container registry auth are not present for private repo\"}\n"
  },
  {
    "path": "pkg/fileutils/fileutils.go",
    "content": "package fileutils\n\nimport (\n\t\"fmt\"\n\t\"io\"\n\t\"io/fs\"\n\t\"os\"\n\t\"path/filepath\"\n)\n\n// CopyFile copies the contents of the file named src to the file named\n// by dst. The file will be created if it does not already exist. If the\n// destination file exists, all it's contents will be replaced by the contents\n// of the source file. The file mode will be copied from the source and\n// the copied data is synced/flushed to stable storage.\nfunc CopyFile(src, dst string, changeMode bool) (err error) {\n\tin, err := os.Open(src)\n\tif err != nil {\n\t\treturn\n\t}\n\tdefer in.Close()\n\n\tout, err := os.Create(dst)\n\tif err != nil {\n\t\treturn\n\t}\n\tdefer func() {\n\t\tif e := out.Close(); e != nil {\n\t\t\terr = e\n\t\t}\n\t}()\n\n\t_, err = io.Copy(out, in)\n\tif err != nil {\n\t\treturn\n\t}\n\n\terr = out.Sync()\n\tif err != nil {\n\t\treturn\n\t}\n\n\tif !changeMode {\n\t\treturn\n\t}\n\n\tsi, err := os.Lstat(src)\n\tif err != nil {\n\t\treturn\n\t}\n\terr = os.Chmod(dst, si.Mode())\n\tif err != nil {\n\t\treturn\n\t}\n\n\treturn\n}\n\n// CopyDir recursively copies a directory tree, attempting to preserve permissions.\n// Source directory must exist, destination directory must *not* exist.\n// Symlinks are ignored and skipped.\nfunc CopyDir(src, dst string, changeMode bool) (err error) {\n\tsrc = filepath.Clean(src)\n\tdst = filepath.Clean(dst)\n\n\tsi, err := os.Lstat(src)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif !si.IsDir() {\n\t\treturn fmt.Errorf(\"source is not a directory\")\n\t}\n\n\t_, err = os.Lstat(dst)\n\tif err != nil && !os.IsNotExist(err) {\n\t\treturn\n\t}\n\tif err == nil {\n\t\treturn fmt.Errorf(\"destination %+v already exists\", dst)\n\t}\n\n\terr = os.MkdirAll(dst, si.Mode())\n\tif err != nil {\n\t\treturn\n\t}\n\n\t// NOTE: ioutil.ReadDir -> os.ReadDir as the latter is better:\n\t// \"\"\"\n\t// As of Go 1.16, os.ReadDir is a more efficient and correct choice:\n\t// it returns a list of fs.DirEntry 
instead of fs.FileInfo,\n\t// and it returns partial results in the case of an error\n\t// midway through reading a directory.\n\t// \"\"\"\n\tentries, err := os.ReadDir(src)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tvar fileInfo fs.FileInfo\n\tfor _, entry := range entries {\n\t\tsrcPath := filepath.Join(src, entry.Name())\n\t\tdstPath := filepath.Join(dst, entry.Name())\n\n\t\tif entry.IsDir() {\n\t\t\terr = CopyDir(srcPath, dstPath, changeMode)\n\t\t\tif err != nil {\n\t\t\t\treturn\n\t\t\t}\n\t\t} else {\n\t\t\t// Skip symlinks.\n\t\t\tfileInfo, err = entry.Info()\n\t\t\tif err != nil || fileInfo.Mode()&os.ModeSymlink != 0 {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\terr = CopyFile(srcPath, dstPath, changeMode)\n\t\t\tif err != nil {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\n\treturn\n}\n\n// CheckIfExists checks if file or directory exists in the given path.\nfunc CheckIfExists(path string) (bool, error) {\n\tif _, err := os.Lstat(path); err != nil {\n\t\tif os.IsNotExist(err) {\n\t\t\treturn false, nil\n\t\t}\n\t\treturn false, err\n\t}\n\treturn true, nil\n}\n\n// CreateIfNotExists creates a file or a directory only if it does not already exist.\nfunc CreateIfNotExists(path string, isDir bool) error {\n\texists, err := CheckIfExists(path)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif !exists {\n\t\tif isDir {\n\t\t\treturn os.MkdirAll(path, 0755)\n\t\t}\n\t\tif err := os.MkdirAll(filepath.Dir(path), 0755); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tf, err := os.OpenFile(path, os.O_CREATE, 0755)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tf.Close()\n\t}\n\n\treturn nil\n}\n"
  },
  {
    "path": "pkg/fileutils/fileutils_test.go",
    "content": "package fileutils\n\nimport (\n\t\"fmt\"\n\t\"os\"\n\t\"testing\"\n)\n\nfunc removeCopiedPath(path string) {\n\terr := os.RemoveAll(path)\n\tif err != nil {\n\t\tfmt.Println(\"error in removing!!\")\n\t}\n}\n\n// nolint:dupl\nfunc TestCopyFile(t *testing.T) {\n\ttype args struct {\n\t\tsrc        string\n\t\tdst        string\n\t\tchangeMode bool\n\t}\n\ttests := []struct {\n\t\tname          string\n\t\targs          args\n\t\twantErr       bool\n\t\trequireDelete bool // if new file is created, we need to delete for clean up\n\t}{\n\t\t{\n\t\t\t\"Check open error\",\n\t\t\targs{src: \"../../testutils/file\", dst: \"./dst\", changeMode: true},\n\t\t\ttrue,\n\t\t\tfalse,\n\t\t}, // this file is not present\n\n\t\t{\n\t\t\t\"Check create error for invalid path\",\n\t\t\targs{src: \"../../testutils/testfile\", dst: \"../xyz/dst\", changeMode: true},\n\t\t\ttrue,\n\t\t\tfalse,\n\t\t}, // file present at given args.src\n\n\t\t{\n\t\t\t\"Check fasle change mode\",\n\t\t\targs{src: \"../../testutils/testfile\", dst: \"./dst\", changeMode: true},\n\t\t\tfalse,\n\t\t\ttrue,\n\t\t}, // new file will be created, delete it\n\n\t\t{\n\t\t\t\"Check success\",\n\t\t\targs{src: \"../../testutils/testfile\", dst: \"./copyfile\", changeMode: true},\n\t\t\tfalse,\n\t\t\ttrue,\n\t\t}, // new file will be created, delete it\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\terr := CopyFile(tt.args.src, tt.args.dst, tt.args.changeMode)\n\n\t\t\tif tt.requireDelete {\n\t\t\t\tdefer removeCopiedPath(tt.args.dst)\n\t\t\t}\n\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"CopyFile() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t}\n\t\t})\n\t}\n}\n\n// nolint:dupl\nfunc TestCopyDir(t *testing.T) {\n\ttype args struct {\n\t\tsrc        string\n\t\tdst        string\n\t\tchangeMode bool\n\t}\n\ttests := []struct {\n\t\tname          string\n\t\targs          args\n\t\twantErr       bool\n\t\trequireDelete bool // if new 
path/directory is created, we need to delete it for clean up\n\t}{\n\t\t{\n\t\t\t\"Check status error\",\n\t\t\targs{src: \"../../testutils/dne/file\", dst: \"./dst\", changeMode: true},\n\t\t\ttrue,\n\t\t\tfalse,\n\t\t}, // this dir is not present\n\n\t\t{\n\t\t\t\"Check for src is not a directory\",\n\t\t\targs{src: \"../../testutils/testfile\", dst: \"../xyz/dst\", changeMode: true},\n\t\t\ttrue,\n\t\t\tfalse,\n\t\t}, // file present at given args.src\n\n\t\t{\n\t\t\t\"Check for non-exist dst directory\",\n\t\t\targs{src: \"../../testutils/testdirectory\", dst: \"./xyz\", changeMode: true},\n\t\t\tfalse,\n\t\t\ttrue,\n\t\t}, // new dir will be created\n\n\t\t{\n\t\t\t\"Check existing dst\",\n\t\t\targs{src: \"../../testutils/testdirectory\", dst: \"../../testutils/testdirectory\", changeMode: true},\n\t\t\ttrue,\n\t\t\tfalse,\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\terr := CopyDir(tt.args.src, tt.args.dst, tt.args.changeMode)\n\n\t\t\tif tt.requireDelete {\n\t\t\t\tdefer removeCopiedPath(tt.args.dst)\n\t\t\t}\n\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"CopyDir() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc TestCheckIfExists(t *testing.T) {\n\ttype args struct {\n\t\tpath string\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\targs    args\n\t\twant    bool\n\t\twantErr bool\n\t}{\n\t\t{\n\t\t\t\"Check false path error\",\n\t\t\targs{path: \"../pathnotexist/dir\"},\n\t\t\tfalse,\n\t\t\tfalse,\n\t\t}, // this dir is not present\n\n\t\t{\n\t\t\t\"Check for existing path, should not give error\",\n\t\t\targs{path: \"../../testutils/\"},\n\t\t\ttrue,\n\t\t\tfalse,\n\t\t}, // this dir is present\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tgot, err := CheckIfExists(tt.args.path)\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"CheckIfExists() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif 
got != tt.want {\n\t\t\t\tt.Errorf(\"CheckIfExists() = %v, want %v\", got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc TestCreateIfNotExists(t *testing.T) {\n\ttype args struct {\n\t\tpath  string\n\t\tisDir bool\n\t}\n\ttests := []struct {\n\t\tname          string\n\t\targs          args\n\t\twantErr       bool\n\t\trequireDelete bool\n\t}{\n\t\t{\n\t\t\t\"Check false path directory\",\n\t\t\targs{path: \"../pathnotexist\", isDir: true},\n\t\t\tfalse,\n\t\t\ttrue,\n\t\t}, // new dir will be created\n\n\t\t{\n\t\t\t\"Check make directory error\",\n\t\t\targs{path: \"pathnotexist\", isDir: true},\n\t\t\tfalse,\n\t\t\ttrue,\n\t\t}, // new dir will be created\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\terr := CreateIfNotExists(tt.args.path, tt.args.isDir)\n\n\t\t\tif tt.requireDelete {\n\t\t\t\tdefer removeCopiedPath(tt.args.path)\n\t\t\t}\n\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"CreateIfNotExists() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t}\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "pkg/gitmanager/setup.go",
    "content": "// Package gitmanager is used for cloning repo\npackage gitmanager\n\nimport (\n\t\"context\"\n\t\"encoding/base64\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"os\"\n\t\"path/filepath\"\n\t\"strings\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/requestutils\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/urlmanager\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/utils\"\n\t\"github.com/cenkalti/backoff/v4\"\n\t\"github.com/mholt/archiver/v3\"\n)\n\ntype GitLabSingleFileResponse struct {\n\tContent string `json:\"content\"`\n}\n\ntype gitManager struct {\n\tlogger      lumber.Logger\n\thttpClient  http.Client\n\texecManager core.ExecutionManager\n\trequest     core.Requests\n}\n\nconst (\n\tauthorization = \"Authorization\"\n)\n\n// NewGitManager returns a new GitManager\nfunc NewGitManager(logger lumber.Logger, execManager core.ExecutionManager) core.GitManager {\n\treturn &gitManager{\n\t\tlogger: logger,\n\t\thttpClient: http.Client{\n\t\t\tTimeout: global.DefaultGitCloneTimeout,\n\t\t},\n\t\texecManager: execManager,\n\t\trequest:     requestutils.New(logger, global.DefaultAPITimeout, &backoff.StopBackOff{}),\n\t}\n}\n\nfunc (gm *gitManager) Clone(ctx context.Context, payload *core.Payload, oauth *core.Oauth) error {\n\trepoLink := payload.RepoLink\n\trepoItems := strings.Split(repoLink, \"/\")\n\trepoName := repoItems[len(repoItems)-1]\n\tcommitID := payload.BuildTargetCommit\n\n\tarchiveURL, err := urlmanager.GetCloneURL(payload.GitProvider, repoLink, repoName, commitID, payload.ForkSlug, payload.RepoSlug)\n\tif err != nil {\n\t\tgm.logger.Errorf(\"failed to get clone url for provider %s, error %v\", payload.GitProvider, err)\n\t\treturn err\n\t}\n\n\tgm.logger.Debugf(\"cloning from %s\", archiveURL)\n\terr = gm.downloadFile(ctx, archiveURL, commitID+\".zip\", oauth)\n\tif err != 
nil {\n\t\tgm.logger.Errorf(\"failed to download file %v\", err)\n\t\treturn err\n\t}\n\n\tif err = gm.initGit(ctx, payload, oauth); err != nil {\n\t\tgm.logger.Errorf(\"failed to initialize git, error %v\", err)\n\t\treturn err\n\t}\n\n\treturn nil\n}\n\n// downloadFile clones the archive from github and extracts the file if it is a zip file.\nfunc (gm *gitManager) downloadFile(ctx context.Context, archiveURL, fileName string, oauth *core.Oauth) error {\n\theader := getHeaderMap(oauth)\n\trespBody, stausCode, err := gm.request.MakeAPIRequest(ctx, http.MethodGet, archiveURL, nil, nil, header)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif stausCode >= http.StatusMultipleChoices {\n\t\treturn fmt.Errorf(\"received non 200 status code [%d]\", stausCode)\n\t}\n\n\terr = gm.copyAndExtractFile(ctx, respBody, fileName)\n\tif err != nil {\n\t\tgm.logger.Errorf(\"failed to copy file %v\", err)\n\t\treturn err\n\t}\n\treturn nil\n}\n\n// copyAndExtractFile copies the content of http response directly to the local storage\n// and extracts the file if it is a zip file.\nfunc (gm *gitManager) copyAndExtractFile(ctx context.Context, respBody []byte, path string) error {\n\tout, err := os.Create(path)\n\tif err != nil {\n\t\tgm.logger.Errorf(\"failed to create file err %v\", err)\n\t\treturn err\n\t}\n\t_, err = out.Write(respBody)\n\tif err != nil {\n\t\tgm.logger.Errorf(\"failed to write to file %v\", err)\n\t\tout.Close()\n\t\treturn err\n\t}\n\tout.Close()\n\n\t// if zip file, then unarchive the file in same path\n\tif filepath.Ext(path) == \".zip\" {\n\t\tzip := archiver.NewZip()\n\t\tzip.OverwriteExisting = true\n\t\tif err = zip.Unarchive(path, fmt.Sprintf(\"%s/clonedir\", filepath.Dir(path))); err != nil {\n\t\t\tgm.logger.Errorf(\"failed to unarchive file %v\", err)\n\t\t\treturn err\n\t\t}\n\t}\n\n\tcommands := []string{\n\t\tfmt.Sprintf(\"mkdir %s\", global.RepoDir),\n\t\tfmt.Sprintf(\"mv %s/clonedir/*/* %s\", filepath.Dir(path), global.RepoDir),\n\t}\n\n\terr = 
gm.execManager.ExecuteInternalCommands(ctx, core.RenameCloneFile, commands, filepath.Dir(path), nil, nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn err\n}\n\nfunc (gm *gitManager) initGit(ctx context.Context, payload *core.Payload, oauth *core.Oauth) error {\n\tbranch := payload.BranchName\n\trepoLink := payload.RepoLink\n\tif payload.GitProvider == core.Bitbucket && payload.ForkSlug != \"\" {\n\t\trepoLink = strings.Replace(repoLink, payload.RepoSlug, payload.ForkSlug, -1)\n\t}\n\n\trepoURL, perr := url.Parse(repoLink)\n\tif perr != nil {\n\t\treturn perr\n\t}\n\n\tif oauth.Type == core.Basic {\n\t\tdecodedToken, err := base64.StdEncoding.DecodeString(oauth.AccessToken)\n\t\tif err != nil {\n\t\t\tgm.logger.Errorf(\"Failed to decode basic oauth token for RepoID %s: %s\", payload.RepoID, err)\n\t\t\treturn err\n\t\t}\n\n\t\tcreds := strings.Split(string(decodedToken), \":\")\n\t\trepoURL.User = url.UserPassword(creds[0], creds[1])\n\t} else {\n\t\trepoURL.User = url.UserPassword(\"x-token-auth\", oauth.AccessToken)\n\t\tif payload.GitProvider == core.GitLab {\n\t\t\trepoURL.User = url.UserPassword(\"oauth2\", oauth.AccessToken)\n\t\t}\n\t}\n\n\turlWithToken := repoURL.String()\n\tcommands := []string{\n\t\t\"git init\",\n\t\tfmt.Sprintf(\"git remote add origin %s.git\", repoLink),\n\t\tfmt.Sprintf(\"git config --global url.%s.InsteadOf %s\", urlWithToken, repoLink),\n\t\tfmt.Sprintf(\"git fetch --depth=1 origin +%s:refs/remotes/origin/%s\", payload.BuildTargetCommit, branch),\n\t\tfmt.Sprintf(\"git config --global --remove-section url.%s\", urlWithToken),\n\t\tfmt.Sprintf(\"git checkout --progress --force -B %s refs/remotes/origin/%s\", branch, branch),\n\t}\n\tif err := gm.execManager.ExecuteInternalCommands(ctx, core.InitGit, commands, global.RepoDir, nil, nil); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}\n\nfunc (gm *gitManager) DownloadFileByCommit(ctx context.Context, gitProvider, repoSlug,\n\tcommitID, filePath string, oauth *core.Oauth) 
(string, error) {\n\tdownloadURL, err := urlmanager.GetFileDownloadURL(gitProvider, commitID, repoSlug, filePath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\theader := getHeaderMap(oauth)\n\trespBody, stausCode, err := gm.request.MakeAPIRequest(ctx, http.MethodGet, downloadURL, nil, nil, header)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tif stausCode >= http.StatusMultipleChoices {\n\t\treturn \"\", fmt.Errorf(\"received non 200 status code [%d]\", stausCode)\n\t}\n\tpath := utils.GenerateUUID() + \".yml\"\n\tout, err := os.Create(path)\n\tif err != nil {\n\t\tgm.logger.Errorf(\"failed to create file err %v\", err)\n\t\treturn \"\", err\n\t}\n\t_, err = out.Write(respBody)\n\tif err != nil {\n\t\tgm.logger.Errorf(\"failed to copy file %v\", err)\n\t\tout.Close()\n\t\treturn \"\", err\n\t}\n\tout.Close()\n\treturn path, nil\n}\n\nfunc getHeaderMap(oauth *core.Oauth) map[string]string {\n\theader := map[string]string{}\n\n\theader[authorization] = fmt.Sprintf(\"%s %s\", oauth.Type, oauth.AccessToken)\n\treturn header\n}\n"
  },
  {
    "path": "pkg/gitmanager/setup_test.go",
    "content": "package gitmanager\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io\"\n\t\"net/http\"\n\t\"net/http/httptest\"\n\t\"os\"\n\t\"strings\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/mocks\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/command\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/requestutils\"\n\t\"github.com/LambdaTest/test-at-scale/testutils\"\n\t\"github.com/cenkalti/backoff/v4\"\n\t\"github.com/stretchr/testify/mock\"\n)\n\nfunc Test_downloadFile(t *testing.T) {\n\tserver := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tif r.URL.Path != \"/archive/zipfile.zip\" {\n\t\t\tt.Errorf(\"Expected to request '/archive/zipfile.zip', got: %v\", r.URL)\n\t\t\treturn\n\t\t}\n\t\treqToken := r.Header.Get(\"Authorization\")\n\t\tsplitToken := strings.Split(reqToken, \"Bearer \")\n\t\texpectedOauth := &core.Oauth{AccessToken: \"dummy\", Type: core.Bearer}\n\t\tif splitToken[1] != expectedOauth.AccessToken {\n\t\t\tt.Errorf(\"Invalid clone token, expected: %v\\nreceived: %v\", expectedOauth.AccessToken, splitToken[1])\n\t\t\tw.WriteHeader(http.StatusUnauthorized)\n\t\t} else {\n\t\t\tw.WriteHeader(http.StatusOK)\n\t\t}\n\t}))\n\tdefer server.Close()\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't get logger, error: %v\", err)\n\t\treturn\n\t}\n\tvar httpClient http.Client\n\texecManager := new(mocks.ExecutionManager)\n\texecManager.On(\"ExecuteInternalCommands\",\n\t\tmock.AnythingOfType(\"*context.emptyCtx\"),\n\t\tmock.AnythingOfType(\"core.CommandType\"),\n\t\tmock.AnythingOfType(\"[]string\"),\n\t\tmock.AnythingOfType(\"string\"),\n\t\tmock.AnythingOfType(\"map[string]string\"),\n\t\tmock.AnythingOfType(\"map[string]string\")).Return(\n\t\tfunc(ctx context.Context, commandType 
core.CommandType, commands []string, cwd string, envMap, secretData map[string]string) error {\n\t\t\treturn nil\n\t\t},\n\t)\n\tgm := &gitManager{\n\t\tlogger:      logger,\n\t\thttpClient:  httpClient,\n\t\texecManager: execManager,\n\t\trequest:     requestutils.New(logger, global.DefaultAPITimeout, &backoff.StopBackOff{}),\n\t}\n\tarchiveURL := server.URL + \"/archive/zipfile.zip\"\n\tfileName := \"copyAndExtracted\"\n\toauth := &core.Oauth{AccessToken: \"dummy\", Type: core.Bearer}\n\terr2 := gm.downloadFile(context.TODO(), archiveURL, fileName, oauth)\n\tdefer removeFile(fileName) // remove the file created after downloading and extracting\n\tif err2 != nil {\n\t\tt.Errorf(\"Error: %v\", err2)\n\t}\n}\n\nfunc Test_copyAndExtractFile(t *testing.T) {\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't get logger, error: %v\", err)\n\t}\n\tvar httpClient http.Client\n\texecManager := new(mocks.ExecutionManager)\n\texecManager.On(\"ExecuteInternalCommands\",\n\t\tmock.AnythingOfType(\"*context.emptyCtx\"),\n\t\tmock.AnythingOfType(\"core.CommandType\"),\n\t\tmock.AnythingOfType(\"[]string\"),\n\t\tmock.AnythingOfType(\"string\"),\n\t\tmock.AnythingOfType(\"map[string]string\"),\n\t\tmock.AnythingOfType(\"map[string]string\")).Return(\n\t\tfunc(ctx context.Context, commandType core.CommandType, commands []string, cwd string, envMap, secretData map[string]string) error {\n\t\t\treturn nil\n\t\t},\n\t)\n\tgm := &gitManager{\n\t\tlogger:      logger,\n\t\thttpClient:  httpClient,\n\t\texecManager: execManager,\n\t\trequest:     requestutils.New(logger, global.DefaultAPITimeout, &backoff.StopBackOff{}),\n\t}\n\tfileBody := \"Hello World!\"\n\tresp := http.Response{\n\t\tBody: io.NopCloser(bytes.NewBufferString(fileBody)),\n\t}\n\tpath := \"newFile\"\n\tdefer removeFile(path)\n\trespBodyBuffer := bytes.Buffer{}\n\t_, err = io.Copy(&respBodyBuffer, resp.Body)\n\tif err != nil {\n\t\tt.Errorf(\"Error: %v\", err)\n\t\treturn\n\t}\n\terr2 := 
gm.copyAndExtractFile(context.TODO(), respBodyBuffer.Bytes(), path)\n\tif err2 != nil {\n\t\tt.Errorf(\"Error: %v\", err2)\n\t\treturn\n\t}\n\n\tfileContent, err := os.ReadFile(\"./newFile\")\n\tif err != nil {\n\t\tt.Errorf(\"Error: %v\", err)\n\t\treturn\n\t}\n\n\tif string(fileContent) != fileBody {\n\t\tt.Errorf(\"Expected file content: %v\\nReceived: %v\", fileBody, string(fileContent))\n\t}\n}\n\nfunc TestClone(t *testing.T) {\n\tcheckClone := func(t *testing.T) {\n\t\tserver := httptest.NewServer( // mock server\n\t\t\thttp.FileServer(http.Dir(\"../../testutils/testdata/archive\")), // mock data stored at tests/mock/index.txt\n\t\t)\n\t\tdefer server.Close()\n\t\tglobal.TestEnv = true\n\t\tglobal.TestServer = server.URL\n\t\tlogger, err := lumber.NewLogger(lumber.LoggingConfig{ConsoleLevel: lumber.Debug}, true, 1)\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Logger can't be established\")\n\t\t}\n\t\tazureClient := new(mocks.AzureClient)\n\t\tsecretParser := new(mocks.SecretParser)\n\t\texecManager := command.NewExecutionManager(secretParser, azureClient, logger)\n\t\tgm := NewGitManager(logger, execManager)\n\n\t\tpayload, err := testutils.GetPayload()\n\t\tif err != nil {\n\t\t\tt.Errorf(\"Unable to load payload, error %v\", err)\n\t\t}\n\n\t\tpayload.RepoLink = server.URL\n\t\tpayload.BuildTargetCommit = \"testRepo\"\n\t\toauth := &core.Oauth{AccessToken: \"dummy\", Type: core.Bearer}\n\t\tcommitID := payload.BuildTargetCommit\n\n\t\terr = gm.Clone(context.TODO(), payload, oauth)\n\t\tglobal.TestEnv = false\n\t\texpErr := \"opening zip archive for reading: creating reader: zip: not a valid zip file\"\n\n\t\tdefer removeFile(\"testRepo\")\n\t\tdefer removeFile(commitID + \".zip\")\n\t\tdefer removeFile(global.RepoDir)\n\n\t\tif err != nil && err.Error() != expErr {\n\t\t\tt.Errorf(\"Error: %v\", err)\n\t\t\treturn\n\t\t}\n\n\t\t_, err2 := os.OpenFile(commitID+\".zip\", 0440, 0440)\n\t\t_, err3 := os.OpenFile(\"zipFile\", 0440, 0440)\n\n\t\t// check if 
downloaded file exist now\n\t\tif errors.Is(err2, os.ErrNotExist) {\n\t\t\tt.Errorf(\"Could not find the downloaded file, got error: %v\", err2)\n\t\t\treturn\n\t\t}\n\t\tif err.Error() == expErr {\n\t\t\treturn\n\t\t}\n\t\t// check if unzipped folder exist\n\t\tif errors.Is(err3, os.ErrNotExist) {\n\t\t\tt.Errorf(\"Could not find the unzipped folder, got error: %v\", err3)\n\t\t\treturn\n\t\t}\n\n\t\t// global.RepoDir does not exist on local\n\t\tif err != nil && (errors.Is(err, os.ErrNotExist)) == false {\n\t\t\tt.Errorf(\"Expected error: %v, Received: %v\\n\", os.ErrNotExist, err)\n\t\t\treturn\n\t\t}\n\n\t\tif err == nil {\n\t\t\tif _, err4 := os.OpenFile(global.RepoDir, 0440, 0440); errors.Is(err4, os.ErrNotExist) {\n\t\t\t\tt.Errorf(\"Failed to find file in global repodir, got error: %v\", err4)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\tt.Run(\"Check the clone function\", func(t *testing.T) {\n\t\tcheckClone(t)\n\t})\n}\n\nfunc removeFile(path string) {\n\terr := os.RemoveAll(path)\n\tif err != nil {\n\t\tfmt.Println(\"error in removing!!\")\n\t}\n}\n"
  },
  {
    "path": "pkg/global/nucleusconstants.go",
    "content": "package global\n\nimport \"time\"\n\n// TestEnv : to set test env for urlmanager package\nvar TestEnv bool = false\n\n// TestServer : store server URL of test server while doing mock testing\nvar TestServer string\n\n// All constant related to nucleus\nconst (\n\tCoverageManifestFileName   = \"manifest.json\"\n\tHomeDir                    = \"/home/nucleus\"\n\tWorkspaceCacheDir          = \"/workspace-cache\"\n\tRepoDir                    = HomeDir + \"/repo\"\n\tCodeCoverageDir            = RepoDir + \"/coverage\"\n\tRepoCacheDir               = RepoDir + \"/__tas\"\n\tDefaultAPITimeout          = 45 * time.Second\n\tDefaultGitCloneTimeout     = 30 * time.Minute\n\tSamplingTime               = 5 * time.Millisecond\n\tRepoSecretPath             = \"/vault/secrets/reposecrets\"\n\tOauthSecretPath            = \"/vault/secrets/oauth\"\n\tNeuronRemoteHost           = \"http://neuron-service.phoenix.svc.cluster.local\"\n\tBlockTestFileLocation      = \"/tmp/blocktests.json\"\n\tSecretRegex                = `\\${{\\s*secrets\\.(.*?)\\s*}}` // nolint: gosec\n\tExecutionResultChunkSize   = 50\n\tTestLocatorsDelimiter      = \"#TAS#\"\n\tExpiryDelta                = 15 * time.Minute\n\tNewTASVersion              = 2\n\tModulePath                 = \"MODULE_PATH\"\n\tPackageJSON                = \"package.json\"\n\tSubModuleName              = \"SUBMODULE_NAME\"\n\tArgPattern                 = \"--pattern\"\n\tArgConfig                  = \"--config\"\n\tArgDiff                    = \"--diff\"\n\tArgCommand                 = \"--command\"\n\tArgLocator                 = \"--locator-file\"\n\tArgFrameworVersion         = \"--frameworkVersion\"\n\tDefaultTASVersion          = \"1.0.0\"\n\tTASYmlConfigurationDocLink = \"https://www.lambdatest.com/support/docs/tas-configuring-tas-yml\"\n)\n\n// FrameworkRunnerMap is map of framework with there respective runner location\nvar FrameworkRunnerMap = map[string]string{\n\t\"jasmine\": 
\"./node_modules/.bin/jasmine-runner\",\n\t\"mocha\":   \"./node_modules/.bin/mocha-runner\",\n\t\"jest\":    \"./node_modules/.bin/jest-runner\",\n\t\"golang\":  \"/home/nucleus/server\",\n\t\"junit\":   \"java\",\n}\n\n// APIHostURLMap is map of git provider with there api url\nvar APIHostURLMap = map[string]string{\n\t\"github\":    \"https://api.github.com/repos\",\n\t\"gitlab\":    \"https://gitlab.com/api/v4/projects\",\n\t\"bitbucket\": \"https://api.bitbucket.org/2.0\",\n}\n\n// InstallRunnerCmds  are list of command used to install custom runner\nvar InstallRunnerCmds = []string{\"tar -xzf /custom-runners/custom-runners.tgz\"}\n\n// NeuronHost is neuron host end point\nvar NeuronHost string\n\n// SetNeuronHost is setter for NeuronHost\nfunc SetNeuronHost(host string) {\n\tNeuronHost = host\n}\n\nvar FrameworkLanguageMap = map[string]string{\n\t\"jasmine\": \"javascript\",\n\t\"mocha\":   \"javascript\",\n\t\"jest\":    \"javascript\",\n\t\"golang\":  \"golang\",\n\t\"junit\":   \"java\",\n}\n\n// ValidYMLVersions defines all valid yml version\nvar ValidYMLVersions = []int{1, 2}\n"
  },
  {
    "path": "pkg/global/synapseconstants.go",
    "content": "package global\n\nimport (\n\t\"time\"\n)\n\n// all constant related to synapse\nconst (\n\tGracefulTimeout       = 100 * time.Second\n\tProxyServerPort       = \"8000\"\n\tDirectoryPermissions  = 0755\n\tFilePermissions       = 0755\n\tVaultSecretDir        = \"/vault/secrets\"\n\tGitConfigFileName     = \"oauth\"\n\tRepoSecretsFileName   = \"reposecrets\"\n\tSynapseContainerURL   = \"http://synapse:8000\"\n\tNetworkEnvName        = \"NetworkName\"\n\tAutoRemoveEnv         = \"AutoRemove\"\n\tSynapseHostEnv        = \"synapsehost\"\n\tLocalEnv              = \"local\"\n\tNetworkName           = \"test-at-scale\"\n\tAutoRemove            = true\n\tLocal                 = true\n\tMaxConnectionAttempts = 10\n\tExecutionLogsPath     = \"/var/log/synapse\"\n\tPingWait              = 30 * time.Second\n\tMaxMessageSize        = 4096\n)\n\n// SocketURL lambdatest url for synapse socket\nvar SocketURL map[string]string\n\n// TASCloudURL url to send reports\nvar TASCloudURL map[string]string\n\nfunc init() {\n\tSocketURL = map[string]string{\n\t\t\"stage\": \"wss://stage-api-tas.lambdatestinternal.com/ws/\",\n\t\t\"dev\":   \"ws://host.docker.internal/ws/\",\n\t\t\"prod\":  \"wss://api.tas.lambdatest.com/ws/\",\n\t\t\"pi\":    \"wss://api.tas-pi.lambdatest.com/ws/\",\n\t}\n\tTASCloudURL = map[string]string{\n\t\t\"stage\": \"https://stage-api-tas.lambdatestinternal.com\",\n\t\t\"dev\":   \"http://host.docker.internal\",\n\t\t\"prod\":  \"https://api.tas.lambdatest.com\",\n\t\t\"pi\":    \"https://api.tas-pi.lambdatest.com\",\n\t}\n}\n"
  },
  {
    "path": "pkg/global/version.go",
    "content": "package global\n\nimport \"os\"\n\nvar (\n\t// NucleusBinaryVersion Nucleus version\n\tNucleusBinaryVersion = os.Getenv(\"VERSION\")\n\t// SynapseBinaryVersion Synapse version\n\tSynapseBinaryVersion = os.Getenv(\"VERSION\")\n)\n"
  },
  {
    "path": "pkg/listsubmoduleservice/setup.go",
    "content": "package listsubmoduleservice\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"net/http\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/utils\"\n)\n\ntype subModuleListService struct {\n\tlogger                lumber.Logger\n\trequests              core.Requests\n\tsubModuleListEndpoint string\n}\n\nfunc New(request core.Requests, logger lumber.Logger) core.ListSubModuleService {\n\treturn &subModuleListService{\n\t\tlogger:                logger,\n\t\trequests:              request,\n\t\tsubModuleListEndpoint: global.NeuronHost + \"/submodule-list\",\n\t}\n}\n\nfunc (s *subModuleListService) Send(ctx context.Context, buildID string, totalSubmodule int) error {\n\tsubModuleList := core.SubModuleList{\n\t\tBuildID:        buildID,\n\t\tTotalSubModule: totalSubmodule,\n\t}\n\treqBody, err := json.Marshal(&subModuleList)\n\tif err != nil {\n\t\ts.logger.Errorf(\"error while json marshal %v\", err)\n\t\treturn err\n\t}\n\tquery, headers := utils.GetDefaultQueryAndHeaders()\n\tif _, statusCode, err := s.requests.MakeAPIRequest(ctx, http.MethodPost, s.subModuleListEndpoint,\n\t\treqBody, query, headers); err != nil || statusCode != 200 {\n\t\ts.logger.Errorf(\"error while making submodule-list api call status code %d, err %v\", statusCode, err)\n\t\treturn err\n\t}\n\treturn nil\n}\n"
  },
  {
    "path": "pkg/logstream/mask.go",
    "content": "package logstream\n\nimport (\n\t\"io\"\n\t\"strings\"\n)\n\nconst (\n\tmaskedStr = \"****************\"\n)\n\n// masker wraps a stream writer with a masker\ntype masker struct {\n\tw io.Writer\n\tr *strings.Replacer\n}\n\n// NewMasker returns a masker that wraps io.Writer w.\nfunc NewMasker(w io.Writer, secretData map[string]string) io.Writer {\n\tvar oldnew []string\n\tfor _, secret := range secretData {\n\t\tif secret == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tfor _, part := range strings.Split(secret, \"\\n\") {\n\t\t\tpart = strings.TrimSpace(part)\n\t\t\t// avoid masking empty or single character strings.\n\t\t\tif len(part) < 2 {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\toldnew = append(oldnew, part, maskedStr)\n\t\t}\n\t}\n\tif len(oldnew) == 0 {\n\t\treturn w\n\t}\n\treturn &masker{\n\t\tw: w,\n\t\tr: strings.NewReplacer(oldnew...),\n\t}\n}\n\n// Write writes p to the base writer. The method scans for any\n// sensitive data in p and masks before writing.\nfunc (m *masker) Write(p []byte) (n int, err error) {\n\t_, err = m.w.Write([]byte(m.r.Replace(string(p))))\n\treturn len(p), err\n}\n"
  },
  {
    "path": "pkg/logstream/mask_test.go",
    "content": "package logstream\n\nimport (\n\t\"bytes\"\n\t\"testing\"\n)\n\nconst keyLine = `{\n  \"token\":\"dXNlcm5hbWU6cGFzc3dvcmQ=\"\n}`\n\nfunc TestReplace(t *testing.T) {\n\tsecrets := map[string]string{\n\t\t\"cipher\":  \"lazy dog\",\n\t\t\"cipher2\": \"\",\n\t}\n\tbuf := &bytes.Buffer{}\n\tw := NewMasker(buf, secrets)\n\tw.Write([]byte(\"The quick brown fox jumps over the lazy dog\")) // nolint:errcheck\n\n\tif got, want := buf.String(), \"The quick brown fox jumps over the ****************\"; got != want {\n\t\tt.Errorf(\"Want masked string %s, got %s\", want, got)\n\t}\n}\n\nfunc TestReplaceMultiline(t *testing.T) {\n\tkey := `\n-----BEGIN RSA PRIVATE KEY-----\nMIICXAIBAAKBgQCqGKukO1De7zhZj6+H0qtjTkVxwTCpvKe4eCZ0FPqri0cb2JZfXJ/DgYSF6vUp\nwmJG8wVQZKjeGcjDOL5UlsuusFncCzWBQ7RKNUSesmQRMSGkVb1/3j+skZ6UtW+5u09lHNsj6tQ5\n1s1SPrCBkedbNf0Tp0GbMJDyR4e9T04ZZwIDAQABAoGAFijko56+qGyN8M0RVyaRAXz++xTqHBLh\n3tx4VgMtrQ+WEgCjhoTwo23KMBAuJGSYnRmoBZM3lMfTKevIkAidPExvYCdm5dYq3XToLkkLv5L2\npIIVOFMDG+KESnAFV7l2c+cnzRMW0+b6f8mR1CJzZuxVLL6Q02fvLi55/mbSYxECQQDeAw6fiIQX\nGukBI4eMZZt4nscy2o12KyYner3VpoeE+Np2q+Z3pvAMd/aNzQ/W9WaI+NRfcxUJrmfPwIGm63il\nAkEAxCL5HQb2bQr4ByorcMWm/hEP2MZzROV73yF41hPsRC9m66KrheO9HPTJuo3/9s5p+sqGxOlF\nL0NDt4SkosjgGwJAFklyR1uZ/wPJjj611cdBcztlPdqoxssQGnh85BzCj/u3WqBpE2vjvyyvyI5k\nX6zk7S0ljKtt2jny2+00VsBerQJBAJGC1Mg5Oydo5NwD6BiROrPxGo2bpTbu/fhrT8ebHkTz2epl\nU9VQQSQzY1oZMVX8i1m5WUTLPz2yLJIBQVdXqhMCQBGoiuSoSjafUhV7i1cEGpb88h5NBYZzWXGZ\n37sJ5QsW+sJyoNde3xH8vdXhzU7eT82D6X/scw9RZz+/6rCJ4p0=\n-----END RSA PRIVATE KEY-----`\n\n\tline := `> MIICXAIBAAKBgQCqGKukO1De7zhZj6+H0qtjTkVxwTCpvKe4eCZ0FPqri0cb2JZfXJ/DgYSF6vUp`\n\n\tsecrets := map[string]string{\n\t\t\"cipher\": key,\n\t}\n\tbuf := &bytes.Buffer{}\n\tw := NewMasker(buf, secrets)\n\tw.Write([]byte(line)) // nolint:errcheck\n\n\tif got, want := buf.String(), \"> ****************\"; got != want {\n\t\tt.Errorf(\"Want masked string %s, got %s\", want, got)\n\t}\n}\n\nfunc TestSkipSingleCharacterMask(t *testing.T) 
{\n\tsecrets := map[string]string{\n\t\t\"cipher\": \"l\",\n\t}\n\tbuf := &bytes.Buffer{}\n\tw := NewMasker(buf, secrets)\n\tw.Write([]byte(\"The quick brown fox jumps over the lazy dog\")) // nolint:errcheck\n\n\tif got, want := buf.String(), \"The quick brown fox jumps over the lazy dog\"; got != want {\n\t\tt.Errorf(\"Want masked string %s, got %s\", want, got)\n\t}\n}\n\nfunc TestReplaceMultilineJson(t *testing.T) {\n\tkey := keyLine\n\n\tline := keyLine\n\n\tsecrets := map[string]string{\n\t\t\"cipher\": key,\n\t}\n\tbuf := &bytes.Buffer{}\n\tw := NewMasker(buf, secrets)\n\tw.Write([]byte(line)) // nolint:errcheck\n\n\tif got, want := buf.String(), \"{\\n  ****************\\n}\"; got != want {\n\t\tt.Errorf(\"Want masked string %s, got %s\", want, got)\n\t}\n}\n"
  },
  {
    "path": "pkg/logwriter/setup.go",
    "content": "package logwriter\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"fmt\"\n\t\"io\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n)\n\ntype (\n\tBufferLogWriter struct {\n\t\tsubModule string\n\t\tbuffer    *bytes.Buffer\n\t\tlogger    lumber.Logger\n\t}\n\n\tAzureLogWriter struct {\n\t\tazureClient core.AzureClient\n\t\tpurpose     core.SASURLPurpose\n\t\tlogger      lumber.Logger\n\t}\n)\n\nfunc NewAzureLogWriter(azureClient core.AzureClient,\n\tpurpose core.SASURLPurpose,\n\tlogger lumber.Logger) core.LogWriterStrategy {\n\treturn &AzureLogWriter{\n\t\tazureClient: azureClient,\n\t\tpurpose:     purpose,\n\t\tlogger:      logger,\n\t}\n}\n\nfunc NewBufferLogWriter(subModule string,\n\tbuffer *bytes.Buffer,\n\tlogger lumber.Logger) core.LogWriterStrategy {\n\treturn &BufferLogWriter{\n\t\tsubModule: subModule,\n\t\tbuffer:    buffer,\n\t\tlogger:    logger,\n\t}\n}\n\nfunc (b *BufferLogWriter) Write(ctx context.Context, reader io.Reader) <-chan error {\n\terrChan := make(chan error, 1)\n\tgo func() {\n\t\tif _, err := fmt.Fprintf(b.buffer, \"\\n<------ PRE RUN for %s  ------> \\n\", b.subModule); err != nil {\n\t\t\tb.logger.Debugf(\"Error writing the logs separator for submodule %s, error %v\", b.subModule, err)\n\t\t\terrChan <- err\n\t\t\treturn\n\t\t}\n\t\tif _, err := b.buffer.ReadFrom(reader); err != nil {\n\t\t\tb.logger.Debugf(\"Error writing the logs to buffer for submodule %s, error %v\", b.subModule, err)\n\t\t\terrChan <- err\n\t\t\treturn\n\t\t}\n\t\tclose(errChan)\n\t\tb.logger.Debugf(\"written logs for sub module %s to buffer\", b.subModule)\n\t}()\n\treturn errChan\n}\n\nfunc (a *AzureLogWriter) Write(ctx context.Context, reader io.Reader) <-chan error {\n\terrChan := make(chan error, 1)\n\tgo func() {\n\t\tsasURL, err := a.azureClient.GetSASURL(ctx, a.purpose, nil)\n\t\tif err != nil {\n\t\t\ta.logger.Errorf(\"failed to genereate SAS URL for purpose %s, error: %v\", 
a.purpose, err)\n\t\t\terrChan <- err\n\t\t\treturn\n\t\t}\n\t\tblobPath, err := a.azureClient.CreateUsingSASURL(ctx, sasURL, reader, \"text/plain\")\n\t\tif err != nil {\n\t\t\ta.logger.Errorf(\"failed to create SAS URL for path %s, error: %v\", blobPath, err)\n\t\t\terrChan <- err\n\t\t\treturn\n\t\t}\n\t\tclose(errChan)\n\t\ta.logger.Debugf(\"created blob path %s\", blobPath)\n\t}()\n\treturn errChan\n}\n"
  },
  {
    "path": "pkg/logwriter/setup_test.go",
    "content": "package logwriter\n\nimport (\n\t\"context\"\n\t\"io\"\n\t\"strings\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/mocks\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/testutils\"\n\t\"github.com/stretchr/testify/mock\"\n)\n\nfunc Test_azure_write_logger_strategy(t *testing.T) {\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't initialize logger, error: %v\", err)\n\t}\n\tazureClientGetSASURL := new(mocks.AzureClient)\n\tmockUtil(azureClientGetSASURL, \"getSASURL\", \"createUsingSASURL\", \"error in GetSASURL\", \"error in CreateUsingSASURL\", true, false)\n\n\tazureClientCreateSASURL := new(mocks.AzureClient)\n\tmockUtil(azureClientCreateSASURL, \"getSASURL\", \"createUsingSASURL\", \"error in GetSASURL\", \"error in CreateUsingSASURL\", false, true)\n\n\tazureClientSuccess := new(mocks.AzureClient)\n\tmockUtil(azureClientSuccess, \"getSASURL\", \"createUsingSASURL\", \"error in GetSASURL\", \"error in CreateUsingSASURL\", false, false)\n\n\terrGetSASURL := make(chan error, 1)\n\tdefer func() { close(errGetSASURL) }()\n\n\terrCreateUsingSASURL := make(chan error, 1)\n\tdefer func() { close(errCreateUsingSASURL) }()\n\n\terrSuccess := make(chan error, 1)\n\tdefer func() { close(errSuccess) }()\n\n\ttype fields struct {\n\t\tazureClient core.AzureClient\n\t}\n\ttype args struct {\n\t\tctx      context.Context\n\t\tblobPath string\n\t\treader   io.Reader\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\tfields  fields\n\t\targs    args\n\t\twant    <-chan error\n\t\twantErr bool\n\t}{\n\t\t{\"Test StoreCommandLogs for getSASURL error\",\n\t\t\tfields{\n\t\t\t\tazureClient: azureClientGetSASURL,\n\t\t\t},\n\t\t\targs{\n\t\t\t\tctx:      context.TODO(),\n\t\t\t\tblobPath: \"blobpath\",\n\t\t\t\treader:   &strings.Reader{},\n\t\t\t},\n\t\t\terrGetSASURL,\n\t\t\ttrue,\n\t\t},\n\t\t{\"Test StoreCommandLogs 
for CreateUsingSASURL error\",\n\t\t\tfields{\n\t\t\t\tazureClient: azureClientCreateSASURL,\n\t\t\t},\n\t\t\targs{\n\t\t\t\tctx:      context.TODO(),\n\t\t\t\tblobPath: \"blobpath\",\n\t\t\t\treader:   &strings.Reader{},\n\t\t\t},\n\t\t\terrCreateUsingSASURL,\n\t\t\ttrue,\n\t\t},\n\t\t{\"Test StoreCommandLogs for success\",\n\t\t\tfields{\n\t\t\t\tazureClient: azureClientSuccess,\n\t\t\t},\n\t\t\targs{\n\t\t\t\tctx:      context.TODO(),\n\t\t\t\tblobPath: \"blobpath\",\n\t\t\t\treader:   &strings.Reader{},\n\t\t\t},\n\t\t\terrSuccess,\n\t\t\tfalse,\n\t\t},\n\t}\n\terrGetSASURL <- errs.New(\"error in GetSASURL\")\n\terrCreateUsingSASURL <- errs.New(\"error in CreateUsingSASURL\")\n\terrSuccess <- errs.New(\"\")\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tm := &AzureLogWriter{\n\t\t\t\tlogger:      logger,\n\t\t\t\tpurpose:     core.PurposeCache,\n\t\t\t\tazureClient: tt.fields.azureClient,\n\t\t\t}\n\t\t\tgot := m.Write(tt.args.ctx, tt.args.reader)\n\t\t\tif !tt.wantErr {\n\t\t\t\tif len(got) != 0 {\n\t\t\t\t\tt.Errorf(\"Expected channel to be empty, received: %v\", <-got)\n\t\t\t\t}\n\t\t\t\treturn\n\t\t\t}\n\t\t\treceived := <-got\n\t\t\twant := <-tt.want\n\t\t\tif received.Error() != want.Error() {\n\t\t\t\tt.Errorf(\"manager.StoreCommandLogs() = %+v, want %+v\", received, want)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc mockUtil(azureClient *mocks.AzureClient, msgGet, msgCreate, errGet, errCreate string, wantErrGet, wantErrCreate bool) {\n\tvar x map[string]interface{}\n\tazureClient.On(\"GetSASURL\", mock.AnythingOfType(\"*context.emptyCtx\"),\n\t\tmock.AnythingOfType(\"core.SASURLPurpose\"), x).Return(\n\t\tfunc(ctx context.Context, purpose core.SASURLPurpose, data map[string]interface{}) string {\n\t\t\treturn msgGet\n\t\t},\n\t\tfunc(ctx context.Context, purpose core.SASURLPurpose, data map[string]interface{}) error {\n\t\t\tif !wantErrGet {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\treturn 
errs.New(errGet)\n\t\t})\n\n\tazureClient.On(\"CreateUsingSASURL\", mock.AnythingOfType(\"*context.emptyCtx\"),\n\t\tmock.AnythingOfType(\"string\"), mock.AnythingOfType(\"*strings.Reader\"), \"text/plain\").Return(\n\t\tfunc(ctx context.Context, sasURL string, reader io.Reader, mimeType string) string {\n\t\t\treturn msgCreate\n\t\t},\n\t\tfunc(ctx context.Context, sasURL string, reader io.Reader, mimeType string) error {\n\t\t\tif !wantErrCreate {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\treturn errs.New(errCreate)\n\t\t})\n}\n"
  },
  {
    "path": "pkg/lumber/logio.go",
    "content": "package lumber\n\nimport (\n\t\"bytes\"\n)\n\n// Writer must be closed when finished to flush buffered data to the logger.\ntype Writer struct {\n\t// Log specifies the logger to which the Writer will write messages.\n\t// The Writer will panic if Log is unspecified.\n\tLog  Logger\n\tbuff bytes.Buffer\n}\n\n// NewWriter returns a new Writer that writes to the provided Logger.\nfunc NewWriter(log Logger) *Writer {\n\treturn &Writer{Log: log}\n}\n\n// Write writes the provided bytes to the underlying logger at the configured\n// log level and returns the length of the bytes.\n//\n// Write will split the input on newlines and post each line as a new log entry\n// to the logger.\nfunc (w *Writer) Write(bs []byte) (n int, err error) {\n\tn = len(bs)\n\tfor len(bs) > 0 {\n\t\tbs = w.writeLine(bs)\n\t}\n\treturn n, nil\n}\n\n// writeLine writes a single line from the input, returning the remaining,\n// unconsumed bytes.\nfunc (w *Writer) writeLine(line []byte) (remaining []byte) {\n\tidx := bytes.IndexByte(line, '\\n')\n\tif idx < 0 {\n\t\t// If there are no newlines, buffer the entire string.\n\t\tw.buff.Write(line)\n\t\treturn nil\n\t}\n\n\t// Split on the newline, buffer and flush the left.\n\tline, remaining = line[:idx], line[idx+1:]\n\n\t// Fast path: if we don't have a partial message from a previous write\n\t// in the buffer, skip the buffer and log directly.\n\tif w.buff.Len() == 0 {\n\t\tw.log(line)\n\t\treturn\n\t}\n\n\tw.buff.Write(line)\n\n\t// Log empty messages in the middle of the stream so that we don't lose\n\t// information when the user writes \"foo\\n\\nbar\".\n\tw.flush(true /* allowEmpty */)\n\n\treturn remaining\n}\n\n// Close closes the writer, flushing any buffered data in the process.\nfunc (w *Writer) Close() error {\n\treturn w.Sync()\n}\n\n// Sync flushes buffered data to the logger as a new log entry even if it\n// doesn't contain a newline.\nfunc (w *Writer) Sync() error {\n\t// Don't allow empty messages on explicit Sync 
calls or on Close\n\t// because we don't want an extraneous empty message at the end of the\n\t// stream -- it's common for files to end with a newline.\n\tw.flush(false)\n\treturn nil\n}\n\n// flush flushes the buffered data to the logger, allowing empty messages only\n// if the bool is set.\nfunc (w *Writer) flush(allowEmpty bool) {\n\tif allowEmpty || w.buff.Len() > 0 {\n\t\tw.log(w.buff.Bytes())\n\t}\n\tw.buff.Reset()\n}\n\nfunc (w *Writer) log(b []byte) {\n\tw.Log.Debugf(\"%s\", string(b))\n}\n"
  },
  {
    "path": "pkg/lumber/logrus.go",
    "content": "package lumber\n\nimport (\n\t\"io\"\n\t\"os\"\n\n\t\"github.com/sirupsen/logrus\"\n\tlumberjack \"gopkg.in/natefinch/lumberjack.v2\"\n)\n\ntype logrusLogEntry struct {\n\tentry *logrus.Entry\n}\n\ntype logrusLogger struct {\n\tlogger *logrus.Logger\n}\n\nfunc getFormatter(isJSON bool) logrus.Formatter {\n\tif isJSON {\n\t\treturn &logrus.JSONFormatter{}\n\t}\n\treturn &logrus.TextFormatter{\n\t\tFullTimestamp:          true,\n\t\tDisableLevelTruncation: true,\n\t}\n}\n\nfunc newLogrusLogger(config LoggingConfig, verbose bool) (Logger, error) {\n\tlogLevel := config.ConsoleLevel\n\tif logLevel == \"\" {\n\t\tlogLevel = config.FileLevel\n\t}\n\t// command line args take highest precedence\n\tif verbose {\n\t\tlogLevel = \"debug\"\n\t}\n\n\tlevel, err := logrus.ParseLevel(logLevel)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tstdOutHandler := os.Stdout\n\tfileHandler := &lumberjack.Logger{\n\t\tFilename: config.FileLocation,\n\t\tMaxSize:  100,\n\t\tCompress: true,\n\t\tMaxAge:   28,\n\t}\n\tlLogger := &logrus.Logger{\n\t\tOut:       stdOutHandler,\n\t\tFormatter: getFormatter(config.ConsoleJSONFormat),\n\t\tHooks:     make(logrus.LevelHooks),\n\t\tLevel:     level,\n\t}\n\n\tmultiWriter := make([]io.Writer, 0)\n\tif config.EnableConsole {\n\t\tmultiWriter = append(multiWriter, stdOutHandler)\n\t}\n\tif config.EnableFile {\n\t\tmultiWriter = append(multiWriter, fileHandler)\n\t\tlLogger.SetFormatter(getFormatter(config.FileJSONFormat))\n\t}\n\n\tlLogger.SetOutput(io.MultiWriter(multiWriter...))\n\treturn &logrusLogger{\n\t\tlogger: lLogger,\n\t}, nil\n}\n\nfunc (l *logrusLogger) Debugf(format string, args ...interface{}) {\n\tl.logger.Debugf(format, args...)\n}\n\nfunc (l *logrusLogger) Infof(format string, args ...interface{}) {\n\tl.logger.Infof(format, args...)\n}\n\nfunc (l *logrusLogger) Warnf(format string, args ...interface{}) {\n\tl.logger.Warnf(format, args...)\n}\n\nfunc (l *logrusLogger) Errorf(format string, args ...interface{}) 
{\n\tl.logger.Errorf(format, args...)\n}\n\nfunc (l *logrusLogger) Fatalf(format string, args ...interface{}) {\n\tl.logger.Fatalf(format, args...)\n}\n\nfunc (l *logrusLogger) Panicf(format string, args ...interface{}) {\n\tl.logger.Fatalf(format, args...)\n}\n\nfunc (l *logrusLogger) WithFields(fields Fields) Logger {\n\treturn &logrusLogEntry{\n\t\tentry: l.logger.WithFields(convertToLogrusFields(fields)),\n\t}\n}\n\nfunc (l *logrusLogEntry) Debugf(format string, args ...interface{}) {\n\tl.entry.Debugf(format, args...)\n}\n\nfunc (l *logrusLogEntry) Infof(format string, args ...interface{}) {\n\tl.entry.Infof(format, args...)\n}\n\nfunc (l *logrusLogEntry) Warnf(format string, args ...interface{}) {\n\tl.entry.Warnf(format, args...)\n}\n\nfunc (l *logrusLogEntry) Errorf(format string, args ...interface{}) {\n\tl.entry.Errorf(format, args...)\n}\n\nfunc (l *logrusLogEntry) Fatalf(format string, args ...interface{}) {\n\tl.entry.Fatalf(format, args...)\n}\n\nfunc (l *logrusLogEntry) Panicf(format string, args ...interface{}) {\n\tl.entry.Panicf(format, args...)\n}\n\nfunc (l *logrusLogEntry) WithFields(fields Fields) Logger {\n\treturn &logrusLogEntry{\n\t\tentry: l.entry.WithFields(convertToLogrusFields(fields)),\n\t}\n}\n\nfunc convertToLogrusFields(fields Fields) logrus.Fields {\n\tlogrusFields := logrus.Fields{}\n\tfor index, val := range fields {\n\t\tlogrusFields[index] = val\n\t}\n\treturn logrusFields\n}\n"
  },
  {
    "path": "pkg/lumber/setup.go",
    "content": "// Logging package for tunnel server\n\npackage lumber\n\nimport \"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\n// LoggingConfig stores the config for the logger\n// For some loggers there can only be one level across writers, for such the level of Console is picked by default\ntype LoggingConfig struct {\n\tEnableConsole     bool\n\tConsoleJSONFormat bool\n\tConsoleLevel      string\n\tEnableFile        bool\n\tFileJSONFormat    bool\n\tFileLevel         string\n\tFileLocation      string\n}\n\n// Fields Type to pass when we want to call WithFields for structured logging\ntype Fields map[string]interface{}\n\nconst (\n\t// Debug has verbose message\n\tDebug = \"debug\"\n\t// Info is default log level\n\tInfo = \"info\"\n\t// Warn is for logging messages about possible issues\n\tWarn = \"warn\"\n\t// Error is for logging errors\n\tError = \"error\"\n\t// Fatal is for logging fatal messages. The system shutsdown after logging the message.\n\tFatal = \"fatal\"\n)\n\n// List of supported loggers.\nconst (\n\tInstanceZapLogger int = iota\n\tInstanceLogrusLogger\n)\n\n// Logger is our contract for the logger\ntype Logger interface {\n\t// Debugf logs a message at level Debug on the standard logger.\n\tDebugf(format string, args ...interface{})\n\t// Infof logs a message at level Info on the standard logger.\n\tInfof(format string, args ...interface{})\n\t// Warnf logs a message at level Warn on the standard logger.\n\tWarnf(format string, args ...interface{})\n\t// Errorf logs a message at level Error on the standard logger.\n\tErrorf(format string, args ...interface{})\n\t// Fatalf logs a message at level Fatal on the standard logger then the process will exit with status set to 1.\n\tFatalf(format string, args ...interface{})\n\t// Panicf logs a message at level Panic on the standard logger.\n\tPanicf(format string, args ...interface{})\n\t// WithField creates an entry from the standard logger and adds a field to\n\t// it. 
If you want multiple fields, use `WithFields`\n\t// Note that it doesn't log until you call Debug, Print, Info, Warn, Fatal\n\t// or Panic on the Entry it returns.\n\tWithFields(keyValues Fields) Logger\n}\n\n// NewLogger returns an instance of logger\nfunc NewLogger(config LoggingConfig, verbose bool, loggerInstance int) (Logger, error) {\n\tswitch loggerInstance {\n\tcase InstanceZapLogger:\n\t\tlogger := newZapLogger(config, verbose)\n\t\treturn logger, nil\n\n\tcase InstanceLogrusLogger:\n\t\tlogger, err := newLogrusLogger(config, verbose)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn logger, nil\n\n\tdefault:\n\t\treturn nil, errs.ErrInvalidLoggerInstance\n\t}\n}\n"
  },
  {
    "path": "pkg/lumber/zap.go",
    "content": "package lumber\n\nimport (\n\t\"os\"\n\n\t\"go.uber.org/zap\"\n\t\"go.uber.org/zap/zapcore\"\n\tlumberjack \"gopkg.in/natefinch/lumberjack.v2\"\n)\n\ntype zapLogger struct {\n\tsugaredLogger *zap.SugaredLogger\n}\n\nconst callDepth = 2\n\nfunc getEncoder(isJSON bool) zapcore.Encoder {\n\tencoderConfig := zap.NewProductionEncoderConfig()\n\tencoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder\n\tencoderConfig.TimeKey = \"time\" // This will change the key from ts to time\n\tif isJSON {\n\t\treturn zapcore.NewJSONEncoder(encoderConfig)\n\t}\n\treturn zapcore.NewConsoleEncoder(encoderConfig)\n}\n\nfunc getZapLevel(level string) zapcore.Level {\n\tswitch level {\n\tcase Info:\n\t\treturn zapcore.InfoLevel\n\tcase Warn:\n\t\treturn zapcore.WarnLevel\n\tcase Debug:\n\t\treturn zapcore.DebugLevel\n\tcase Error:\n\t\treturn zapcore.ErrorLevel\n\tcase Fatal:\n\t\treturn zapcore.FatalLevel\n\tdefault:\n\t\treturn zapcore.InfoLevel\n\t}\n}\n\nfunc newZapLogger(config LoggingConfig, verbose bool) Logger {\n\tcores := []zapcore.Core{}\n\tif config.EnableConsole {\n\t\tlevel := getZapLevel(config.ConsoleLevel)\n\t\t// command line args take highest precedence\n\t\tif verbose {\n\t\t\tlevel = getZapLevel(\"debug\")\n\t\t}\n\t\twriter := zapcore.Lock(os.Stdout)\n\t\tcore := zapcore.NewCore(getEncoder(config.ConsoleJSONFormat), writer, level)\n\t\tcores = append(cores, core)\n\t}\n\n\tif config.EnableFile {\n\t\tlevel := getZapLevel(config.FileLevel)\n\t\twriter := zapcore.AddSync(&lumberjack.Logger{\n\t\t\tFilename: config.FileLocation,\n\t\t\tMaxSize:  100,\n\t\t\tCompress: true,\n\t\t\tMaxAge:   28,\n\t\t})\n\t\tcore := zapcore.NewCore(getEncoder(config.FileJSONFormat), writer, level)\n\t\tcores = append(cores, core)\n\t}\n\n\tcombinedCore := zapcore.NewTee(cores...)\n\n\t// AddCallerSkip skips 2 number of callers, this is important else the file that gets\n\t// logged will always be the wrapped file. 
In our case zap.go\n\tlogger := zap.New(combinedCore,\n\t\tzap.AddCallerSkip(callDepth),\n\t\tzap.AddCaller(),\n\t).Sugar()\n\n\treturn &zapLogger{\n\t\tsugaredLogger: logger,\n\t}\n}\n\nfunc (l *zapLogger) Debugf(format string, args ...interface{}) {\n\tl.sugaredLogger.Debugf(format, args...)\n}\n\nfunc (l *zapLogger) Infof(format string, args ...interface{}) {\n\tl.sugaredLogger.Infof(format, args...)\n}\n\nfunc (l *zapLogger) Warnf(format string, args ...interface{}) {\n\tl.sugaredLogger.Warnf(format, args...)\n}\n\nfunc (l *zapLogger) Errorf(format string, args ...interface{}) {\n\tl.sugaredLogger.Errorf(format, args...)\n}\n\nfunc (l *zapLogger) Fatalf(format string, args ...interface{}) {\n\tl.sugaredLogger.Fatalf(format, args...)\n}\n\nfunc (l *zapLogger) Panicf(format string, args ...interface{}) {\n\tl.sugaredLogger.Panicf(format, args...)\n}\n\nfunc (l *zapLogger) WithFields(fields Fields) Logger {\n\tvar f = make([]interface{}, 0, len(fields))\n\tfor k, v := range fields {\n\t\tf = append(f, k, v)\n\t}\n\tnewLogger := l.sugaredLogger.With(f...)\n\treturn &zapLogger{newLogger}\n}\n"
  },
  {
    "path": "pkg/payloadmanager/setup.go",
    "content": "// Package payloadmanager is used for fetching and validating the nucleus execution payload\npackage payloadmanager\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"net/http\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n)\n\n// PayloadManager represents the payload for nucleus\ntype payloadManager struct {\n\tlogger      lumber.Logger\n\tazureClient core.AzureClient\n\tcfg         *config.NucleusConfig\n\trequests    core.Requests\n}\n\n// NewPayloadManger creates and returns a new PayloadManager instance\nfunc NewPayloadManger(azureClient core.AzureClient,\n\tlogger lumber.Logger, cfg *config.NucleusConfig, requests core.Requests) core.PayloadManager {\n\treturn &payloadManager{\n\t\tazureClient: azureClient,\n\t\tlogger:      logger,\n\t\tcfg:         cfg,\n\t\trequests:    requests,\n\t}\n}\n\nfunc (pm *payloadManager) FetchPayload(ctx context.Context, payloadAddress string) (*core.Payload, error) {\n\trawBytes, _, err := pm.requests.MakeAPIRequest(ctx, http.MethodGet, payloadAddress, nil, nil, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tp := new(core.Payload)\n\tif err := json.Unmarshal(rawBytes, p); err != nil {\n\t\treturn nil, err\n\t}\n\treturn p, nil\n}\n\nfunc (pm *payloadManager) ValidatePayload(ctx context.Context, payload *core.Payload) error {\n\tif payload.RepoLink == \"\" {\n\t\treturn errs.ErrInvalidPayload(\"Missing repo link\")\n\t}\n\n\tif payload.RepoSlug == \"\" {\n\t\treturn errs.ErrInvalidPayload(\"Missing repo slug\")\n\t}\n\n\tif payload.GitProvider == \"\" {\n\t\treturn errs.ErrInvalidPayload(\"Missing git provider\")\n\t}\n\n\tif payload.BuildID == \"\" {\n\t\treturn errs.ErrInvalidPayload(\"Missing BuildID\")\n\t}\n\tif payload.RepoID == \"\" {\n\t\treturn errs.ErrInvalidPayload(\"Missing RepoID\")\n\t}\n\n\tif payload.BranchName == \"\" 
{\n\t\treturn errs.ErrInvalidPayload(\"Missing Branch Name\")\n\t}\n\n\tif payload.OrgID == \"\" {\n\t\treturn errs.ErrInvalidPayload(\"Missing OrgID\")\n\t}\n\n\tif payload.TasFileName == \"\" {\n\t\treturn errs.ErrInvalidPayload(\"Missing tas yml filename\")\n\t}\n\n\tif pm.cfg.Locators != \"\" {\n\t\tpayload.Locators = pm.cfg.Locators\n\t}\n\n\tif pm.cfg.LocatorAddress != \"\" {\n\t\tpayload.LocatorAddress = pm.cfg.LocatorAddress\n\t}\n\tif payload.BuildTargetCommit == \"\" {\n\t\treturn errs.ErrInvalidPayload(\"Missing build target commit\")\n\t}\n\t// some checks are removed in case of coverage mode or parsing mode\n\tif !pm.cfg.CoverageMode {\n\t\tif pm.cfg.TaskID == \"\" {\n\t\t\treturn errs.ErrInvalidPayload(\"Missing taskID in config\")\n\t\t}\n\t\tpayload.TaskID = pm.cfg.TaskID\n\t}\n\n\tif payload.EventType != core.EventPush && payload.EventType != core.EventPullRequest {\n\t\treturn errs.ErrInvalidPayload(\"Invalid event type\")\n\t}\n\n\tif payload.EventType == core.EventPush && len(payload.Commits) == 0 {\n\t\treturn errs.ErrInvalidPayload(\"Missing commits error\")\n\t}\n\n\treturn nil\n}\n"
  },
  {
    "path": "pkg/payloadmanager/setup_test.go",
    "content": "// Package payloadmanager is used for fetching and validating the nucleus execution payload\npackage payloadmanager\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"net/http/httptest\"\n\t\"os\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/mocks\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/requestutils\"\n\t\"github.com/LambdaTest/test-at-scale/testutils\"\n\t\"github.com/cenkalti/backoff/v4\"\n)\n\ntype validatePayloadArgs struct {\n\tctx            context.Context\n\tpayload        *core.Payload\n\tcoverageMode   bool\n\tlocators       string\n\tlocatorAddress string\n\ttaskID         string\n}\ntype testCaseValidatePayload struct {\n\tname    string\n\targs    validatePayloadArgs\n\twantErr bool\n}\n\n// nolint: unparam\nfunc getPayloadManagerArgs() (core.AzureClient, lumber.Logger, *config.NucleusConfig, error) {\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\treturn nil, nil, nil, err\n\t}\n\n\tcfg, err := testutils.GetConfig()\n\tif err != nil {\n\t\treturn nil, nil, nil, err\n\t}\n\tvar azureClient core.AzureClient\n\treturn azureClient, logger, cfg, nil\n}\n\nfunc Test_payloadManager_FetchPayload(t *testing.T) {\n\tserver := httptest.NewServer( // mock server\n\t\thttp.FileServer(http.Dir(\"../../testutils/testdata\")), // mock data stored at testutils/testdata/index.txt\n\t)\n\tdefer server.Close()\n\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't get logger, received: %s\", err)\n\t}\n\n\tcfg, err := testutils.GetConfig()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't get config, received: %s\", err)\n\t}\n\n\tazureClient := new(mocks.AzureClient)\n\n\twantResp, err := os.ReadFile(\"../../testutils/testdata/index.json\")\n\tif err != nil 
{\n\t\tfmt.Printf(\"error in reading file: %+v\\n\", err)\n\t}\n\n\trequests := requestutils.New(logger, global.DefaultAPITimeout, &backoff.StopBackOff{})\n\tpm := NewPayloadManger(azureClient, logger, cfg, requests)\n\n\ttype args struct {\n\t\tctx            context.Context\n\t\tpayloadAddress string\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\targs    args\n\t\twant    string\n\t\twantErr bool\n\t}{\n\t\t{\n\t\t\t\"Test Payload fetch for success\",\n\t\t\targs{ctx: context.TODO(), payloadAddress: server.URL + \"/index.txt\"},\n\t\t\tstring(wantResp),\n\t\t\tfalse,\n\t\t},\n\t\t{\n\t\t\t\"Test Payload fetch for empty url\",\n\t\t\targs{ctx: context.TODO(), payloadAddress: \"\"},\n\t\t\t\"<nil>\",\n\t\t\ttrue,\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tgot, err := pm.FetchPayload(tt.args.ctx, tt.args.payloadAddress)\n\t\t\tif tt.wantErr {\n\t\t\t\tif err == nil {\n\t\t\t\t\tt.Errorf(\"payloadManager.FetchPayload() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\t}\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\treceived, _ := json.Marshal(got)\n\t\t\treceivedPayload := fmt.Sprintf(\"%+v\\n\", string(received))\n\t\t\tif receivedPayload != tt.want {\n\t\t\t\tt.Errorf(\"payloadManager.FetchPayload() = \\n%v, \\nwant: %v\\n\", receivedPayload, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc Test_payloadManager_ValidatePayload(t *testing.T) {\n\tazureClient, logger, cfg, err := getPayloadManagerArgs()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't establish required arguments, error: %v\", err)\n\t\treturn\n\t}\n\n\ttests := getValidatePayloadTestCases()\n\tfor _, tt := range tests {\n\t\tcfg.CoverageMode = tt.args.coverageMode\n\t\tcfg.LocatorAddress = tt.args.locatorAddress\n\t\tcfg.Locators = tt.args.locators\n\t\tcfg.TaskID = tt.args.taskID\n\n\t\trequests := requestutils.New(logger, global.DefaultAPITimeout, &backoff.StopBackOff{})\n\t\tpm := NewPayloadManger(azureClient, logger, cfg, requests)\n\t\tt.Run(tt.name, func(t 
*testing.T) {\n\t\t\tif err := pm.ValidatePayload(tt.args.ctx, tt.args.payload); (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"payloadManager.ValidatePayload() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tif cfg.Locators != \"\" {\n\t\t\t\tif tt.args.payload.Locators != tt.args.locators {\n\t\t\t\t\tt.Errorf(\"payloadManager.ValidatePayload() payload.locatorAdress = %v, want: %v\", tt.args.payload.LocatorAddress, tt.args.locators)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif cfg.LocatorAddress != \"\" {\n\t\t\t\tif tt.args.payload.LocatorAddress != tt.args.locatorAddress {\n\t\t\t\t\tt.Errorf(\"got = %v, want: %v\",\n\t\t\t\t\t\ttt.args.payload.LocatorAddress, tt.args.locatorAddress)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t\tif !(cfg.CoverageMode) {\n\t\t\t\tif cfg.TaskID != \"\" {\n\t\t\t\t\tif tt.args.payload.TaskID != tt.args.taskID {\n\t\t\t\t\t\tt.Errorf(\"got payload.TaskID: %v, want: %v\", tt.args.payload.TaskID, tt.args.taskID)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc getValidatePayloadTestCases() []*testCaseValidatePayload {\n\ttestCases := []*testCaseValidatePayload{\n\t\t{\"Test validate payload for empty repolink\",\n\t\t\tvalidatePayloadArgs{ctx: context.TODO(), payload: &core.Payload{RepoLink: \"\"}},\n\t\t\ttrue,\n\t\t},\n\t\t{\"Test validate payload for empty reposlug\",\n\t\t\tvalidatePayloadArgs{ctx: context.TODO(), payload: &core.Payload{RepoLink: \"github.com/abc/\", RepoSlug: \"\"}},\n\t\t\ttrue,\n\t\t},\n\t\t{\"Test validate payload for empty gitprovider\",\n\t\t\tvalidatePayloadArgs{ctx: context.TODO(), payload: &core.Payload{RepoLink: \"github.com/abc/\", RepoSlug: \"/slug\", GitProvider: \"\"}},\n\t\t\ttrue,\n\t\t},\n\t\t{\"Test validate payload for empty buildID\",\n\t\t\tvalidatePayloadArgs{ctx: context.TODO(), payload: &core.Payload{RepoLink: \"github.com/abc/\",\n\t\t\t\tRepoSlug: \"/slug\", GitProvider: \"fake\", BuildID: \"\"}},\n\t\t\ttrue,\n\t\t},\n\t\t{\"Test validate 
payload for empty repoID\",\n\t\t\tvalidatePayloadArgs{ctx: context.TODO(), payload: &core.Payload{RepoLink: \"github.com/abc/\", RepoSlug: \"/slug\",\n\t\t\t\tGitProvider: \"fake\", BuildID: \"build\", RepoID: \"\"}},\n\t\t\ttrue,\n\t\t},\n\t\t{\"Test validate payload for empty branchName\",\n\t\t\tvalidatePayloadArgs{ctx: context.TODO(), payload: &core.Payload{RepoLink: \"github.com/abc/\", RepoSlug: \"/slug\",\n\t\t\t\tGitProvider: \"fake\", BuildID: \"build\", RepoID: \"repo\", BranchName: \"\"}}, true,\n\t\t},\n\t\t{\"Test validate payload for empty orgID\",\n\t\t\tvalidatePayloadArgs{ctx: context.TODO(), payload: &core.Payload{RepoLink: \"github.com/abc/\", RepoSlug: \"/slug\",\n\t\t\t\tGitProvider: \"fake\", BuildID: \"build\", RepoID: \"repo\", BranchName: \"branch\", OrgID: \"\"}},\n\t\t\ttrue,\n\t\t},\n\t\t{\"Test validate payload for empty TASFileName\",\n\t\t\tvalidatePayloadArgs{ctx: context.TODO(), payload: &core.Payload{RepoLink: \"github.com/abc/\", RepoSlug: \"/slug\",\n\t\t\t\tGitProvider: \"fake\", BuildID: \"build\", RepoID: \"repo\", BranchName: \"branch\", OrgID: \"org\", TasFileName: \"\"}},\n\t\t\ttrue,\n\t\t},\n\t\t{\"Test validate payload for expected payload.Locator Address & payloadLocator\",\n\t\t\tvalidatePayloadArgs{ctx: context.TODO(), payload: &core.Payload{RepoLink: \"github.com/abc/\", RepoSlug: \"/slug\",\n\t\t\t\tGitProvider: \"fake\", BuildID: \"build\", RepoID: \"repo\", BranchName: \"branch\", OrgID: \"org\", TasFileName: \"a\"},\n\t\t\t\tlocators: \"/locator\", locatorAddress: \"/locatorAddr\"},\n\t\t\ttrue,\n\t\t},\n\t\t{\"Test validate payload for empty build target commit\",\n\t\t\tvalidatePayloadArgs{ctx: context.TODO(), payload: &core.Payload{RepoLink: \"github.com/abc/\", RepoSlug: \"/slug\",\n\t\t\t\tGitProvider: \"fake\", BuildID: \"build\", RepoID: \"repo\", BranchName: \"branch\", OrgID: \"org\",\n\t\t\t\tTasFileName: \"tas\", BuildTargetCommit: \"\"}},\n\t\t\ttrue,\n\t\t},\n\t\t{\"Test validate payload for empty 
target commit in config\",\n\t\t\tvalidatePayloadArgs{ctx: context.TODO(), payload: &core.Payload{RepoLink: \"github.com/abc/\", RepoSlug: \"/slug\",\n\t\t\t\tGitProvider: \"fake\", BuildID: \"build\", RepoID: \"repo\", BranchName: \"branch\", OrgID: \"org\",\n\t\t\t\tTasFileName: \"tas\", BuildTargetCommit: \"btg\"}, coverageMode: false, locators: \"../dummy\"},\n\t\t\ttrue,\n\t\t},\n\t\t{\"Test validate payload for target & base commit in config\",\n\t\t\tvalidatePayloadArgs{ctx: context.TODO(), payload: &core.Payload{RepoLink: \"github.com/abc/\", RepoSlug: \"/slug\",\n\t\t\t\tGitProvider: \"fake\", BuildID: \"build\", RepoID: \"repo\", BranchName: \"branch\", OrgID: \"org\",\n\t\t\t\tTasFileName: \"tas\", BuildTargetCommit: \"btg\"}, coverageMode: false, locators: \"../dummy\"},\n\t\t\ttrue,\n\t\t},\n\t\t{\"Test validate payload for target, base commit & taskID in config\",\n\t\t\tvalidatePayloadArgs{ctx: context.TODO(), payload: &core.Payload{RepoLink: \"github.com/abc/\", RepoSlug: \"/slug\",\n\t\t\t\tGitProvider: \"fake\", BuildID: \"build\", RepoID: \"repo\", BranchName: \"branch\", OrgID: \"org\",\n\t\t\t\tTasFileName: \"tas\", BuildTargetCommit: \"btg\"}, coverageMode: false, locators: \"../dummy\", taskID: \"tid\"},\n\t\t\ttrue,\n\t\t},\n\t\t{\"Test validate payload for non push and pull event\",\n\t\t\tvalidatePayloadArgs{ctx: context.TODO(), payload: &core.Payload{RepoLink: \"github.com/abc/\", RepoSlug: \"/slug\",\n\t\t\t\tGitProvider: \"fake\", BuildID: \"build\", RepoID: \"repo\", BranchName: \"branch\", OrgID: \"org\",\n\t\t\t\tTasFileName: \"tas\", BuildTargetCommit: \"btg\", EventType: \"invalid\"}, coverageMode: true},\n\t\t\ttrue,\n\t\t},\n\t\t{\"Test validate payload for push event with nil commit\",\n\t\t\tvalidatePayloadArgs{ctx: context.TODO(), payload: &core.Payload{RepoLink: \"github.com/abc/\", RepoSlug: \"/slug\",\n\t\t\t\tGitProvider: \"fake\", BuildID: \"build\", RepoID: \"repo\", BranchName: \"branch\", OrgID: 
\"org\",\n\t\t\t\tTasFileName: \"tas\", BuildTargetCommit: \"btg\", EventType: \"push\", Commits: []core.CommitChangeList{}}, coverageMode: true},\n\t\t\ttrue,\n\t\t},\n\t\t{\"Test validate payload for success\",\n\t\t\tvalidatePayloadArgs{ctx: context.TODO(), payload: &core.Payload{RepoLink: \"github.com/abc/\", RepoSlug: \"/slug\",\n\t\t\t\tGitProvider: \"fake\", BuildID: \"build\", RepoID: \"repo\", BranchName: \"branch\", OrgID: \"org\",\n\t\t\t\tTasFileName: \"tas\", BuildTargetCommit: \"btg\", EventType: \"push\",\n\t\t\t\tCommits: []core.CommitChangeList{{Sha: \"sha\", Message: \"msg\"}}}, coverageMode: true},\n\t\t\tfalse,\n\t\t},\n\t}\n\treturn testCases\n}\n"
  },
  {
    "path": "pkg/procfs/procfs.go",
    "content": "//go:build linux\n// +build linux\n\npackage procfs\n\nimport (\n\t\"context\"\n\t\"math\"\n\t\"runtime\"\n\t\"time\"\n\n\t\"github.com/shirou/gopsutil/v3/mem\"\n\t\"github.com/shirou/gopsutil/v3/process\"\n)\n\nconst hundred = 100\n\n// Proc represents the process for which we want to find stats\ntype Proc struct {\n\ttotalMem     uint64\n\tprocess      *process.Process\n\tsamplingTime time.Duration\n\tusePss       bool\n}\n\n// Stats represents the process stats\ntype Stats struct {\n\tCPUPercentage float64\n\tMemPercentage float64\n\tMemShared     uint64\n\tMemSwapped    uint64\n\tMemConsumed   uint64\n\tRecordTime    time.Time\n}\n\n// New returns new Proc struct\nfunc New(pid int32, samplingInterval time.Duration, usePss bool) (*Proc, error) {\n\tp, err := process.NewProcess(pid)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tmachineMemory, err := mem.VirtualMemory()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &Proc{process: p, samplingTime: samplingInterval, usePss: usePss, totalMem: machineMemory.Total}, nil\n}\n\n// GetStats returns process stats\nfunc (ps *Proc) GetStats() (stat *Stats, err error) {\n\n\ts := Stats{}\n\ts.RecordTime = time.Now()\n\tcpuPerc, err := ps.process.Percent(0)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t// https://github.com/alibaba/sentinel-golang/pull/448.\n\t// The underlying library returns abnormally large number in some cases\n\ts.CPUPercentage = math.Min(hundred, cpuPerc/float64(runtime.NumCPU()))\n\n\tmemInfo, err := ps.process.MemoryInfo()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif !ps.usePss {\n\t\ts.MemConsumed = memInfo.RSS\n\t\ts.MemSwapped = memInfo.Swap\n\t\ts.MemPercentage = (hundred * float64(s.MemConsumed) / float64(ps.totalMem))\n\t\treturn &s, nil\n\t}\n\n\t// why use pss instead of rss, Ref #https://stackoverflow.com/questions/1420426/how-to-calculate-the-cpu-usage-of-a-process-by-pid-in-linux-from-c/1424556\n\tmaps, err := ps.process.MemoryMaps(true)\n\tif err != nil 
{\n\t\treturn nil, err\n\t}\n\tvar pss uint64\n\tfor _, m := range *maps {\n\t\tpss += m.Pss\n\t\ts.MemSwapped += m.Swap\n\t}\n\n\ts.MemConsumed = pss * 1024 // PSS is in kB\n\ts.MemPercentage = (hundred * float64(s.MemConsumed) / float64(ps.totalMem))\n\treturn &s, nil\n\n}\n\n// GetStatsInInterval returns process stats after every interval\nfunc (ps *Proc) GetStatsInInterval() []*Stats {\n\treturn ps.GetStatsInIntervalWithContext(context.Background())\n}\n\n// GetStatsInIntervalWithContext returns process stats after every interval\nfunc (ps *Proc) GetStatsInIntervalWithContext(ctx context.Context) []*Stats {\n\n\tvar stats []*Stats\n\ts, err := ps.GetStats()\n\tif err != nil {\n\t\treturn stats\n\t}\n\t//append initial values to slice, then check after an interval\n\tstats = append(stats, s)\n\tticker := time.NewTicker(ps.samplingTime)\n\tdefer ticker.Stop()\n\tfor {\n\t\tselect {\n\t\tcase <-ticker.C:\n\t\t\ts, err := ps.GetStats()\n\t\t\tif err != nil {\n\t\t\t\treturn stats\n\t\t\t}\n\t\t\tstats = append(stats, s)\n\t\tcase <-ctx.Done():\n\t\t\treturn stats\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "pkg/proxyserver/proxyhandler.go",
    "content": "package proxyserver\n\nimport (\n\t\"encoding/base64\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"net/http/httputil\"\n\t\"net/url\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/spf13/viper\"\n)\n\n// ProxyHandler defines struct for proxy handler\ntype ProxyHandler struct {\n\tremote *url.URL\n\tlogger lumber.Logger\n}\n\nconst synapseURL = \"/synapse\"\n\n// NewProxyHandler returns pointer of new instace of ProxyHandler\nfunc NewProxyHandler(logger lumber.Logger) (*ProxyHandler, error) {\n\tremote, err := url.Parse(global.TASCloudURL[viper.GetString(\"env\")])\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &ProxyHandler{\n\t\tremote: remote,\n\t\tlogger: logger,\n\t}, nil\n}\n\n// HandlerProxy handles the proxy server\nfunc (ph *ProxyHandler) HandlerProxy(w http.ResponseWriter, r *http.Request) {\n\n\tproxy := httputil.NewSingleHostReverseProxy(ph.remote)\n\tproxy.Director = func(req *http.Request) {\n\t\treq.Header = r.Header\n\n\t\tencodedSecret := base64.StdEncoding.EncodeToString([]byte(viper.GetString(\"Lambdatest.SecretKey\")))\n\t\treq.Header.Add(\"Lambdatest-SecretKey\", encodedSecret)\n\t\treq.Host = ph.remote.Host\n\t\treq.URL.Scheme = ph.remote.Scheme\n\t\treq.URL.Host = ph.remote.Host\n\t\treq.URL.Path = fmt.Sprintf(\"%s%s\", synapseURL, r.URL.Path)\n\n\t\tph.logger.Debugf(\"proxying to url: %s\", req.URL.Path)\n\t}\n\n\tproxy.ServeHTTP(w, r)\n}\n"
  },
  {
    "path": "pkg/proxyserver/setup.go",
    "content": "package proxyserver\n\nimport (\n\t\"context\"\n\t\"net/http\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/gin-gonic/gin\"\n)\n\n// ListenAndServe  starts proxy http server for synapse\nfunc ListenAndServe(ctx context.Context, proxyHandler *ProxyHandler, config *config.SynapseConfig, logger lumber.Logger) error {\n\tgin.SetMode(gin.ReleaseMode)\n\tlogger.Infof(\"Setting up HTTP handler\")\n\n\terrChan := make(chan error)\n\n\t// HTTP server instance\n\tsrv := &http.Server{\n\t\tAddr:    \":\" + global.ProxyServerPort,\n\t\tHandler: http.HandlerFunc(proxyHandler.HandlerProxy),\n\t}\n\t// channel to signal server process exit\n\tdone := make(chan struct{})\n\tgo func() {\n\t\tlogger.Infof(\"Starting server on port %s\", global.ProxyServerPort)\n\t\t// service connections\n\t\tif err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed {\n\t\t\tlogger.Errorf(\"listen: %#v\", err)\n\t\t\terrChan <- err\n\t\t}\n\t}()\n\n\tselect {\n\tcase <-ctx.Done():\n\t\tlogger.Infof(\"Caller has requested graceful shutdown. shutting down the server\")\n\t\tif err := srv.Shutdown(ctx); err != nil {\n\t\t\tlogger.Errorf(\"Server Shutdown:\", \"error\", err)\n\t\t}\n\t\treturn nil\n\tcase err := <-errChan:\n\t\treturn err\n\tcase <-done:\n\t\treturn nil\n\t}\n}\n"
  },
  {
    "path": "pkg/requestutils/request.go",
    "content": "package requestutils\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"reflect\"\n\t\"time\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/cenkalti/backoff/v4\"\n)\n\ntype requests struct {\n\tlogger       lumber.Logger\n\tclient       http.Client\n\tretryBackoff backoff.BackOff\n}\n\nfunc New(logger lumber.Logger, requestTimeout time.Duration, retryBackoff backoff.BackOff) core.Requests {\n\treturn &requests{\n\t\tlogger:       logger,\n\t\tclient:       http.Client{Timeout: requestTimeout},\n\t\tretryBackoff: retryBackoff,\n\t}\n}\n\nfunc (r *requests) MakeAPIRequest(\n\tctx context.Context,\n\thttpMethod, endpoint string,\n\tbody []byte,\n\tquery map[string]interface{},\n\theaders map[string]string,\n) (respBody []byte, statusCode int, err error) {\n\tu, err := url.Parse(endpoint)\n\tif err != nil {\n\t\tr.logger.Errorf(\"error while parsing endpoint %s, %v\", endpoint, err)\n\t\treturn nil, 0, err\n\t}\n\tq := u.Query()\n\tfor id, val := range query {\n\t\tv := reflect.ValueOf(val)\n\t\t// nolint:exhaustive\n\t\tswitch v.Kind() {\n\t\tcase reflect.Array:\n\t\tcase reflect.Slice:\n\t\t\tfor i := 0; i < v.Len(); i += 1 {\n\t\t\t\tq.Add(id, v.Index(i).String())\n\t\t\t}\n\t\tdefault:\n\t\t\tq.Set(id, fmt.Sprintf(\"%v\", val))\n\t\t}\n\t}\n\tu.RawQuery = q.Encode()\n\treq, err := http.NewRequestWithContext(ctx, httpMethod, u.String(), bytes.NewBuffer(body))\n\tif err != nil {\n\t\tr.logger.Errorf(\"error while creating http request %v\", err)\n\t\treturn nil, 0, err\n\t}\n\tfor id, val := range headers {\n\t\treq.Header.Add(id, val)\n\t}\n\n\toperation := func() error {\n\t\tresp, errD := r.client.Do(req)\n\t\tif errD != nil {\n\t\t\tr.logger.Errorf(\"error while sending http request %v\", errD)\n\t\t\treturn errD\n\t\t}\n\t\tdefer resp.Body.Close()\n\t\tstatusCode = resp.StatusCode\n\t\tif 500 <= statusCode && 
statusCode < 600 {\n\t\t\treturn fmt.Errorf(\"status code %d received\", statusCode)\n\t\t}\n\t\trespBody, err = io.ReadAll(resp.Body)\n\t\tif err != nil {\n\t\t\tr.logger.Errorf(\"error while reading http response body %v\", err)\n\t\t\treturn nil\n\t\t}\n\t\treturn nil\n\t}\n\tif errR := backoff.Retry(operation, r.retryBackoff); errR != nil {\n\t\tr.logger.Errorf(\"Retry limit exceeded. Error %+v\", errR)\n\t\treturn respBody, statusCode, errors.New(\"retry limit exceeded\")\n\t}\n\tif statusCode != http.StatusOK {\n\t\tr.logger.Errorf(\"non 200 status code %d\", statusCode)\n\t\treturn respBody, statusCode, errors.New(\"non 200 status code\")\n\t}\n\treturn respBody, statusCode, err\n}\n"
  },
  {
    "path": "pkg/runner/docker/config.go",
    "content": "package docker\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"os\"\n\t\"strconv\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/synapse\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/utils\"\n\t\"github.com/docker/docker/api/types\"\n\t\"github.com/docker/docker/api/types/container\"\n\t\"github.com/docker/docker/api/types/filters\"\n\t\"github.com/docker/docker/api/types/mount\"\n\t\"github.com/docker/docker/api/types/network\"\n\t\"github.com/docker/docker/api/types/volume\"\n\t\"github.com/docker/go-units\"\n)\n\nconst (\n\tdefaultVaultPath = \"/vault/secrets\"\n\trepoSourcePath   = \"/tmp/synapse/%s/nucleus\"\n\tnanoCPUUnit      = 1e9\n\tvolumePrefix     = \"tas-build\"\n)\n\nfunc (d *docker) getVolumeName(r *core.RunnerOptions) string {\n\treturn fmt.Sprintf(\"%s-%s\", volumePrefix, r.Label[synapse.BuildID])\n}\n\nfunc (d *docker) getVolumeConfiguration(r *core.RunnerOptions) *volume.VolumeCreateBody {\n\treturn &volume.VolumeCreateBody{\n\t\tDriver: \"local\",\n\t\tName:   d.getVolumeName(r),\n\t\tLabels: map[string]string{synapse.BuildID: r.Label[synapse.BuildID]},\n\t}\n}\n\nfunc (d *docker) getContainerConfiguration(r *core.RunnerOptions) *container.Config {\n\treturn &container.Config{\n\t\tImage:   r.DockerImage,\n\t\tEnv:     r.Env,\n\t\tTty:     false,\n\t\tCmd:     r.ContainerArgs,\n\t\tVolumes: make(map[string]struct{}),\n\t}\n}\n\nfunc (d *docker) getContainerHostConfiguration(r *core.RunnerOptions) *container.HostConfig {\n\tspecs := getSpecs(r.Tier)\n\t/*\n\t\thttps://pkg.go.dev/github.com/docker/docker@v20.10.12+incompatible/api/types/container#Resources\n\t\tAS per documentation , 1 core = 1e9 NanoCPUs\n\t*/\n\tnanoCPU := int64(specs.CPU * nanoCPUUnit)\n\td.logger.Infof(\"Specs %+v\", specs)\n\tmounts := []mount.Mount{\n\t\t{\n\t\t\tType:   mount.TypeVolume,\n\t\t\tSource: d.getVolumeName(r),\n\t\t\tTarget: 
defaultVaultPath,\n\t\t},\n\t}\n\tif r.PodType == core.NucleusPod || r.PodType == core.CoveragePod {\n\t\trepoBuildSourcePath := fmt.Sprintf(repoSourcePath, r.Label[synapse.BuildID])\n\t\tif err := utils.CreateDirectory(repoBuildSourcePath); err != nil {\n\t\t\td.logger.Errorf(\"error creating directory: %v\", err)\n\t\t}\n\t\tmounts = append(mounts, mount.Mount{\n\t\t\tType:   mount.TypeVolume,\n\t\t\tSource: d.getVolumeName(r),\n\t\t\tTarget: global.WorkspaceCacheDir,\n\t\t})\n\t}\n\thostConfig := container.HostConfig{\n\t\tMounts:      mounts,\n\t\tAutoRemove:  true,\n\t\tSecurityOpt: []string{\"seccomp=unconfined\"},\n\t\tResources:   container.Resources{Memory: specs.RAM * units.MiB, NanoCPUs: nanoCPU},\n\t}\n\n\tautoRemove, err := strconv.ParseBool(os.Getenv(global.AutoRemoveEnv))\n\tif err != nil {\n\t\td.logger.Errorf(\"Error reading os env AutoRemove with error: %v \\n returning default host config\", err)\n\t\treturn &hostConfig\n\t}\n\thostConfig.AutoRemove = autoRemove\n\treturn &hostConfig\n}\n\nfunc (d *docker) getContainerNetworkConfiguration() (*network.NetworkingConfig, error) {\n\tvar networkResource types.NetworkResource\n\topts := types.NetworkListOptions{\n\t\tFilters: filters.NewArgs(filters.Arg(\"name\", networkName)),\n\t}\n\tnetworkList, err := d.client.NetworkList(context.TODO(), opts)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfor idx := 0; idx < len(networkList); idx += 1 {\n\t\tif networkList[idx].Name == networkName {\n\t\t\tnetworkResource = networkList[idx]\n\t\t}\n\t}\n\n\tendpointSettings := network.EndpointSettings{\n\t\tNetworkID: networkResource.ID,\n\t}\n\tnetworkConfig := network.NetworkingConfig{\n\t\tEndpointsConfig: map[string]*network.EndpointSettings{},\n\t}\n\tnetworkConfig.EndpointsConfig[networkName] = &endpointSettings\n\n\treturn &networkConfig, nil\n}\n\nfunc getSpecs(tier core.Tier) core.Specs {\n\tif val, ok := core.TierOpts[tier]; ok {\n\t\treturn core.Specs{CPU: val.CPU, RAM: val.RAM}\n\t}\n\treturn 
core.TierOpts[core.Small]\n}\n"
  },
  {
    "path": "pkg/runner/docker/docker.go",
    "content": "package docker\n\nimport (\n\t\"archive/tar\"\n\t\"bytes\"\n\t\"context\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io\"\n\t\"os\"\n\t\"strconv\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/synapse\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/utils\"\n\t\"github.com/docker/docker/api/types\"\n\t\"github.com/docker/docker/api/types/container\"\n\t\"github.com/docker/docker/api/types/filters\"\n\t\"github.com/docker/docker/client\"\n\t\"github.com/docker/docker/pkg/stdcopy\"\n\t\"github.com/docker/go-units\"\n)\n\nconst (\n\tbuildCacheExpiry time.Duration = 4 * time.Hour\n\tBuildID                        = \"build-id\"\n)\n\nvar gracefulyContainerStopDuration = time.Second * 10\n\nvar networkName string\n\ntype docker struct {\n\tclient            *client.Client\n\tlogger            lumber.Logger\n\tcfg               *config.SynapseConfig\n\tsecretsManager    core.SecretsManager\n\tcpu               float32\n\tram               int64\n\tRunningContainers []*core.RunnerOptions\n}\n\n// newDockerClient creates a new docker client\nfunc newDockerClient(secretsManager core.SecretsManager) (*docker, error) {\n\tclient, err := client.NewClientWithOpts(client.FromEnv)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdockerInfo, err := client.Info(context.TODO())\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tnetworkName = os.Getenv(global.NetworkEnvName)\n\treturn &docker{\n\t\tclient:         client,\n\t\tcpu:            float32(dockerInfo.NCPU),\n\t\tram:            dockerInfo.MemTotal / units.MiB,\n\t\tsecretsManager: secretsManager,\n\t}, nil\n}\n\n// New initialize a new docker configuration\nfunc New(secretsManager core.SecretsManager,\n\tlogger 
lumber.Logger,\n\tcfg *config.SynapseConfig) (core.DockerRunner, error) {\n\tdockerConfig, err := newDockerClient(secretsManager)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdockerConfig.logger = logger\n\tdockerConfig.cfg = cfg\n\n\tlogger.Infof(\"available cpu: %f\", dockerConfig.cpu)\n\tlogger.Infof(\"available memory: %d\", dockerConfig.ram)\n\n\treturn dockerConfig, nil\n}\n\nfunc (d *docker) CreateVolume(ctx context.Context, r *core.RunnerOptions) error {\n\tvolumeOptions := d.getVolumeConfiguration(r)\n\tisVolume, err := d.FindVolumes(volumeOptions.Name)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif !isVolume {\n\t\tif _, err := d.client.VolumeCreate(ctx, *volumeOptions); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}\n\nfunc (d *docker) CopyFileToContainer(ctx context.Context, path, fileName, containerID string, content []byte) error {\n\tvar buf bytes.Buffer\n\ttw := tar.NewWriter(&buf)\n\tdefer tw.Close()\n\n\tif err := tw.WriteHeader(&tar.Header{\n\t\tName: fileName,\n\t\tMode: 0777,\n\t\tSize: int64(len(content)),\n\t}); err != nil {\n\t\treturn err\n\t}\n\tif _, err := tw.Write(content); err != nil {\n\t\treturn err\n\t}\n\n\tif err := d.client.CopyToContainer(\n\t\tctx,\n\t\tcontainerID,\n\t\tglobal.VaultSecretDir,\n\t\t&buf,\n\t\ttypes.CopyToContainerOptions{AllowOverwriteDirWithFile: true},\n\t); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}\n\nfunc (d *docker) Create(ctx context.Context, r *core.RunnerOptions) core.ContainerStatus {\n\tcontainerStatus := core.ContainerStatus{Done: true}\n\tcontainerImageConfig, err := d.secretsManager.GetDockerSecrets(r)\n\tif err != nil {\n\t\td.logger.Errorf(\"Something went wrong while seeking docker secrets %+v\", err)\n\t\tcontainerStatus.Done = false\n\t\tcontainerStatus.Error = errs.ERR_DOCKER_CRT(err.Error())\n\t\treturn containerStatus\n\t}\n\n\tif err = d.CreateVolume(ctx, r); err != nil {\n\t\td.logger.Errorf(\"Error in creating docker volume: %+v\", err)\n\t\tcontainerStatus.Done = 
false\n\t\tcontainerStatus.Error = errs.ErrDockerVolCrt(err.Error())\n\t\treturn containerStatus\n\t}\n\n\tif errP := d.PullImage(&containerImageConfig, r); errP != nil {\n\t\td.logger.Errorf(\"Something went wrong while pulling container image %+v\", errP)\n\t\tcontainerStatus.Done = false\n\t\tcontainerStatus.Error = errs.ERR_DOCKER_CRT(errP.Error())\n\t\treturn containerStatus\n\t}\n\tcontainerConfig := d.getContainerConfiguration(r)\n\thostConfig := d.getContainerHostConfiguration(r)\n\tnetworkConfig, err := d.getContainerNetworkConfiguration()\n\tif err != nil {\n\t\td.logger.Errorf(\"error retrieving network: %v\", err)\n\t\tcontainerStatus.Done = false\n\t\tcontainerStatus.Error = errs.ERR_DOCKER_CRT(err.Error())\n\t\treturn containerStatus\n\t}\n\tcontainerName := fmt.Sprintf(\"%s-%s\", r.ContainerName, r.PodType)\n\tresp, err := d.client.ContainerCreate(ctx, containerConfig, hostConfig, networkConfig, nil, containerName)\n\tr.ContainerID = resp.ID\n\tif err != nil {\n\t\td.logger.Errorf(\"error creating container: %v\", err)\n\t\tcontainerStatus.Done = false\n\t\tcontainerStatus.Error = errs.ERR_DOCKER_CRT(err.Error())\n\t\treturn containerStatus\n\t}\n\td.logger.Debugf(\"container created with name: %s, updating status %+v\",\n\t\tfmt.Sprintf(\"%s-%s\", r.ContainerName, r.PodType), containerStatus)\n\n\tgitSecretBytes, err := d.secretsManager.GetGitSecretBytes()\n\tif err != nil {\n\t\td.logger.Errorf(\"Error in loading git secrets: %s\", err.Error())\n\t\tcontainerStatus.Done = false\n\t\tcontainerStatus.Error = errs.ErrSecretLoad(err.Error())\n\t\treturn containerStatus\n\t}\n\tif err = d.CopyFileToContainer(\n\t\tctx,\n\t\tglobal.VaultSecretDir,\n\t\tglobal.GitConfigFileName,\n\t\tr.ContainerID,\n\t\tgitSecretBytes,\n\t); err != nil {\n\t\tcontainerStatus.Done = false\n\t\tcontainerStatus.Error = errs.ErrDockerCP(err.Error())\n\t\treturn containerStatus\n\t}\n\n\t// copies repo secrets to container\n\trepoSecretBytes, err := 
d.secretsManager.GetRepoSecretBytes(r.Label[\"repo\"])\n\tif err != nil {\n\t\td.logger.Debugf(\"Error in loading repo secrets: %s\", err.Error())\n\t} else {\n\t\tif err := d.CopyFileToContainer(\n\t\t\tctx,\n\t\t\tglobal.VaultSecretDir,\n\t\t\tglobal.RepoSecretsFileName,\n\t\t\tr.ContainerID,\n\t\t\trepoSecretBytes,\n\t\t); err != nil {\n\t\t\tcontainerStatus.Done = false\n\t\t\tcontainerStatus.Error = errs.ErrDockerCP(err.Error())\n\t\t\treturn containerStatus\n\t\t}\n\t}\n\treturn containerStatus\n}\n\nfunc (d *docker) Destroy(ctx context.Context, r *core.RunnerOptions) error {\n\tif err := d.client.ContainerStop(ctx, r.ContainerID, &gracefulyContainerStopDuration); err != nil {\n\t\td.logger.Errorf(\"error stopping container %v\", err)\n\t\treturn err\n\t}\n\tautoRemove, err := strconv.ParseBool(os.Getenv(global.AutoRemoveEnv))\n\tif err != nil {\n\t\td.logger.Errorf(\"Error reading AutoRemove os env error: %v\", err)\n\t\treturn errors.New(\"error reading AutoRemove os env error\")\n\t}\n\tif autoRemove {\n\t\t// if autoRemove is set then it docker container will be removed once it stopped or exited\n\t\treturn nil\n\t}\n\terr = d.client.ContainerRemove(ctx, r.ContainerID, types.ContainerRemoveOptions{\n\t\tRemoveVolumes: true,\n\t\tForce:         true,\n\t})\n\tif err != nil {\n\t\td.logger.Errorf(\"error removing container %v\", err)\n\t\treturn err\n\t}\n\treturn nil\n}\n\nfunc (d *docker) Run(ctx context.Context, r *core.RunnerOptions) core.ContainerStatus {\n\tcontainerStatus := core.ContainerStatus{Done: true}\n\td.logger.Debugf(\"running container %s\", r.ContainerID)\n\tif err := d.client.ContainerStart(ctx, r.ContainerID, types.ContainerStartOptions{}); err != nil {\n\t\td.logger.Errorf(\"error starting the container: %s\", err)\n\t\tcontainerStatus.Done = false\n\t\tcontainerStatus.Error = errs.ERR_DOCKER_STRT(err.Error())\n\t\treturn containerStatus\n\t}\n\td.RunningContainers = append(d.RunningContainers, r)\n\n\tif err := d.writeLogs(ctx, r); err 
!= nil {\n\t\td.logger.Errorf(\"error writing logs to stdout: %+v\", err)\n\t}\n\n\treturn containerStatus\n}\n\n// removing element from slice of string\nfunc removeContainerID(slice []*core.RunnerOptions, r *core.RunnerOptions) []*core.RunnerOptions {\n\tindex := -1\n\tfor i, val := range slice {\n\t\tif val.ContainerID == r.ContainerID {\n\t\t\tindex = i\n\t\t\tbreak\n\t\t}\n\t}\n\tif index == -1 {\n\t\treturn slice\n\t}\n\tnewSlice := make([]*core.RunnerOptions, 0)\n\tnewSlice = append(newSlice, slice[:index]...)\n\tif index != len(slice)-1 {\n\t\tnewSlice = append(newSlice, slice[index+1:]...)\n\t}\n\n\treturn newSlice\n}\n\nfunc (d *docker) WaitForCompletion(ctx context.Context, r *core.RunnerOptions) error {\n\td.logger.Infof(\"waiting for  container %s compeletion\", r.ContainerID)\n\tstatusCh, errCh := d.client.ContainerWait(ctx, r.ContainerID, container.WaitConditionRemoved)\n\n\tselect {\n\tcase err := <-errCh:\n\t\tif err != nil {\n\t\t\td.logger.Debugf(\"container %s terminated with error: %v\", r.ContainerID, err)\n\t\t\treturn err\n\t\t}\n\tcase status := <-statusCh:\n\t\td.logger.Debugf(\"status code: %d\", status.StatusCode)\n\t\tif status.StatusCode != 0 {\n\t\t\tmsg := fmt.Sprintf(\"Received non zero status code %v\", status.StatusCode)\n\t\t\treturn errs.ERR_DOCKER_RUN(msg)\n\t\t}\n\t\treturn nil\n\t}\n\treturn nil\n}\n\nfunc (d *docker) GetInfo(ctx context.Context) (cpu float32, ram int64) {\n\treturn d.cpu, d.ram\n}\n\nfunc (d *docker) Initiate(ctx context.Context, r *core.RunnerOptions, statusChan chan core.ContainerStatus) {\n\t// creating the docker contaienr\n\tr.ContainerArgs = append(r.ContainerArgs, \"--local\", os.Getenv(global.LocalEnv), \"--synapsehost\", os.Getenv(global.SynapseHostEnv))\n\tif status := d.Create(ctx, r); !status.Done {\n\t\td.logger.Errorf(\"error creating container: %v\", status.Error)\n\t\td.logger.Infof(\"Update error status after creation\")\n\t\tstatusChan <- status\n\t\treturn\n\t}\n\tif status 
:= d.Run(ctx, r); !status.Done {\n\t\td.logger.Errorf(\"error running container: %v\", status.Error)\n\t\td.logger.Infof(\"Update error status after running\")\n\n\t\tstatusChan <- status\n\t\treturn\n\t}\n\tcontainerStatus := core.ContainerStatus{Done: true}\n\n\tif err := d.WaitForCompletion(ctx, r); err != nil {\n\t\td.logger.Errorf(\"error while waiting for the completion of container: %v\", err)\n\t\tcontainerStatus.Done = false\n\t\tcontainerStatus.Error = errs.ERR_DOCKER_RUN(err.Error())\n\t\td.RunningContainers = removeContainerID(d.RunningContainers, r)\n\t\tstatusChan <- containerStatus\n\t\treturn\n\t}\n\td.RunningContainers = removeContainerID(d.RunningContainers, r)\n\td.logger.Infof(\"container %+s execution successful\", r.ContainerID)\n\tstatusChan <- containerStatus\n}\n\nfunc (d *docker) KillRunningDocker(ctx context.Context) {\n\tfor _, r := range d.RunningContainers {\n\t\td.logger.Infof(\"Destroying container %s\", r.ContainerID)\n\t\tif err := d.Destroy(ctx, r); err != nil {\n\t\t\td.logger.Errorf(\"Error occur while destroying container ID %s , err %+v\", r.ContainerID, err)\n\t\t}\n\t}\n}\n\nfunc (d *docker) KillContainerForBuildID(buildID string) error {\n\tfor _, r := range d.RunningContainers {\n\t\tif r.Label[BuildID] == buildID {\n\t\t\tif err := d.Destroy(context.Background(), r); err != nil {\n\t\t\t\td.logger.Errorf(\"error while destroying container: %v\", err)\n\t\t\t\treturn err\n\t\t\t}\n\t\t\treturn nil\n\t\t}\n\t}\n\treturn nil\n}\n\nfunc (d *docker) PullImage(containerImageConfig *core.ContainerImageConfig, r *core.RunnerOptions) error {\n\tif containerImageConfig.PullPolicy == config.PullNever && r.PodType == core.NucleusPod {\n\t\td.logger.Infof(\"pull policy %s pod type %s, not pulling any image\",\n\t\t\tcontainerImageConfig.PullPolicy, r.PodType)\n\t\treturn nil\n\t}\n\tdockerImage := containerImageConfig.Image\n\n\td.logger.Infof(\"Pulling image : %s\", dockerImage)\n\tImagePullOptions := 
types.ImagePullOptions{}\n\tImagePullOptions.RegistryAuth = containerImageConfig.AuthRegistry\n\treader, err := d.client.ImagePull(context.TODO(), dockerImage, ImagePullOptions)\n\tdefer func() {\n\t\tif reader == nil {\n\t\t\td.logger.Errorf(\"Reader returned by docker pull is null\")\n\t\t\treturn\n\t\t}\n\t\tif errC := reader.Close(); errC != nil {\n\t\t\td.logger.Errorf(errC.Error())\n\t\t}\n\t}()\n\n\tif err != nil {\n\t\treturn err\n\t}\n\tif _, err := io.Copy(os.Stdout, reader); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}\n\n// writeLogs writes container logs to a file\nfunc (d *docker) writeLogs(ctx context.Context, r *core.RunnerOptions) error {\n\treader, err := d.client.ContainerLogs(ctx,\n\t\tr.ContainerID,\n\t\ttypes.ContainerLogsOptions{\n\t\t\tShowStdout: true,\n\t\t\tShowStderr: true,\n\t\t\tFollow:     true,\n\t\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer reader.Close()\n\n\tbuildLogsPath := fmt.Sprintf(\"%s/%s\", global.ExecutionLogsPath, r.Label[synapse.BuildID])\n\n\tif errDir := utils.CreateDirectory(buildLogsPath); errDir != nil {\n\t\treturn errDir\n\t}\n\n\tf, err := os.Create(fmt.Sprintf(\"%s/%s-%s.log\", buildLogsPath, r.ContainerName, r.PodType))\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer f.Close()\n\n\tif _, errCopy := stdcopy.StdCopy(f, f, reader); errCopy != nil {\n\t\treturn errCopy\n\t}\n\n\treturn nil\n}\n\nfunc (d *docker) FindVolumes(volumeName string) (bool, error) {\n\tvolumeFilter := filters.KeyValuePair{Key: \"name\", Value: volumeName}\n\tvolumes, err := d.client.VolumeList(context.Background(), filters.NewArgs(volumeFilter))\n\tif err != nil {\n\t\treturn false, err\n\t}\n\tfor _, v := range volumes.Volumes {\n\t\tif v.Name == volumeName {\n\t\t\treturn true, nil\n\t\t}\n\t}\n\treturn false, nil\n}\n\nfunc (d *docker) RemoveVolume(ctx context.Context, volumeName string) error {\n\tif err := d.client.VolumeRemove(ctx, volumeName, true); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}\n\nfunc (d *docker) 
RemoveOldVolumes(ctx context.Context) {\n\tvolumes, err := d.client.VolumeList(context.Background(), filters.NewArgs())\n\tif err != nil {\n\t\td.logger.Errorf(\"error fetching volume lists: %v\", err.Error())\n\t}\n\tfor _, v := range volumes.Volumes {\n\t\tif strings.HasPrefix(v.Name, volumePrefix) {\n\t\t\t_, data, err := d.client.VolumeInspectWithRaw(context.Background(), v.Name)\n\t\t\tif err == nil {\n\t\t\t\tvar volumeDetails core.VolumeDetails\n\t\t\t\terr = json.Unmarshal(data, &volumeDetails)\n\t\t\t\tif err != nil {\n\t\t\t\t\td.logger.Errorf(\"error in unmarshaling volume details: %v\", err.Error())\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\n\t\t\t\tnow := time.Now()\n\t\t\t\tdiff := now.Sub(volumeDetails.CreatedAt)\n\t\t\t\tif diff > buildCacheExpiry {\n\t\t\t\t\td.logger.Debugf(\"Deleting volume: %s\", v.Name)\n\t\t\t\t\tif err = d.RemoveVolume(ctx, v.Name); err != nil {\n\t\t\t\t\t\td.logger.Errorf(\"Error deleting volume: %v\", err.Error())\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\td.logger.Errorf(\"error in fetching volume details: %v\", err.Error())\n\t\t\t}\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "pkg/runner/docker/docker_test.go",
    "content": "package docker\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"os\"\n\t\"strconv\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/synapse\"\n\t\"github.com/google/uuid\"\n\t\"github.com/stretchr/testify/assert\"\n)\n\nfunc getRunnerOptions() *core.RunnerOptions {\n\tos.Setenv(global.AutoRemoveEnv, strconv.FormatBool(true))\n\n\tcontainerName := fmt.Sprintf(\"test-container-%s\", uuid.NewString())\n\tr := core.RunnerOptions{\n\t\tContainerName:  containerName,\n\t\tContainerArgs:  []string{\"sleep\", \"10\"},\n\t\tDockerImage:    \"alpine:latest\",\n\t\tHostVolumePath: \"/tmp\",\n\t\tPodType:        core.NucleusPod,\n\t\tLabel:          map[string]string{synapse.BuildID: containerName},\n\t}\n\treturn &r\n}\n\nfunc TestDockerCreate(t *testing.T) {\n\tctx := context.Background()\n\trunnerOpts := getRunnerOptions()\n\t// test create container\n\tstatusCreate := runner.Create(ctx, runnerOpts)\n\tif !statusCreate.Done {\n\t\tt.Errorf(\"error creating container: %v\", statusCreate.Error)\n\t}\n}\n\nfunc TestDockerRun(t *testing.T) {\n\tctx := context.Background()\n\trunnerOpts := getRunnerOptions()\n\t// test create container\n\tstatusCreate := runner.Create(ctx, runnerOpts)\n\tif !statusCreate.Done {\n\t\tt.Errorf(\"error creating container: %v\", statusCreate.Error)\n\t}\n\tif status := runner.Run(ctx, runnerOpts); !status.Done {\n\t\tt.Errorf(\"error in running container : %v\", status.Error)\n\t\treturn\n\t}\n}\n\nfunc TestDockerWaitCompletion(t *testing.T) {\n\tctx := context.Background()\n\trunnerOpts := getRunnerOptions()\n\t// test create container\n\tstatusCreate := runner.Create(ctx, runnerOpts)\n\tif !statusCreate.Done {\n\t\tt.Errorf(\"error creating container: %v\", statusCreate.Error)\n\t}\n\tif status := runner.Run(ctx, runnerOpts); !status.Done {\n\t\tt.Errorf(\"error in 
running container : %v\", status.Error)\n\t\treturn\n\t}\n\tif err := runner.WaitForCompletion(ctx, runnerOpts); err != nil {\n\t\tt.Errorf(\"Error while waiting for completion of container\")\n\t}\n}\n\nfunc TestDockerDestroyWithoutRunning(t *testing.T) {\n\tctx := context.Background()\n\tos.Setenv(global.AutoRemoveEnv, strconv.FormatBool(false))\n\trunnerOpts := getRunnerOptions()\n\t// test create container\n\tstatusCreate := runner.Create(ctx, runnerOpts)\n\tif !statusCreate.Done {\n\t\tt.Errorf(\"error creating container: %v\", statusCreate.Error)\n\t}\n\tif err := runner.Destroy(ctx, runnerOpts); err != nil {\n\t\tt.Errorf(\"error destroying container: %v\", err)\n\t}\n}\n\nfunc TestDockerDestroyWithRunningWoAutoRemove(t *testing.T) {\n\tctx := context.Background()\n\trunnerOpts := getRunnerOptions()\n\t// test create container\n\tos.Setenv(global.AutoRemoveEnv, strconv.FormatBool(false))\n\tstatusCreate := runner.Create(ctx, runnerOpts)\n\tif !statusCreate.Done {\n\t\tt.Errorf(\"error creating container: %v\", statusCreate.Error)\n\t}\n\tif status := runner.Run(ctx, runnerOpts); !status.Done {\n\t\tt.Errorf(\"error in running container : %v\", status.Error)\n\t\treturn\n\t}\n\tif err := runner.Destroy(ctx, runnerOpts); err != nil {\n\t\tt.Errorf(\"error destroying container: %v\", err)\n\t}\n}\n\nfunc TestDockerDestroyWithRunningWithAutoRemove(t *testing.T) {\n\tctx := context.Background()\n\trunnerOpts := getRunnerOptions()\n\t// test create container\n\tos.Setenv(global.AutoRemoveEnv, strconv.FormatBool(true))\n\tstatusCreate := runner.Create(ctx, runnerOpts)\n\tif !statusCreate.Done {\n\t\tt.Errorf(\"error creating container: %v\", statusCreate.Error)\n\t}\n\tif status := runner.Run(ctx, runnerOpts); !status.Done {\n\t\tt.Errorf(\"error in running container : %v\", status.Error)\n\t\treturn\n\t}\n\tif err := runner.Destroy(ctx, runnerOpts); err != nil {\n\t\tt.Errorf(\"error destroying container: %v\", err)\n\t}\n}\n\nfunc TestDockerPullAlways(t 
*testing.T) {\n\trunnerOpts := getRunnerOptions()\n\t// test create container\n\trunnerOpts.PodType = core.NucleusPod\n\tif err := runner.PullImage(&core.ContainerImageConfig{\n\t\tMode:       config.PublicMode,\n\t\tPullPolicy: config.PullAlways,\n\t\tImage:      runnerOpts.DockerImage,\n\t}, runnerOpts); err != nil {\n\t\tt.Errorf(\"Error while pulling image %v\", err)\n\t}\n}\n\nfunc TestDockerPullNever(t *testing.T) {\n\trunnerOpts := getRunnerOptions()\n\t// test create container\n\trunnerOpts.PodType = core.NucleusPod\n\tif err := runner.PullImage(&core.ContainerImageConfig{\n\t\tMode:       config.PublicMode,\n\t\tPullPolicy: config.PullNever,\n\t\tImage:      \"dummy-image\",\n\t}, runnerOpts); err != nil {\n\t\tt.Errorf(\"Error while pulling image %v\", err)\n\t}\n}\n\nfunc TestDockerVolumes(t *testing.T) {\n\tctx := context.Background()\n\trunnerOpts := getRunnerOptions()\n\n\tos.Setenv(global.AutoRemoveEnv, strconv.FormatBool(true))\n\tstatusCreate := runner.Create(ctx, runnerOpts)\n\tif !statusCreate.Done {\n\t\tt.Errorf(\"error creating container: %v\", statusCreate.Error)\n\t}\n\n\tcorrectVolumeName := fmt.Sprintf(\"%s-%s\", volumePrefix, runnerOpts.Label[synapse.BuildID])\n\tincorrectVolumeName := fmt.Sprintf(\"incorrect-%s-%s\", volumePrefix, runnerOpts.Label[synapse.BuildID])\n\n\texists, err := runner.FindVolumes(incorrectVolumeName)\n\tif err != nil {\n\t\tt.Errorf(\"error finding docker volume: %v\", err)\n\t}\n\tassert.Equal(t, false, exists)\n\n\texists, err = runner.FindVolumes(correctVolumeName)\n\tif err != nil {\n\t\tt.Errorf(\"error finding docker volume: %v\", err)\n\t}\n\tassert.Equal(t, true, exists)\n\n\tif status := runner.Run(ctx, runnerOpts); !status.Done {\n\t\tt.Errorf(\"error in running container : %v\", status.Error)\n\t\treturn\n\t}\n\n\texpectedFileContent := `{\"access_token\":\"dummytoken\",\"expiry\":\"0001-01-01T00:00:00Z\",\"refresh_token\":\"\",\"token_type\":\"Bearer\"}`\n\tsecretBytes, err := 
secretsManager.GetGitSecretBytes()\n\tif err != nil {\n\t\tt.Errorf(\"error retrieving secrets: %v\", err)\n\t}\n\tassert.Equal(t, expectedFileContent, string(secretBytes))\n\n\tif err = runner.Destroy(ctx, runnerOpts); err != nil {\n\t\tt.Errorf(\"error destroying container: %v\", err)\n\t}\n}\n"
  },
  {
    "path": "pkg/runner/docker/setup_test.go",
    "content": "package docker\n\nimport (\n\t\"context\"\n\t\"os\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/secrets\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/tests\"\n\t\"github.com/docker/docker/api/types\"\n\t\"github.com/docker/docker/api/types/filters\"\n\t\"github.com/docker/docker/client\"\n)\n\nvar cfg *config.SynapseConfig\nvar secretsManager core.SecretsManager\nvar runner core.DockerRunner\n\nfunc createNetworkIfNotExists(dockerClient *client.Client, networkName string) error {\n\topts := types.NetworkListOptions{\n\t\tFilters: filters.NewArgs(filters.Arg(\"name\", networkName)),\n\t}\n\tnetworkList, err := dockerClient.NetworkList(context.TODO(), opts)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor idx := 0; idx < len(networkList); idx++ {\n\t\tif networkList[idx].Name == networkName {\n\t\t\treturn nil\n\t\t}\n\t}\n\tif _, err := dockerClient.NetworkCreate(context.TODO(), networkName, types.NetworkCreate{\n\t\tInternal: true,\n\t}); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}\n\nfunc deletNetworkIfExists(dockerClient *client.Client, networkName string) error {\n\tctx := context.TODO()\n\topts := types.NetworkListOptions{\n\t\tFilters: filters.NewArgs(filters.Arg(\"name\", networkName)),\n\t}\n\tnetworkList, err := dockerClient.NetworkList(ctx, opts)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor idx := 0; idx < len(networkList); idx++ {\n\t\tif networkList[idx].Name == networkName {\n\t\t\treturn dockerClient.NetworkRemove(ctx, networkName)\n\t\t}\n\t}\n\treturn nil\n}\n\nfunc TestMain(m *testing.M) {\n\tnetworkName := \"dummy-network\"\n\tos.Setenv(global.NetworkEnvName, networkName)\n\tcfg = tests.MockConfig()\n\n\tlogger, err := lumber.NewLogger(cfg.LogConfig, cfg.Verbose, lumber.InstanceZapLogger)\n\t// TODO: 
check proper way to collect error\n\tif err != nil {\n\t\tos.Exit(1)\n\t}\n\tcl, err := client.NewClientWithOpts(client.FromEnv)\n\tif err != nil {\n\t\tos.Exit(1)\n\t}\n\n\tif errC := createNetworkIfNotExists(cl, networkName); errC != nil {\n\t\tlogger.Errorf(\"Error in creating network %s\", networkName)\n\t\tos.Exit(1)\n\t}\n\tsecretsManager = secrets.New(cfg, logger)\n\trunner, err = New(secretsManager, logger, cfg)\n\tif err != nil {\n\t\tlogger.Errorf(\"error in configuring docker client\")\n\t\tos.Exit(1)\n\t}\n\texitCode := m.Run()\n\tif err := deletNetworkIfExists(cl, networkName); err != nil {\n\t\tlogger.Errorf(\"Error in deleting network %s\", networkName)\n\t\tos.Exit(1)\n\t}\n\tos.Exit(exitCode)\n}\n"
  },
  {
    "path": "pkg/secret/secret.go",
    "content": "package secret\n\nimport (\n\t\"encoding/json\"\n\t\"io/ioutil\"\n\t\"os\"\n\t\"regexp\"\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n)\n\ntype secretParser struct {\n\tlogger      lumber.Logger\n\tsecretRegex *regexp.Regexp\n}\n\n// New return new secret parser\nfunc New(logger lumber.Logger) core.SecretParser {\n\treturn &secretParser{\n\t\tlogger:      logger,\n\t\tsecretRegex: regexp.MustCompile(global.SecretRegex),\n\t}\n}\n\n// GetRepoSecret read repo secrets from given path\nfunc (s *secretParser) GetRepoSecret(path string) (map[string]string, error) {\n\tvar secretData map[string]string\n\tif _, err := os.Lstat(path); os.IsNotExist(err) {\n\t\ts.logger.Debugf(\"failed to find user env secrets in path %s, as path does not exists\", path)\n\t\treturn nil, nil\n\t}\n\tbody, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif err = json.Unmarshal(body, &secretData); err != nil {\n\t\ts.logger.Errorf(\"failed to unmarshal user env secrets, error %v\", err)\n\t\treturn nil, errs.ErrUnMarshalJSON\n\t}\n\n\t// extract secretmap from data map[data: map[secretname:secretvalue]]\n\treturn secretData, nil\n}\n\n// GetOauthSecret parses the oauth secret\nfunc (s *secretParser) GetOauthSecret(path string) (*core.Oauth, error) {\n\to := &core.Oauth{\n\t\tType: core.Bearer,\n\t}\n\tif _, err := os.Lstat(path); os.IsNotExist(err) {\n\t\ts.logger.Errorf(\"failed to find oauth secret in path %s\", path)\n\t\treturn nil, err\n\t}\n\tbody, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif err = json.Unmarshal(body, o); err != nil {\n\t\ts.logger.Errorf(\"failed to unmarshal oauth secret, error %v\", err)\n\t\treturn nil, errs.ErrUnMarshalJSON\n\t}\n\tif o.AccessToken == \"\" {\n\t\treturn nil, 
errs.ErrMissingAccessToken\n\t}\n\t// If tokentype is not basic set it to bearer\n\tif o.Type != core.Basic {\n\t\to.Type = core.Bearer\n\t}\n\n\treturn o, err\n}\n\n// SubstituteSecret replace secret placeholders with their respective values\nfunc (s *secretParser) SubstituteSecret(command string, secretData map[string]string) (string, error) {\n\tmatches := s.secretRegex.FindAllStringSubmatch(command, -1)\n\tif matches == nil {\n\t\treturn command, nil\n\t}\n\tresult := command\n\tfor _, match := range matches {\n\t\tif len(match) < 2 {\n\t\t\treturn \"\", errs.ErrSecretRegexMatch\n\t\t}\n\t\t// validating secret key exists or not\n\t\tif _, ok := secretData[match[1]]; !ok {\n\t\t\ts.logger.Warnf(\"secret with name %s not found in map\", match[0])\n\t\t\tcontinue\n\t\t}\n\t\tresult = strings.ReplaceAll(result, match[0], secretData[match[1]])\n\t}\n\n\treturn result, nil\n}\n\nfunc (s *secretParser) Expired(token *core.Oauth) bool {\n\tif token.RefreshToken == \"\" {\n\t\treturn false\n\t}\n\tif token.Expiry.IsZero() && token.AccessToken != \"\" {\n\t\treturn false\n\t}\n\treturn token.Expiry.Add(-global.ExpiryDelta).\n\t\tBefore(time.Now())\n}\n"
  },
  {
    "path": "pkg/secret/secret_test.go",
    "content": "package secret\n\nimport (\n\t\"errors\"\n\t\"log\"\n\t\"os\"\n\t\"reflect\"\n\t\"regexp\"\n\t\"testing\"\n\t\"time\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n)\n\nfunc TestGetRepoSecret(t *testing.T) {\n\tlogger, err := lumber.NewLogger(lumber.LoggingConfig{EnableConsole: true}, true, lumber.InstanceZapLogger)\n\tif err != nil {\n\t\tlog.Fatalf(\"could not instantiate logger %s\", err.Error())\n\t}\n\tsecretParser := New(logger)\n\n\ttests := []struct {\n\t\tname      string\n\t\tpath      string\n\t\twant      map[string]string\n\t\terrorType error\n\t}{\n\t\t{\"Test for correct file\", \"../../testutils/testdata/secretTestData/secretfile.json\", map[string]string{\"abc\": \"val\", \"xyz\": \"val2\"}, nil},\n\t\t{\"Test for invalid file\", \"../../testutils/testdata/secretTestData/invalidsecretfile.json\", map[string]string{}, errs.ErrUnMarshalJSON},\n\t\t{\"Test for incorrect path\", \"\", nil, os.ErrNotExist},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tgot, err := secretParser.GetRepoSecret(tt.path)\n\t\t\tif err != nil {\n\t\t\t\tif !errors.Is(err, tt.errorType) {\n\t\t\t\t\tt.Error(err)\n\t\t\t\t}\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(got, tt.want) {\n\t\t\t\tt.Errorf(\"expected: %v, got: %v\", tt.want, got)\n\t\t\t\treturn\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc TestGetOauthSecret(t *testing.T) {\n\tlogger, err := lumber.NewLogger(lumber.LoggingConfig{EnableConsole: true}, true, lumber.InstanceZapLogger)\n\tif err != nil {\n\t\tlog.Fatalf(\"could not instantiate logger %s\", err.Error())\n\t}\n\tsecretParser := New(logger)\n\n\toauthToken := core.Oauth{AccessToken: \"token\", Expiry: time.Unix(1645527121, 0), RefreshToken: \"refresh\", Type: core.Bearer}\n\n\ttests := []struct {\n\t\tname      string\n\t\tpath      
string\n\t\twant      *core.Oauth\n\t\terrorType error\n\t}{\n\t\t{\"Test for correct file\", \"../../testutils/testdata/secretTestData/secretOauthFile.json\", &oauthToken, nil},\n\t\t{\"Test for invalid file\", \"../../testutils/testdata/secretTestData/invalidsecretfile.json\", nil, errs.ErrMissingAccessToken},\n\t\t{\"Test for incorrect path\", \"\", nil, os.ErrNotExist},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tgot, err := secretParser.GetOauthSecret(tt.path)\n\t\t\tif err != nil {\n\t\t\t\tif !errors.Is(err, tt.errorType) {\n\t\t\t\t\tt.Error(err)\n\t\t\t\t}\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif got, want := got.AccessToken, tt.want.AccessToken; got != want {\n\t\t\t\tt.Errorf(\"Want access_token %s, got %s\", want, got)\n\t\t\t}\n\t\t\tif got, want := got.Type, tt.want.Type; got != want {\n\t\t\t\tt.Errorf(\"Want type %s, got %s\", want, got)\n\t\t\t}\n\t\t\tif got, want := got.Expiry.Unix(), tt.want.Expiry.Unix(); got != want {\n\t\t\t\tt.Errorf(\"Want expiry %d, got %d\", want, got)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc TestSubstituteSecret(t *testing.T) {\n\tlogger, err := lumber.NewLogger(lumber.LoggingConfig{EnableConsole: true}, true, lumber.InstanceZapLogger)\n\tif err != nil {\n\t\tlog.Fatalf(\"Could not instantiate logger %s\", err.Error())\n\t}\n\n\tsecretParser := New(logger)\n\tvar expressions = []struct {\n\t\tparams    map[string]string\n\t\tinput     string\n\t\toutput    string\n\t\terrorType error\n\t}{\n\t\t// basic\n\t\t{\n\t\t\tparams:    map[string]string{\"token\": \"secret\"},\n\t\t\tinput:     \"${{ secrets.token }}\",\n\t\t\toutput:    \"secret\",\n\t\t\terrorType: nil,\n\t\t},\n\t\t// multiple\n\t\t{\n\t\t\tparams:    map[string]string{\"NPM_TOKEN\": \"secret\", \"TAG\": \"nucleus\"},\n\t\t\tinput:     \"docker build --build-arg NPM_TOKEN=${{ secrets.NPM_TOKEN }} --tag=${{ secrets.TAG }}\",\n\t\t\toutput:    \"docker build --build-arg NPM_TOKEN=secret --tag=nucleus\",\n\t\t\terrorType: 
nil,\n\t\t},\n\t\t// no match\n\t\t{\n\t\t\tparams:    map[string]string{\"clone_token\": \"secret\"},\n\t\t\tinput:     \"${{ secrets.token }}\",\n\t\t\toutput:    \"${{ secrets.token }}\",\n\t\t\terrorType: nil,\n\t\t},\n\t}\n\n\tfor _, expr := range expressions {\n\t\tt.Run(expr.input, func(t *testing.T) {\n\t\t\tt.Logf(expr.input)\n\t\t\toutput, err := secretParser.SubstituteSecret(expr.input, expr.params)\n\t\t\tif err != nil {\n\t\t\t\tif expr.errorType != nil {\n\t\t\t\t\tif err.Error() != expr.errorType.Error() {\n\t\t\t\t\t\tt.Errorf(\"Want error %q expanded but got error %q\", expr.errorType, err)\n\t\t\t\t\t\treturn\n\t\t\t\t\t}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tt.Errorf(\"Want %q expanded but got error %q\", expr.input, err)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tif output != expr.output {\n\t\t\t\tt.Errorf(\"Want %q expanded to %q, got %q\",\n\t\t\t\t\texpr.input,\n\t\t\t\t\texpr.output,\n\t\t\t\t\toutput)\n\t\t\t}\n\t\t})\n\t}\n}\n\n//nolint:funlen\nfunc TestExpired(t *testing.T) {\n\tlogger, err := lumber.NewLogger(lumber.LoggingConfig{EnableConsole: true}, true, lumber.InstanceZapLogger)\n\tif err != nil {\n\t\tlog.Fatalf(\"Could not instantiate logger %s\", err.Error())\n\t}\n\n\ttype fields struct {\n\t\tlogger      lumber.Logger\n\t\tsecretRegex *regexp.Regexp\n\t}\n\ttype args struct {\n\t\ttoken *core.Oauth\n\t}\n\ttests := []struct {\n\t\tname   string\n\t\tfields fields\n\t\targs   args\n\t\twant   bool\n\t}{\n\t\t{\n\t\t\tname: \"Missing Refresh Token\",\n\t\t\tfields: fields{\n\t\t\t\tlogger:      logger,\n\t\t\t\tsecretRegex: regexp.MustCompile(global.SecretRegex),\n\t\t\t},\n\t\t\targs: args{\n\t\t\t\ttoken: &core.Oauth{\n\t\t\t\t\tAccessToken:  \"54321\",\n\t\t\t\t\tRefreshToken: \"\",\n\t\t\t\t\tExpiry:       time.Now().Add(-time.Hour)},\n\t\t\t},\n\t\t\twant: false,\n\t\t},\n\t\t{\n\t\t\tname: \"Missing Access Token\",\n\t\t\tfields: fields{\n\t\t\t\tlogger:      logger,\n\t\t\t\tsecretRegex: 
regexp.MustCompile(global.SecretRegex),\n\t\t\t},\n\t\t\targs: args{\n\t\t\t\ttoken: &core.Oauth{\n\t\t\t\t\tAccessToken:  \"\",\n\t\t\t\t\tRefreshToken: \"54321\"},\n\t\t\t},\n\t\t\twant: true,\n\t\t},\n\t\t{\n\t\t\tname: \"Missing Time\",\n\t\t\tfields: fields{\n\t\t\t\tlogger:      logger,\n\t\t\t\tsecretRegex: regexp.MustCompile(global.SecretRegex),\n\t\t\t},\n\t\t\targs: args{\n\t\t\t\ttoken: &core.Oauth{\n\t\t\t\t\tAccessToken:  \"12345\",\n\t\t\t\t\tRefreshToken: \"54321\"},\n\t\t\t},\n\t\t\twant: false,\n\t\t},\n\t\t{\n\t\t\tname: \"Token Valid\",\n\t\t\tfields: fields{\n\t\t\t\tlogger:      logger,\n\t\t\t\tsecretRegex: regexp.MustCompile(global.SecretRegex),\n\t\t\t},\n\t\t\targs: args{\n\t\t\t\ttoken: &core.Oauth{\n\t\t\t\t\tAccessToken:  \"12345\",\n\t\t\t\t\tRefreshToken: \"54321\",\n\t\t\t\t\tExpiry:       time.Now().Add(time.Hour)},\n\t\t\t},\n\t\t\twant: false,\n\t\t},\n\t\t{\n\t\t\tname: \"Token Expire\",\n\t\t\tfields: fields{\n\t\t\t\tlogger:      logger,\n\t\t\t\tsecretRegex: regexp.MustCompile(global.SecretRegex),\n\t\t\t},\n\t\t\targs: args{\n\t\t\t\ttoken: &core.Oauth{\n\t\t\t\t\tAccessToken:  \"12345\",\n\t\t\t\t\tRefreshToken: \"54321\",\n\t\t\t\t\tExpiry:       time.Now().Add(-time.Second)},\n\t\t\t},\n\t\t\twant: true,\n\t\t},\n\t\t{\n\t\t\tname: \"Token not Expiredn but in expiry buffer\",\n\t\t\tfields: fields{\n\t\t\t\tlogger:      logger,\n\t\t\t\tsecretRegex: regexp.MustCompile(global.SecretRegex),\n\t\t\t},\n\t\t\targs: args{\n\t\t\t\ttoken: &core.Oauth{\n\t\t\t\t\tAccessToken:  \"12345\",\n\t\t\t\t\tRefreshToken: \"54321\",\n\t\t\t\t\tExpiry:       time.Now().Add(time.Second * 600)},\n\t\t\t},\n\t\t\twant: true,\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\ts := &secretParser{\n\t\t\t\tlogger:      tt.fields.logger,\n\t\t\t\tsecretRegex: tt.fields.secretRegex,\n\t\t\t}\n\t\t\tif got := s.Expired(tt.args.token); got != tt.want {\n\t\t\t\tt.Errorf(\"secretParser.Expired() = %v, want %v\", 
got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "pkg/secrets/secrets.go",
    "content": "package secrets\n\nimport (\n\t\"encoding/base64\"\n\t\"encoding/json\"\n\t\"errors\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\terrs \"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n)\n\ntype secertManager struct {\n\tlogger lumber.Logger\n\tcfg    *config.SynapseConfig\n}\n\n// New returns new secretManager\nfunc New(cfg *config.SynapseConfig, logger lumber.Logger) core.SecretsManager {\n\treturn &secertManager{\n\t\tlogger: logger,\n\t\tcfg:    cfg,\n\t}\n}\n\nfunc (s *secertManager) GetLambdatestSecrets() *config.LambdatestConfig {\n\treturn &s.cfg.Lambdatest\n}\n\n// GetSynapseName returns the name of synapse if mentioned in config\nfunc (s *secertManager) GetSynapseName() string {\n\treturn s.cfg.Name\n}\n\nfunc (s *secertManager) GetGitSecretBytes() ([]byte, error) {\n\tgitSecrets := core.Secret{\n\t\t\"access_token\":  s.cfg.Git.Token,\n\t\t\"expiry\":        \"0001-01-01T00:00:00Z\",\n\t\t\"refresh_token\": \"\",\n\t\t\"token_type\":    s.cfg.Git.TokenType,\n\t}\n\tgitSecretsJSON, err := json.Marshal(gitSecrets)\n\tif err != nil {\n\t\treturn []byte{}, errs.ERR_JSON_MAR(err.Error())\n\t}\n\treturn gitSecretsJSON, nil\n}\n\nfunc (s *secertManager) GetRepoSecretBytes(repo string) ([]byte, error) {\n\tval, ok := s.cfg.RepoSecrets[repo]\n\tif !ok {\n\t\treturn []byte{}, errors.New(\"no secrets found in configuration file\")\n\t}\n\n\trepoSecretsJSON, err := json.Marshal(val)\n\tif err != nil {\n\t\treturn []byte{}, errs.ERR_JSON_MAR(err.Error())\n\t}\n\treturn repoSecretsJSON, nil\n}\n\nfunc (s *secertManager) GetDockerSecrets(r *core.RunnerOptions) (core.ContainerImageConfig, error) {\n\tcontainerImageConfig := core.ContainerImageConfig{}\n\tcontainerImageConfig.Mode = s.cfg.ContainerRegistry.Mode\n\tcontainerImageConfig.Image = r.DockerImage\n\tcontainerImageConfig.PullPolicy = s.cfg.ContainerRegistry.PullPolicy\n\t/*\n\t\tIn 
parsing mode use default public container\n\t*/\n\tif r.PodType != core.NucleusPod {\n\t\treturn containerImageConfig, nil\n\t}\n\t/*\n\t\t\t1. if mode is public then no need to build AuthRegistry\n\t\t \t2. PullPolicy is set to never, then we assume docker image is being pulled manually by user\n\t*/\n\tif s.cfg.ContainerRegistry.Mode == config.PublicMode || s.cfg.ContainerRegistry.PullPolicy == config.PullNever {\n\t\treturn containerImageConfig, nil\n\t}\n\t// for private repo check whether creds are empty\n\tif s.cfg.ContainerRegistry.Username == \"\" || s.cfg.ContainerRegistry.Password == \"\" {\n\t\treturn containerImageConfig, errs.CR_AUTH_NF\n\t}\n\tjsonBytes, _ := json.Marshal(map[string]string{\n\t\t\"username\": s.cfg.ContainerRegistry.Username,\n\t\t\"password\": s.cfg.ContainerRegistry.Password,\n\t})\n\tcontainerImageConfig.AuthRegistry = base64.StdEncoding.EncodeToString(jsonBytes)\n\treturn containerImageConfig, nil\n}\n\nfunc (s *secertManager) GetOauthToken() *core.Oauth {\n\treturn &core.Oauth{\n\t\tAccessToken: s.cfg.Git.Token,\n\t\tType:        core.TokenType(s.cfg.Git.TokenType),\n\t}\n}\n"
  },
  {
    "path": "pkg/secrets/secrets_test.go",
    "content": "package secrets\n\nimport (\n\t\"fmt\"\n\t\"os\"\n\t\"testing\"\n\n\t\"github.com/stretchr/testify/assert\"\n)\n\nfunc removeCreatedPath(path string) {\n\terr := os.RemoveAll(path)\n\tif err != nil {\n\t\tfmt.Println(\"error in removing!!\")\n\t}\n}\n\nfunc TestGetLambdatestSecrets(t *testing.T) {\n\tlambdatestSecrets := secretsManager.GetLambdatestSecrets()\n\tassert.Equal(t, \"dummysecretkey\", lambdatestSecrets.SecretKey)\n}\n"
  },
  {
    "path": "pkg/secrets/setup_test.go",
    "content": "package secrets\n\nimport (\n\t\"os\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/tests\"\n)\n\nvar cfg *config.SynapseConfig\nvar secretsManager core.SecretsManager\n\nconst testdDataDir = \"./testdata\"\n\nfunc TestMain(m *testing.M) {\n\tcfg = tests.MockConfig()\n\tlogger, err := lumber.NewLogger(cfg.LogConfig, cfg.Verbose, lumber.InstanceZapLogger)\n\t// TODO: check proper way to collect error\n\tif err != nil {\n\t\treturn\n\t}\n\n\tsecretsManager = New(cfg, logger)\n\tos.Exit(m.Run())\n}\n"
  },
  {
    "path": "pkg/server/setup.go",
    "content": "package server\n\nimport (\n\t\"context\"\n\t\"net/http\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/api\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/gin-gonic/gin\"\n)\n\n// ListenAndServe initializes a server to respond to HTTP network requests.\nfunc ListenAndServe(ctx context.Context, router api.Router, config *config.NucleusConfig, logger lumber.Logger) error {\n\n\t// set gin to release mode\n\tgin.SetMode(gin.ReleaseMode)\n\n\tlogger.Infof(\"Setting up http handler\")\n\n\terrChan := make(chan error)\n\n\t// HTTP server instance\n\tsrv := &http.Server{\n\t\tAddr:    \":\" + config.Port,\n\t\tHandler: router.Handler(),\n\t}\n\n\t// channel to signal server process exit\n\tdone := make(chan struct{})\n\tgo func() {\n\t\tlogger.Infof(\"Starting server on port %s\", config.Port)\n\t\t// service connections\n\t\tif err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed {\n\t\t\tlogger.Errorf(\"listen: %#v\", err)\n\t\t\terrChan <- err\n\t\t}\n\t}()\n\n\tselect {\n\tcase <-ctx.Done():\n\t\tlogger.Infof(\"Caller has requested graceful shutdown. shutting down the server\")\n\t\tif err := srv.Shutdown(ctx); err != nil {\n\t\t\tlogger.Errorf(\"Server Shutdown:\", \"error\", err)\n\t\t}\n\t\treturn nil\n\tcase err := <-errChan:\n\t\treturn err\n\tcase <-done:\n\t\treturn nil\n\t}\n\n}\n"
  },
  {
    "path": "pkg/service/coverage/coverage.go",
    "content": "package coverage\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io\"\n\t\"io/fs\"\n\t\"io/ioutil\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"os\"\n\t\"path\"\n\t\"path/filepath\"\n\t\"strings\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"golang.org/x/sync/errgroup\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/fileutils\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n)\n\nconst (\n\tcoverageJSONFileName = \"coverage-final.json\"\n\tmergedcoverageJSON   = \"coverage-merged.json\"\n\tcompressedFileName   = \"coverage-files.tzst\"\n\tmanifestJSONFileName = \"manifest.json\"\n\tcoverageFilePath     = \"/scripts/mapCoverage.js\"\n)\n\ntype codeCoverageService struct {\n\tlogger               lumber.Logger\n\texecManager          core.ExecutionManager\n\tcodeCoveragParentDir string\n\tazureClient          core.AzureClient\n\tzstd                 core.ZstdCompressor\n\thttpClient           http.Client\n\tendpoint             string\n}\n\n// New returns a new instance of CoverageService\nfunc New(execManager core.ExecutionManager,\n\tazureClient core.AzureClient,\n\tzstd core.ZstdCompressor,\n\tcfg *config.NucleusConfig,\n\tlogger lumber.Logger) (core.CoverageService, error) {\n\t// if coverage mode not enabled do not initialize the service\n\tif !cfg.CoverageMode {\n\t\treturn nil, nil\n\t}\n\tif _, err := os.Lstat(global.CodeCoverageDir); os.IsNotExist(err) {\n\t\treturn nil, errors.New(\"coverage directory not mounted\")\n\t}\n\treturn &codeCoverageService{\n\t\tlogger:               logger,\n\t\texecManager:          execManager,\n\t\tazureClient:          azureClient,\n\t\tzstd:                 zstd,\n\t\tcodeCoveragParentDir: global.CodeCoverageDir,\n\t\tendpoint:             global.NeuronHost + \"/coverage\",\n\t\thttpClient: http.Client{\n\t\t\tTimeout: 
global.DefaultAPITimeout,\n\t\t}}, nil\n\n}\n\n// mergeCodeCoverageFiles merge all the coverage.json into single entity\nfunc (c *codeCoverageService) mergeCodeCoverageFiles(ctx context.Context, commitDir, coverageManifestPath string, threshold bool) error {\n\tif _, err := os.Lstat(commitDir); os.IsNotExist(err) {\n\t\tc.logger.Errorf(\"coverage files not found, skipping merge\")\n\t\treturn nil\n\t}\n\n\tcoverageFiles := make([]string, 0)\n\tif err := filepath.WalkDir(commitDir, func(path string, d fs.DirEntry, err error) error {\n\t\t// add all individual coverage json files\n\t\tif d.Name() == coverageJSONFileName {\n\t\t\tcoverageFiles = append(coverageFiles, path)\n\t\t}\n\t\treturn nil\n\t}); err != nil {\n\t\treturn err\n\t}\n\n\tif len(coverageFiles) < 1 {\n\t\treturn errors.New(\"no coverage dirs found\")\n\t}\n\n\tcommand := fmt.Sprintf(\"/scripts/node_modules/.bin/babel-node %s --commitDir %s --coverageFiles '%s'\",\n\t\tcoverageFilePath, commitDir, strings.Join(coverageFiles, \" \"))\n\tif threshold {\n\t\tcommand = fmt.Sprintf(\"%s --coverageManifest %s\", command, coverageManifestPath)\n\t}\n\tcommands := []string{command}\n\treturn c.execManager.ExecuteInternalCommands(ctx, core.CoverageMerge, commands, \"\", nil, nil)\n}\n\n// MergeAndUpload compress the file and upload in azure blob\nfunc (c *codeCoverageService) MergeAndUpload(ctx context.Context, payload *core.Payload) error {\n\tvar parentCommitDir, repoDir string\n\tvar g errgroup.Group\n\t// change variable name\n\trepoDir = filepath.Join(c.codeCoveragParentDir, payload.OrgID, payload.RepoID)\n\trepoBlobPath := path.Join(payload.GitProvider, payload.OrgID, payload.RepoID)\n\n\t// skip downloading if parent commit does not exists for the repository\n\tif payload.ParentCommitCoverageExists {\n\t\tcoverage, err := c.getParentCommitCoverageDir(payload.RepoID, payload.BuildBaseCommit)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif err = c.downloadAndDecompressParentCommitDir(ctx, coverage, 
repoDir); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tparentCommitDir = filepath.Join(repoDir, coverage.ParentCommit)\n\t}\n\tcoveragePayload := make([]coverageData, 0, len(payload.Commits))\n\n\tfor _, commit := range payload.Commits {\n\t\tcommitDir := filepath.Join(repoDir, commit.Sha)\n\t\tc.logger.Debugf(\"commit directory %s\", commitDir)\n\n\t\tif _, err := os.Lstat(commitDir); os.IsNotExist(err) {\n\t\t\tc.logger.Errorf(\"code coverage directory not found commit id %s\", commit.Sha)\n\t\t\treturn err\n\t\t}\n\t\tcoverageManifestPath := filepath.Join(commitDir, manifestJSONFileName)\n\n\t\tmanifestPayload, err := c.parseManifestFile(coverageManifestPath)\n\t\tif err != nil {\n\t\t\tc.logger.Errorf(\"failed to parse manifest file: %s, error :%v\", commitDir, err)\n\t\t\treturn err\n\t\t}\n\t\t//skip copy of parent directory if all test files executed\n\t\tif !manifestPayload.AllFilesExecuted {\n\t\t\tif err := c.copyFromParentCommitDir(parentCommitDir, commitDir, manifestPayload.Removedfiles...); err != nil {\n\t\t\t\tc.logger.Errorf(\"failed to copy coverage files from %s to %s, error :%v\", parentCommitDir, commitDir, err)\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\tthresholdEnabled := false\n\t\tif manifestPayload.CoverageThreshold != nil {\n\t\t\tthresholdEnabled = true\n\t\t}\n\t\tif err := c.mergeCodeCoverageFiles(ctx, commitDir, coverageManifestPath, thresholdEnabled); err != nil {\n\t\t\tc.logger.Errorf(\"failed to merge coverage files %v\", err)\n\t\t\treturn err\n\t\t}\n\t\tc.logger.Debugf(\"compressed file name %v\", compressedFileName)\n\n\t\tg.Go(func() error {\n\t\t\tif err := c.zstd.Compress(ctx, compressedFileName, false, repoDir, commit.Sha); err != nil {\n\t\t\t\tc.logger.Errorf(\"failed to compress coverage files %v\", err)\n\t\t\t\treturn err\n\t\t\t}\n\t\t\t_, err := c.uploadFile(ctx, repoBlobPath, compressedFileName, commit.Sha)\n\t\t\treturn err\n\t\t})\n\n\t\tvar blobURL string\n\t\tg.Go(func() error {\n\t\t\tblobURL, err = 
c.uploadFile(ctx, repoBlobPath, filepath.Join(commitDir, mergedcoverageJSON), commit.Sha)\n\t\t\treturn err\n\t\t})\n\n\t\tvar totalCoverage json.RawMessage\n\t\tg.Go(func() error {\n\t\t\ttotalCoverage, err = c.getTotalCoverage(filepath.Join(commitDir, mergedcoverageJSON))\n\t\t\treturn err\n\t\t})\n\t\tif err = g.Wait(); err != nil {\n\t\t\tc.logger.Errorf(\"failed to upload files to azure blob %v\", err)\n\t\t\treturn err\n\t\t}\n\t\tblobURL = strings.TrimSuffix(blobURL, fmt.Sprintf(\"/%s\", mergedcoverageJSON))\n\t\tcoveragePayload = append(coveragePayload, coverageData{BuildID: payload.BuildID, RepoID: payload.RepoID, CommitID: commit.Sha, BlobLink: blobURL, TotalCoverage: totalCoverage})\n\t\t//current commit dir becomes parent for next commit\n\t\tparentCommitDir = commitDir\n\t}\n\treturn c.sendCoverageData(coveragePayload)\n}\n\nfunc (c *codeCoverageService) uploadFile(ctx context.Context, blobPath, filename, commitID string) (blobURL string, err error) {\n\tfile, err := os.Open(filename)\n\tif err != nil {\n\t\treturn\n\t}\n\tdefer file.Close()\n\n\tmimeType := \"application/json\"\n\tif filepath.Ext(filename) == \".tzst\" {\n\t\tmimeType = \"application/zstd\"\n\t}\n\tblobURL, err = c.azureClient.Create(ctx, fmt.Sprintf(\"%s/%s/%s\", blobPath, commitID, filepath.Base(filename)), file, mimeType)\n\treturn\n}\n\nfunc (c *codeCoverageService) parseManifestFile(filepath string) (core.CoverageManifest, error) {\n\tmanifestPayload := core.CoverageManifest{}\n\tif _, err := os.Lstat(filepath); os.IsNotExist(err) {\n\t\tc.logger.Errorf(\"manifest file not found in path %s\", filepath)\n\t\treturn manifestPayload, err\n\t}\n\tbody, err := ioutil.ReadFile(filepath)\n\tif err != nil {\n\t\treturn manifestPayload, err\n\t}\n\n\terr = json.Unmarshal(body, &manifestPayload)\n\treturn manifestPayload, err\n}\n\nfunc (c *codeCoverageService) downloadAndDecompressParentCommitDir(ctx context.Context, coverage parentCommitCoverage, repoDir string) error {\n\tu, err := 
url.Parse(coverage.Bloblink)\n\tif err != nil {\n\t\tc.logger.Errorf(\"failed to parse blob link %s, error :%v\", coverage.Bloblink, err)\n\t\treturn err\n\t}\n\tu.Path = path.Join(u.Path, compressedFileName)\n\treq, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tresp, err := c.httpClient.Do(req)\n\tif err != nil {\n\t\tc.logger.Errorf(\"error while making http request %v\", err)\n\t\treturn err\n\t}\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode != http.StatusOK {\n\t\treturn fmt.Errorf(\"non 200 status while cloning from endpoint %s, status %d \", u.String(), resp.StatusCode)\n\t}\n\n\tparentCommitFilePath := filepath.Join(repoDir, coverage.ParentCommit+\".tzst\")\n\tc.logger.Debugf(\"parent commit file path %s\", parentCommitFilePath)\n\tout, err := os.Create(parentCommitFilePath)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer out.Close()\n\n\tif _, err := io.Copy(out, resp.Body); err != nil {\n\t\treturn err\n\t}\n\n\t// decompress the file in temp directory as we cannot decompress inside azure file volume\n\tif err := c.zstd.Decompress(ctx, parentCommitFilePath, false, os.TempDir()); err != nil {\n\t\tc.logger.Errorf(\"failed to decompress parent commit directory %v\", err)\n\t\treturn err\n\t}\n\n\tsrcPath := filepath.Join(os.TempDir(), coverage.ParentCommit)\n\tdestPath := filepath.Join(repoDir, coverage.ParentCommit)\n\t// copy the coverage directories to shared volume,\n\t// chmod is not allowed inside azure file volume so that is skipped Ref: https://stackoverflow.com/questions/58301985/permissions-on-azure-file\n\tif err := fileutils.CopyDir(srcPath, destPath, false); err != nil {\n\t\tc.logger.Errorf(\"failed to copy directory from src %s to dest %s, error %v\", srcPath, destPath, err)\n\t\treturn err\n\t}\n\treturn nil\n}\n\nfunc (c *codeCoverageService) copyFromParentCommitDir(parentCommitDir, commitDir string, removedFiles ...string) error {\n\tif _, err := 
os.Lstat(parentCommitDir); os.IsNotExist(err) {\n\t\tc.logger.Errorf(\"Parent Commit Directory %s not found\", parentCommitDir)\n\t\treturn err\n\t}\n\tif err := filepath.WalkDir(parentCommitDir, func(path string, info fs.DirEntry, err error) error {\n\t\tif info.IsDir() && info.Name() != filepath.Base(parentCommitDir) {\n\t\t\tif len(removedFiles) > 0 {\n\t\t\t\tfor index, removedfile := range removedFiles {\n\t\t\t\t\t//if testfile is now removed don't copy to current commit directory\n\t\t\t\t\tif info.Name() == removedfile {\n\t\t\t\t\t\t//remove file from slice\n\t\t\t\t\t\tremovedFiles = append(removedFiles[:index], removedFiles[index+1:]...)\n\t\t\t\t\t\treturn filepath.SkipDir\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\ttestfileDir := filepath.Join(commitDir, info.Name())\n\n\t\t\t//TODO: check if copied dir size is not 0\n\t\t\t//if file already exists then don't copy from parent directory\n\t\t\tif _, err := os.Lstat(testfileDir); os.IsNotExist(err) {\n\t\t\t\tif err := fileutils.CopyDir(path, testfileDir, false); err != nil {\n\t\t\t\t\tc.logger.Errorf(\"failed to copy directory from src %s to dest %s, error %v\", path, testfileDir, err)\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t\t//all files copied now we can move next sub directory\n\t\t\treturn filepath.SkipDir\n\t\t}\n\t\treturn nil\n\t}); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}\n\nfunc (c *codeCoverageService) getParentCommitCoverageDir(repoID, commitID string) (coverage parentCommitCoverage, err error) {\n\tu, err := url.Parse(c.endpoint)\n\tif err != nil {\n\t\tc.logger.Errorf(\"error while parsing endpoint %s, %v\", c.endpoint, err)\n\t\treturn coverage, err\n\t}\n\tq := u.Query()\n\tq.Set(\"repoID\", repoID)\n\tq.Set(\"commitID\", commitID)\n\tu.RawQuery = q.Encode()\n\n\treq, err := http.NewRequest(http.MethodGet, u.String(), nil)\n\tif err != nil {\n\t\tc.logger.Errorf(\"failed to create new request %v\", err)\n\t\treturn coverage, err\n\t}\n\n\tresp, err := 
c.httpClient.Do(req)\n\n\tif err != nil {\n\t\tc.logger.Errorf(\"error while getting coverage details for parent commitID %s, %v\", commitID, err)\n\t\treturn coverage, err\n\t}\n\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode != http.StatusOK {\n\t\tc.logger.Errorf(\"error while getting coverage data, status_code %d\", resp.StatusCode)\n\t\treturn coverage, errors.New(\"non 200 status\")\n\t}\n\tpayload := parentCommitCoverage{}\n\tdecode := json.NewDecoder(resp.Body)\n\n\tif err := decode.Decode(&payload); err != nil {\n\t\tc.logger.Errorf(\"failed to decode response body %v\", err)\n\t\treturn coverage, err\n\t}\n\tc.logger.Infof(\"Got parent directory bloblink %s, commitID:%s\", payload.Bloblink, payload.ParentCommit)\n\n\treturn payload, nil\n}\n\nfunc (c *codeCoverageService) sendCoverageData(payload []coverageData) error {\n\treqBody, err := json.Marshal(payload)\n\tif err != nil {\n\t\tc.logger.Errorf(\"failed to marshal request body %v\", err)\n\t\treturn err\n\t}\n\n\treq, err := http.NewRequest(http.MethodPost, c.endpoint, bytes.NewBuffer(reqBody))\n\tif err != nil {\n\t\tc.logger.Errorf(\"failed to create new request %v\", err)\n\t\treturn err\n\t}\n\n\tresp, err := c.httpClient.Do(req)\n\n\tif err != nil {\n\t\tc.logger.Errorf(\"error while sending coverage data %v\", err)\n\t\treturn err\n\t}\n\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode != http.StatusOK {\n\t\tc.logger.Errorf(\"error while sending coverage data, status code %d\", resp.StatusCode)\n\t\treturn errors.New(\"non 200 status\")\n\t}\n\treturn nil\n}\n\nfunc (c *codeCoverageService) getTotalCoverage(filepath string) (json.RawMessage, error) {\n\tif _, err := os.Lstat(filepath); os.IsNotExist(err) {\n\t\tc.logger.Errorf(\"coverage summary file not found in path %s\", filepath)\n\t\treturn nil, err\n\t}\n\tbody, err := ioutil.ReadFile(filepath)\n\tif err != nil {\n\t\tc.logger.Errorf(\"failed to read coverage summary json, error: %v\", err)\n\t\treturn nil, err\n\t}\n\n\tvar 
payload map[string]json.RawMessage\n\tif err = json.Unmarshal(body, &payload); err != nil {\n\t\tc.logger.Errorf(\"failed to unmarshal coverage summary json, error: %v\", err)\n\t\treturn nil, err\n\t}\n\n\ttotalCoverage, ok := payload[\"total\"]\n\tif !ok {\n\t\tc.logger.Errorf(\"total coverage summary not found in map\")\n\t\treturn nil, errors.New(\"total coverage summary not found in map\")\n\t}\n\treturn totalCoverage, nil\n}\n"
  },
  {
    "path": "pkg/service/coverage/coverage_test.go",
    "content": "package coverage\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"io\"\n\t\"net/http\"\n\t\"net/http/httptest\"\n\t\"os\"\n\t\"path/filepath\"\n\t\"reflect\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/mocks\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/testutils\"\n\t\"github.com/stretchr/testify/mock\"\n)\n\nfunc Test_codeCoverageService_mergeCodeCoverageFiles(t *testing.T) {\n\tlogger, execManager, azureClient, zstdCompressor := initialiseArgs()\n\n\tvar commandType core.CommandType\n\tvar commands []string\n\texecManager.On(\"ExecuteInternalCommands\",\n\t\tmock.AnythingOfType(\"*context.emptyCtx\"),\n\t\tmock.AnythingOfType(\"core.CommandType\"),\n\t\tmock.AnythingOfType(\"[]string\"),\n\t\tmock.AnythingOfType(\"string\"),\n\t\tmock.AnythingOfType(\"map[string]string\"),\n\t\tmock.AnythingOfType(\"map[string]string\")).Return(\n\t\tfunc(ctx context.Context, commType core.CommandType, comm []string,\n\t\t\tcwd string, envMap, secretData map[string]string) error {\n\t\t\tcommandType = commType\n\t\t\tcommands = comm\n\t\t\treturn nil\n\t\t},\n\t)\n\tcoverageFiles := \"../../../testutils/testdata/coverage/coverage-final.json ../../../testutils/testdata/coverage/sample/coverage-final.json\"\n\tcommitDir := \"../../../testutils/testdata\"\n\tcoverageManifestPath := \"../../../testutils/testdata/coverage\"\n\n\ttype args struct {\n\t\tctx                  context.Context\n\t\tcommitDir            string\n\t\tcoverageManifestPath string\n\t\tthreshold            bool\n\t}\n\ttype expected struct {\n\t\tcommandType core.CommandType\n\t\tcommands    []string\n\t\tcwd         string\n\t\tenvMap      map[string]string\n\t\tsecretData  map[string]string\n\t}\n\ttests := []struct {\n\t\tname     string\n\t\targs     args\n\t\twantErr  bool\n\t\texpected 
expected\n\t}{\n\t\t{\"Test\",\n\t\t\targs{\n\t\t\t\tctx:                  context.TODO(),\n\t\t\t\tcommitDir:            commitDir,\n\t\t\t\tcoverageManifestPath: coverageManifestPath,\n\t\t\t\tthreshold:            true,\n\t\t\t},\n\t\t\tfalse,\n\t\t\texpected{\n\t\t\t\tcommandType: core.CoverageMerge,\n\t\t\t\tcommands: []string{\n\t\t\t\t\tfmt.Sprintf(\"/scripts/node_modules/.bin/babel-node %s --commitDir %s --coverageFiles '%s' --coverageManifest %s\",\n\t\t\t\t\t\tcoverageFilePath, commitDir, coverageFiles, coverageManifestPath),\n\t\t\t\t},\n\t\t\t\tcwd:        \"\",\n\t\t\t\tenvMap:     nil,\n\t\t\t\tsecretData: nil,\n\t\t\t},\n\t\t},\n\t}\n\n\tc := newCodeCoverageService(logger, execManager, coverageManifestPath, azureClient, zstdCompressor, \"endpoint\")\n\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\terr := c.mergeCodeCoverageFiles(tt.args.ctx, tt.args.commitDir, tt.args.coverageManifestPath, tt.args.threshold)\n\t\t\tif err != nil != tt.wantErr {\n\t\t\t\tt.Errorf(\"codeCoverageService.mergeCodeCoverageFiles() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif commandType != tt.expected.commandType || !reflect.DeepEqual(commands, tt.expected.commands) {\n\t\t\t\tt.Errorf(\"Received commandType: %v, commands: %v\\nexpected commandType: %v, commands: %v\",\n\t\t\t\t\tcommandType, commands, tt.expected.commandType, tt.expected.commands)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc Test_codeCoverageService_uploadFile(t *testing.T) {\n\tlogger, execManager, azureClient, zstdCompressor := initialiseArgs()\n\n\tvar calledArgs string\n\tazureClient.On(\"Create\",\n\t\tmock.AnythingOfType(\"*context.emptyCtx\"),\n\t\tmock.AnythingOfType(\"string\"),\n\t\tmock.AnythingOfType(\"*os.File\"),\n\t\tmock.AnythingOfType(\"string\")).Return(\n\t\tfunc(ctx context.Context, path string, reader io.Reader, mimeType string) string {\n\t\t\tst, _ := io.ReadAll(reader)\n\t\t\tcalledArgs = fmt.Sprintf(\"%v %v %v\", path, 
string(st), mimeType)\n\t\t\treturn \"blobURL\"\n\t\t},\n\t\tfunc(ctx context.Context, path string, reader io.Reader, mimeType string) error {\n\t\t\treturn nil\n\t\t},\n\t)\n\n\ttype args struct {\n\t\tctx      context.Context\n\t\tblobPath string\n\t\tfilename string\n\t\tcommitID string\n\t}\n\ttests := []struct {\n\t\tname        string\n\t\targs        args\n\t\twantBlobURL string\n\t\twantArgs    string\n\t\twantErr     bool\n\t}{\n\t\t{\"Test uploadFile\",\n\t\t\targs{\n\t\t\t\tctx:      context.TODO(),\n\t\t\t\tblobPath: \"blobpath\",\n\t\t\t\tfilename: \"../../../testutils/testdata/coverage/coverage-final.json\",\n\t\t\t\tcommitID: \"cID\",\n\t\t\t},\n\t\t\t\"blobURL\",\n\t\t\t`blobpath/cID/coverage-final.json {\n    \"cover1\" : \"f1\"\n} application/json`,\n\t\t\tfalse,\n\t\t},\n\t}\n\tc := newCodeCoverageService(logger, execManager, \"\", azureClient, zstdCompressor, \"\")\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tgotBlobURL, err := c.uploadFile(tt.args.ctx, tt.args.blobPath, tt.args.filename, tt.args.commitID)\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"codeCoverageService.uploadFile() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif gotBlobURL != tt.wantBlobURL {\n\t\t\t\tt.Errorf(\"codeCoverageService.uploadFile() = %v, want %v\", gotBlobURL, tt.wantBlobURL)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif tt.wantArgs != calledArgs {\n\t\t\t\tt.Errorf(\"Expected: \\n%v\\nreceived: \\n%v\", tt.wantArgs, calledArgs)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc Test_codeCoverageService_parseManifestFile(t *testing.T) {\n\tlogger, execManager, azureClient, zstdCompressor := initialiseArgs()\n\n\ttype args struct {\n\t\tfilepath string\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\targs    args\n\t\twant    core.CoverageManifest\n\t\twantErr bool\n\t}{\n\t\t{\"Test parseManifestFile for success\",\n\t\t\targs{filepath: 
\"../../../testutils/testdata/coverage/coverage-final.json\"},\n\t\t\tcore.CoverageManifest{},\n\t\t\tfalse,\n\t\t},\n\t\t{\"Test parseManifestFile\",\n\t\t\targs{filepath: \"../../../testutils/testdata/coverage/dne.json\"},\n\t\t\tcore.CoverageManifest{},\n\t\t\ttrue,\n\t\t},\n\t}\n\tc := newCodeCoverageService(logger, execManager, \"\", azureClient, zstdCompressor, \"\")\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tgot, err := c.parseManifestFile(tt.args.filepath)\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"codeCoverageService.parseManifestFile() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(got, tt.want) {\n\t\t\t\tt.Errorf(\"codeCoverageService.parseManifestFile() = %v, want %v\", got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc Test_codeCoverageService_downloadAndDecompressParentCommitDir(t *testing.T) {\n\tserver := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tif r.URL.Path != \"/coverage-files.tzst\" {\n\t\t\tt.Errorf(\"Expected to request '/coverage-files.tzst', got: %v\", r.URL)\n\t\t\treturn\n\t\t}\n\t\tw.WriteHeader(200)\n\t}))\n\tdefer server.Close()\n\n\tlogger, execManager, azureClient, zstdCompressor := initialiseArgs()\n\tzstdCompressor.On(\"Decompress\",\n\t\tmock.AnythingOfType(\"*context.emptyCtx\"),\n\t\tmock.AnythingOfType(\"string\"),\n\t\tfalse,\n\t\tmock.AnythingOfType(\"string\")).Return(\n\t\tfunc(ctx context.Context, filePath string, preservePath bool, workingDirectory string) error {\n\t\t\treturn nil\n\t\t},\n\t)\n\n\ttype args struct {\n\t\tctx      context.Context\n\t\tcoverage parentCommitCoverage\n\t\trepoDir  string\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\targs    args\n\t\twantErr bool\n\t}{\n\t\t// TODO: Add success case, currently on local tempdir can't be created\n\t\t{\"Test downloadAndDecompressParentCommitDir\",\n\t\t\targs{\n\t\t\t\tctx:      
context.TODO(),\n\t\t\t\tcoverage: parentCommitCoverage{Bloblink: server.URL, ParentCommit: \"parentCommit\"},\n\t\t\t\trepoDir:  \"../../../testutils/testdata\"},\n\t\t\ttrue,\n\t\t},\n\t}\n\tc := newCodeCoverageService(logger, execManager, \"\", azureClient, zstdCompressor, \"\")\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\terr := c.downloadAndDecompressParentCommitDir(tt.args.ctx, tt.args.coverage, tt.args.repoDir)\n\n\t\t\tdefer removeCreatedFile(filepath.Join(tt.args.repoDir, tt.args.coverage.ParentCommit+\".tzst\"))\n\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"codeCoverageService.downloadAndDecompressParentCommitDir() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc Test_codeCoverageService_getParentCommitCoverageDir(t *testing.T) {\n\tts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tif r.URL.Path != \"/\" {\n\t\t\tw.WriteHeader(http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\n\t\tif r.URL.RawQuery == \"commitID=non200&repoID=non200\" {\n\t\t\tw.WriteHeader(300)\n\t\t\treturn\n\t\t}\n\n\t\tif r.URL.RawQuery == \"commitID=payloadError&repoID=payloadDecodeError\" {\n\t\t\t_, writeErr := fmt.Fprintln(w, `{\"undefined_field\"}`)\n\t\t\tif writeErr != nil {\n\t\t\t\tw.WriteHeader(http.StatusBadRequest)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\n\t\t_, writeErr := fmt.Fprintln(w, `{\"blob_link\": \"http://fakeblob.link\", \"parent_commit\" : \"fake_parent_commit\"}`)\n\t\tif writeErr != nil {\n\t\t\tw.WriteHeader(http.StatusBadRequest)\n\t\t\treturn\n\t\t}\n\t\tw.WriteHeader(200)\n\t}))\n\tdefer ts.Close()\n\n\tlogger, execManager, azureClient, zstdCompressor := initialiseArgs()\n\ttype args struct {\n\t\trepoID   string\n\t\tcommitID string\n\t}\n\ttests := []struct {\n\t\tname         string\n\t\targs         args\n\t\twantCoverage parentCommitCoverage\n\t\twantErr      
bool\n\t}{\n\t\t{\n\t\t\t\"Test getParentCommitCoverageDir\",\n\t\t\targs{repoID: \"dummyRepoID\", commitID: \"dummyCommitID\"},\n\t\t\tparentCommitCoverage{Bloblink: \"http://fakeblob.link\", ParentCommit: \"fake_parent_commit\"},\n\t\t\tfalse,\n\t\t},\n\t\t{\n\t\t\t\"Test getParentCommitCoverageDir for non 200 status error\",\n\t\t\targs{repoID: \"non200\", commitID: \"non200\"},\n\t\t\tparentCommitCoverage{},\n\t\t\ttrue,\n\t\t},\n\n\t\t{\n\t\t\t\"Test getParentCommitCoverageDir for payloadDecodeError\",\n\t\t\targs{repoID: \"payloadDecodeError\", commitID: \"payloadError\"},\n\t\t\tparentCommitCoverage{},\n\t\t\ttrue,\n\t\t},\n\t}\n\tc := newCodeCoverageService(logger, execManager, \"\", azureClient, zstdCompressor, ts.URL)\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tgotCoverage, err := c.getParentCommitCoverageDir(tt.args.repoID, tt.args.commitID)\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"codeCoverageService.getParentCommitCoverageDir() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(gotCoverage, tt.wantCoverage) {\n\t\t\t\tt.Errorf(\"codeCoverageService.getParentCommitCoverageDir() = %v, want %v\", gotCoverage, tt.wantCoverage)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc Test_codeCoverageService_sendCoverageData(t *testing.T) {\n\tpayload := []coverageData{\n\t\t{\n\t\t\tBuildID:       \"buildID1\",\n\t\t\tRepoID:        \"repoID1\",\n\t\t\tCommitID:      \"commitID1\",\n\t\t\tBlobLink:      \"blobLink1\",\n\t\t\tTotalCoverage: json.RawMessage([]byte(`{\"bar\":\"baz\"}`)),\n\t\t},\n\t}\n\n\tmux := http.NewServeMux()\n\n\tmux.HandleFunc(\"/endpoint\", func(res http.ResponseWriter, req *http.Request) {\n\t\tbody, _ := io.ReadAll(req.Body)\n\t\texpResp := `[{\"build_id\":\"buildID1\",\"repo_id\":\"repoID1\",\"commit_id\":\"commitID1\",\"blob_link\":\"blobLink1\",\"total_coverage\":{\"bar\":\"baz\"}}]`\n\t\tif !reflect.DeepEqual(string(body), expResp) 
{\n\t\t\tt.Errorf(\"Expected response body: %v, got: %v\\n\", expResp, string(body))\n\t\t}\n\t\tres.WriteHeader(200)\n\t})\n\tmux.HandleFunc(\"/endpoint-err\", func(res http.ResponseWriter, req *http.Request) {\n\t\tres.WriteHeader(404)\n\t})\n\n\tts := httptest.NewServer(mux)\n\tdefer ts.Close()\n\n\tlogger, execManager, azureClient, zstdCompressor := initialiseArgs()\n\ttype args struct {\n\t\tpayload []coverageData\n\t}\n\ttests := []struct {\n\t\tname     string\n\t\targs     args\n\t\tendpoint string\n\t\twantErr  bool\n\t}{\n\t\t{\n\t\t\t\"Test sendCoverageData for success\",\n\t\t\targs{payload: payload},\n\t\t\t\"/endpoint\",\n\t\t\tfalse,\n\t\t},\n\n\t\t{\n\t\t\t\"Test sendCoverageData for non 200 status\",\n\t\t\targs{payload: payload},\n\t\t\t\"/endpoint-err\",\n\t\t\ttrue,\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tc := newCodeCoverageService(logger, execManager, \"\", azureClient, zstdCompressor, ts.URL+tt.endpoint)\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tif err := c.sendCoverageData(tt.args.payload); (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"codeCoverageService.sendCoverageData() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc Test_codeCoverageService_getTotalCoverage(t *testing.T) {\n\tlogger, execManager, azureClient, zstdCompressor := initialiseArgs()\n\tc := newCodeCoverageService(logger, execManager, \"\", azureClient, zstdCompressor, \"\")\n\ttype args struct {\n\t\tfilepath string\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\targs    args\n\t\twant    json.RawMessage\n\t\twantErr bool\n\t}{\n\t\t{\n\t\t\t\"Test getTotalCoverage\",\n\t\t\targs{\"../../../testutils/testdata/coverage/sample/coverage-final.json\"},\n\t\t\tjson.RawMessage([]byte(`\"80%\"`)),\n\t\t\tfalse,\n\t\t},\n\n\t\t{\n\t\t\t\"Test getTotalCoverage for no field of total coverage\",\n\t\t\targs{\"../../../testutils/testdata/coverage/coverage-final.json\"},\n\t\t\tjson.RawMessage{},\n\t\t\ttrue,\n\t\t},\n\t}\n\tfor _, tt := 
range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tgot, err := c.getTotalCoverage(tt.args.filepath)\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"codeCoverageService.getTotalCoverage() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif len(tt.want) > 0 && !reflect.DeepEqual(got, tt.want) {\n\t\t\t\tt.Errorf(\"codeCoverageService.getTotalCoverage() = %v, want %v\", string(got), string(tt.want))\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc newCodeCoverageService(logger lumber.Logger,\n\texecManager *mocks.ExecutionManager,\n\tcodeCoveragParentDir string,\n\tazureClient *mocks.AzureClient,\n\tzstd *mocks.ZstdCompressor,\n\tendpoint string) *codeCoverageService {\n\treturn &codeCoverageService{\n\t\tlogger:               logger,\n\t\texecManager:          execManager,\n\t\tcodeCoveragParentDir: codeCoveragParentDir,\n\t\tazureClient:          azureClient,\n\t\tzstd:                 zstd,\n\t\thttpClient: http.Client{\n\t\t\tTimeout: global.DefaultAPITimeout,\n\t\t},\n\t\tendpoint: endpoint,\n\t}\n}\n\nfunc initialiseArgs() (logger lumber.Logger,\n\texecManager *mocks.ExecutionManager,\n\tazureClient *mocks.AzureClient,\n\tzstd *mocks.ZstdCompressor) {\n\tazureClient = new(mocks.AzureClient)\n\texecManager = new(mocks.ExecutionManager)\n\tzstdCompressor := new(mocks.ZstdCompressor)\n\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tfmt.Printf(\"Couldn't initialize logger, error: %v\", err)\n\t}\n\treturn logger, execManager, azureClient, zstdCompressor\n}\n\nfunc removeCreatedFile(path string) {\n\terr := os.RemoveAll(path)\n\tif err != nil {\n\t\tfmt.Println(\"error in removing!!\")\n\t}\n}\n"
  },
  {
    "path": "pkg/service/coverage/models.go",
    "content": "package coverage\n\nimport \"encoding/json\"\n\ntype parentCommitCoverage struct {\n\tBloblink     string `json:\"blob_link\"`\n\tParentCommit string `json:\"parent_commit\"`\n}\n\ntype coverageData struct {\n\tBuildID       string          `json:\"build_id\"`\n\tRepoID        string          `json:\"repo_id\"`\n\tCommitID      string          `json:\"commit_id\"`\n\tBlobLink      string          `json:\"blob_link\"`\n\tTotalCoverage json.RawMessage `json:\"total_coverage\"`\n}\n"
  },
  {
    "path": "pkg/service/teststats/teststats.go",
    "content": "package teststats\n\nimport (\n\t\"sort\"\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/procfs\"\n)\n\n//ProcStats represents the process stats for a particular pid\ntype ProcStats struct {\n\tlogger                       lumber.Logger\n\tExecutionResultInputChannel  chan core.ExecutionResults\n\twg                           sync.WaitGroup\n\tExecutionResultOutputChannel chan *core.ExecutionResults\n}\n\n// New returns instance of ProcStats\nfunc New(cfg *config.NucleusConfig, logger lumber.Logger) (*ProcStats, error) {\n\treturn &ProcStats{\n\t\tlogger:                       logger,\n\t\tExecutionResultInputChannel:  make(chan core.ExecutionResults),\n\t\tExecutionResultOutputChannel: make(chan *core.ExecutionResults),\n\t}, nil\n\n}\n\n// CaptureTestStats combines the ps stats for each test\nfunc (s *ProcStats) CaptureTestStats(pid int32, collectStats bool) error {\n\tps, err := procfs.New(pid, global.SamplingTime, false)\n\tif err != nil {\n\t\ts.logger.Errorf(\"failed to find process stats with pid %d %v\", pid, err)\n\t\treturn err\n\t}\n\n\ts.wg.Add(1)\n\tgo func() {\n\t\tdefer s.wg.Done()\n\t\tprocessStats := ps.GetStatsInInterval()\n\t\tif len(processStats) == 0 {\n\t\t\ts.logger.Errorf(\"no process stats found with pid %d\", pid)\n\t\t}\n\t\tselect {\n\t\tcase executionResults := <-s.ExecutionResultInputChannel:\n\t\t\tif collectStats {\n\t\t\t\tfor ind := range executionResults.Results {\n\t\t\t\t\t// Refactor the impl of below 2 functions using generics when Go 1.18 arrives\n\t\t\t\t\t// https://www.freecodecamp.org/news/generics-in-golang/\n\t\t\t\t\ts.appendStatsToTests(executionResults.Results[ind].TestPayload, 
processStats)\n\t\t\t\t\ts.appendStatsToTestSuites(executionResults.Results[ind].TestSuitePayload, processStats)\n\t\t\t\t}\n\t\t\t}\n\t\t\ts.ExecutionResultOutputChannel <- &executionResults\n\t\tdefault:\n\t\t\t// Can reach here in 2 cases (ie `/results` API wasn't called):\n\t\t\t// 1. runner process exited with zero exit exitCode but no testFiles were run (changes in Readme.md etc)\n\t\t\t// 2. runner process exited with non-zero exitCode\n\t\t\ts.logger.Warnf(\"No test results found, pid %d\", pid)\n\t\t\ts.ExecutionResultOutputChannel <- nil\n\t\t}\n\t}()\n\n\treturn nil\n}\n\n// processStats is RecordTime sorted\nfunc (s *ProcStats) getProcsForInterval(start, end time.Time, processStats []*procfs.Stats) []*procfs.Stats {\n\tn := len(processStats)\n\tleft := sort.Search(n, func(i int) bool { return !processStats[i].RecordTime.Before(start) })\n\tright := sort.Search(n, func(i int) bool { return !processStats[i].RecordTime.Before(end) })\n\n\tif left <= right && 0 <= left && right <= n {\n\t\treturn processStats[left:right]\n\t}\n\t// return empty slice\n\treturn processStats[0:0]\n}\n\nfunc (s *ProcStats) appendStatsToTests(testResults []core.TestPayload, processStats []*procfs.Stats) {\n\tfor r := 0; r < len(testResults); r++ {\n\t\tresult := &testResults[r]\n\t\t// check if start time of test t(start) is not 0\n\t\tif !result.StartTime.IsZero() {\n\t\t\t// calculate end time of test t(end)\n\t\t\tresult.EndTime = result.StartTime.Add(time.Duration(result.Duration) * time.Millisecond)\n\t\t\tfor _, proc := range s.getProcsForInterval(result.StartTime, result.EndTime, processStats) {\n\t\t\t\tresult.Stats = append(result.Stats, core.TestProcessStats{CPU: proc.CPUPercentage, Memory: proc.MemConsumed, RecordTime: proc.RecordTime})\n\t\t\t}\n\t\t}\n\t}\n}\n\nfunc (s *ProcStats) appendStatsToTestSuites(testSuiteResults []core.TestSuitePayload, processStats []*procfs.Stats) {\n\tfor r := 0; r < len(testSuiteResults); r++ {\n\t\tresult := 
&testSuiteResults[r]\n\t\t// check if start time of test suite ts(start) is not 0\n\t\tif !result.StartTime.IsZero() {\n\t\t\t// calculate end time of test suite ts(end)\n\t\t\tresult.EndTime = result.StartTime.Add(time.Duration(result.Duration) * time.Millisecond)\n\t\t\tfor _, proc := range s.getProcsForInterval(result.StartTime, result.EndTime, processStats) {\n\t\t\t\tresult.Stats = append(result.Stats, core.TestProcessStats{CPU: proc.CPUPercentage, Memory: proc.MemConsumed, RecordTime: proc.RecordTime})\n\t\t\t}\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "pkg/service/teststats/teststats_test.go",
    "content": "package teststats\n\nimport (\n\t\"fmt\"\n\t\"reflect\"\n\t\"testing\"\n\t\"time\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/procfs\"\n\t\"github.com/LambdaTest/test-at-scale/testutils\"\n)\n\nfunc getDummyTimeMap() map[string]time.Time {\n\ttpresent, err := time.Parse(\"Mon, 02 Jan 2006 15:04:05 MST\", \"Tue, 28 Feb 2022 16:22:01 UTC\")\n\tif err != nil {\n\t\tfmt.Printf(\"Error parsing time: %v\", err)\n\t}\n\tt2025, _ := time.Parse(\"Mon, 02 Jan 2006 15:04:05 MST\", \"Tue, 22 Feb 2025 16:22:01 UTC\")\n\ttpast1, _ := time.Parse(\"Mon, 02 Jan 2006 15:04:05 MST\", \"Tue, 22 Feb 2021 16:23:01 UTC\")\n\ttpast2, _ := time.Parse(\"Mon, 02 Jan 2006 15:04:05 MST\", \"Tue, 22 Feb 2021 16:22:05 UTC\")\n\ttfuture1, _ := time.Parse(\"Mon, 02 Jan 2006 15:04:05 MST\", \"Tue, 22 Feb 2023 16:14:01 UTC\")\n\ttfuture2, _ := time.Parse(\"Mon, 02 Jan 2006 15:04:05 MST\", \"Tue, 22 Feb 2023 16:25:01 UTC\")\n\n\treturn map[string]time.Time{\"tpresent\": tpresent, \"t2025\": t2025, \"tpast1\": tpast1, \"tpast2\": tpast2, \"tfuture1\": tfuture1, \"tfuture2\": tfuture2}\n\n}\n\n// NOTE: Tests in this package are meant to be run in a Linux environment\n\nfunc TestNew(t *testing.T) {\n\tcfg, _ := testutils.GetConfig()\n\tlogger, _ := testutils.GetLogger()\n\ttype args struct {\n\t\tcfg    *config.NucleusConfig\n\t\tlogger lumber.Logger\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\targs    args\n\t\twant    *ProcStats\n\t\twantErr bool\n\t}{\n\t\t{\"Test New\",\n\t\t\targs{cfg, logger},\n\t\t\t&ProcStats{\n\t\t\t\tlogger:                       logger,\n\t\t\t\tExecutionResultInputChannel:  make(chan core.ExecutionResults),\n\t\t\t\tExecutionResultOutputChannel: make(chan *core.ExecutionResults),\n\t\t\t}, false},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tgot, err := 
New(tt.args.cfg, tt.args.logger)\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"New() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(got.logger, tt.want.logger) {\n\t\t\t\tt.Errorf(\"New() = %v, want %v\", got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc TestProcStats_getProcsForInterval(t *testing.T) {\n\tcfg, _ := testutils.GetConfig()\n\tlogger, _ := testutils.GetLogger()\n\ttimeMap := getDummyTimeMap()\n\n\ttype args struct {\n\t\tstart        time.Time\n\t\tend          time.Time\n\t\tprocessStats []*procfs.Stats\n\t}\n\ttests := []struct {\n\t\tname string\n\t\targs args\n\t\twant []*procfs.Stats\n\t}{\n\t\t{\"Test getProcsForInterval\", args{timeMap[\"tpresent\"], timeMap[\"tpresent\"], []*procfs.Stats{}}, []*procfs.Stats{}},\n\n\t\t{\"Test getProcsForInterval\", args{timeMap[\"tpresent\"], timeMap[\"t2025\"], []*procfs.Stats{\n\t\t\t{\n\t\t\t\tCPUPercentage: 1.2,\n\t\t\t\tMemPercentage: 14.1,\n\t\t\t\tMemShared:     105.0,\n\t\t\t\tMemSwapped:    25,\n\t\t\t\tMemConsumed:   131,\n\t\t\t\tRecordTime:    timeMap[\"tpast1\"],\n\t\t\t},\n\t\t\t{\n\t\t\t\tCPUPercentage: 1.25,\n\t\t\t\tMemPercentage: 14.15,\n\t\t\t\tMemShared:     107.0,\n\t\t\t\tMemSwapped:    25,\n\t\t\t\tMemConsumed:   131,\n\t\t\t\tRecordTime:    timeMap[\"tfuture1\"],\n\t\t\t},\n\t\t\t{\n\t\t\t\tCPUPercentage: 1.25,\n\t\t\t\tMemPercentage: 14.15,\n\t\t\t\tMemShared:     107.0,\n\t\t\t\tMemSwapped:    25,\n\t\t\t\tMemConsumed:   131,\n\t\t\t\tRecordTime:    timeMap[\"tfuture2\"],\n\t\t\t},\n\t\t}}, []*procfs.Stats{\n\t\t\t{\n\t\t\t\tCPUPercentage: 1.25,\n\t\t\t\tMemPercentage: 14.15,\n\t\t\t\tMemShared:     107.0,\n\t\t\t\tMemSwapped:    25,\n\t\t\t\tMemConsumed:   131,\n\t\t\t\tRecordTime:    timeMap[\"tfuture1\"],\n\t\t\t},\n\t\t\t{\n\t\t\t\tCPUPercentage: 1.25,\n\t\t\t\tMemPercentage: 14.15,\n\t\t\t\tMemShared:     107.0,\n\t\t\t\tMemSwapped:    25,\n\t\t\t\tMemConsumed:   131,\n\t\t\t\tRecordTime:    
timeMap[\"tfuture2\"],\n\t\t\t},\n\t\t},\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\ts, err := New(cfg, logger)\n\t\t\tif err != nil {\n\t\t\t\tt.Errorf(\"New() error = %v\", err)\n\t\t\t}\n\t\t\tgot := s.getProcsForInterval(tt.args.start, tt.args.end, tt.args.processStats)\n\t\t\tif !reflect.DeepEqual(got, tt.want) {\n\t\t\t\tt.Errorf(\"ProcStats.getProcsForInterval() = %v, want %v\", got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc TestProcStats_appendStatsToTests(t *testing.T) {\n\tcfg, _ := testutils.GetConfig()\n\tlogger, _ := testutils.GetLogger()\n\ttimeMap := getDummyTimeMap()\n\n\ttype args struct {\n\t\ttestResults  []core.TestPayload\n\t\tprocessStats []*procfs.Stats\n\t}\n\ttests := []struct {\n\t\tname string\n\t\targs args\n\t\twant string\n\t}{\n\t\t{\"Test appendStatsToTests\",\n\t\t\targs{[]core.TestPayload{\n\t\t\t\t{Name: \"test 1\", StartTime: timeMap[\"tpast1\"], EndTime: timeMap[\"tfuture1\"]},\n\t\t\t},\n\t\t\t\t[]*procfs.Stats{}},\n\t\t\t// nolint:lll\n\t\t\t\"[{TestID: Detail: SuiteID: Suites:[] Title: FullTitle: Name:test 1 Duration:0 FilePath: Line: Col: CurrentRetry:0 Status: DAG:[] Filelocator: BlocklistSource: Blocklisted:false StartTime:2021-02-22 16:23:01 +0000 UTC EndTime:2021-02-22 16:23:01 +0000 UTC Stats:[] FailureMessage:}]\",\n\t\t},\n\n\t\t{\"Test appendStatsToTests\",\n\t\t\targs{[]core.TestPayload{\n\t\t\t\t{\n\t\t\t\t\tName:      \"test 1\",\n\t\t\t\t\tStartTime: timeMap[\"tpast1\"],\n\t\t\t\t\tDuration:  100,\n\t\t\t\t\tEndTime:   timeMap[\"tfuture1\"],\n\t\t\t\t\tStats:     []core.TestProcessStats{},\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tName:      \"test 2\",\n\t\t\t\t\tStartTime: timeMap[\"tpast2\"],\n\t\t\t\t\tDuration:  200,\n\t\t\t\t\tEndTime:   timeMap[\"tfuture2\"],\n\t\t\t\t\tStats:     []core.TestProcessStats{{Memory: 100, CPU: 25.4, Storage: 250, RecordTime: timeMap[\"tpast2\"]}},\n\t\t\t\t},\n\t\t\t},\n\t\t\t\t[]*procfs.Stats{\n\t\t\t\t\t{\n\t\t\t\t\t\tCPUPercentage: 
1.2,\n\t\t\t\t\t\tMemPercentage: 14.1,\n\t\t\t\t\t\tMemShared:     105.0,\n\t\t\t\t\t\tMemSwapped:    25,\n\t\t\t\t\t\tMemConsumed:   131,\n\t\t\t\t\t\tRecordTime:    timeMap[\"tpast1\"],\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tCPUPercentage: 1.25,\n\t\t\t\t\t\tMemPercentage: 14.15,\n\t\t\t\t\t\tMemShared:     107.0,\n\t\t\t\t\t\tMemSwapped:    25,\n\t\t\t\t\t\tMemConsumed:   131,\n\t\t\t\t\t\tRecordTime:    timeMap[\"tfuture1\"],\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tCPUPercentage: 1.25,\n\t\t\t\t\t\tMemPercentage: 14.15,\n\t\t\t\t\t\tMemShared:     107.0,\n\t\t\t\t\t\tMemSwapped:    25,\n\t\t\t\t\t\tMemConsumed:   131,\n\t\t\t\t\t\tRecordTime:    timeMap[\"tfuture2\"],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t// nolint:lll\n\t\t\t\"[{TestID: Detail: SuiteID: Suites:[] Title: FullTitle: Name:test 1 Duration:100 FilePath: Line: Col: CurrentRetry:0 Status: DAG:[] Filelocator: BlocklistSource: Blocklisted:false StartTime:2021-02-22 16:23:01 +0000 UTC EndTime:2021-02-22 16:23:01.1 +0000 UTC Stats:[{Memory:131 CPU:1.2 Storage:0 RecordTime:2021-02-22 16:23:01 +0000 UTC}] FailureMessage:} {TestID: Detail: SuiteID: Suites:[] Title: FullTitle: Name:test 2 Duration:200 FilePath: Line: Col: CurrentRetry:0 Status: DAG:[] Filelocator: BlocklistSource: Blocklisted:false StartTime:2021-02-22 16:22:05 +0000 UTC EndTime:2021-02-22 16:22:05.2 +0000 UTC Stats:[{Memory:100 CPU:25.4 Storage:250 RecordTime:2021-02-22 16:22:05 +0000 UTC}] FailureMessage:}]\",\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\ts, err := New(cfg, logger)\n\t\t\tif err != nil {\n\t\t\t\tt.Errorf(\"New() error = %v\", err)\n\t\t\t}\n\t\t\ts.appendStatsToTests(tt.args.testResults, tt.args.processStats)\n\t\t\tgot := fmt.Sprintf(\"%+v\", tt.args.testResults)\n\t\t\tif got != tt.want {\n\t\t\t\tt.Errorf(\"ProcStats.appendStatsToTests() = \\n%v\\nwant: \\n%v\", got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc TestProcStats_appendStatsToTestSuites(t *testing.T) {\n\tcfg, 
_ := testutils.GetConfig()\n\tlogger, _ := testutils.GetLogger()\n\ttimeMap := getDummyTimeMap()\n\n\ttype args struct {\n\t\ttestSuiteResults []core.TestSuitePayload\n\t\tprocessStats     []*procfs.Stats\n\t}\n\ttests := []struct {\n\t\tname string\n\t\targs args\n\t\twant string\n\t}{\n\t\t{\"Test appendStatsToTests\",\n\t\t\targs{[]core.TestSuitePayload{\n\t\t\t\t{SuiteID: \"testSuite1\", StartTime: timeMap[\"tpast1\"], EndTime: timeMap[\"tfuture1\"], TotalTests: 2},\n\t\t\t},\n\t\t\t\t[]*procfs.Stats{}},\n\t\t\t\"[{SuiteID:testSuite1 SuiteName: ParentSuiteID: BlocklistSource: Blocklisted:false StartTime:2021-02-22 16:23:01 +0000 UTC EndTime:2021-02-22 16:23:01 +0000 UTC Duration:0 Status: Stats:[] TotalTests:2}]\", // nolint\n\t\t},\n\n\t\t{\"Test appendStatsToTests\",\n\t\t\targs{[]core.TestSuitePayload{\n\t\t\t\t{\n\t\t\t\t\tSuiteID:    \"testSuite2\",\n\t\t\t\t\tStartTime:  timeMap[\"tpast1\"],\n\t\t\t\t\tDuration:   100,\n\t\t\t\t\tEndTime:    timeMap[\"tfuture1\"],\n\t\t\t\t\tStats:      []core.TestProcessStats{},\n\t\t\t\t\tTotalTests: 3,\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tSuiteID:    \"testSuite3\",\n\t\t\t\t\tStartTime:  timeMap[\"tpast2\"],\n\t\t\t\t\tDuration:   200,\n\t\t\t\t\tEndTime:    timeMap[\"tfuture2\"],\n\t\t\t\t\tStats:      []core.TestProcessStats{{Memory: 100, CPU: 25.4, Storage: 250, RecordTime: timeMap[\"tpast2\"]}},\n\t\t\t\t\tTotalTests: 5,\n\t\t\t\t},\n\t\t\t},\n\t\t\t\t[]*procfs.Stats{\n\t\t\t\t\t{\n\t\t\t\t\t\tCPUPercentage: 1.2,\n\t\t\t\t\t\tMemPercentage: 14.1,\n\t\t\t\t\t\tMemShared:     105.0,\n\t\t\t\t\t\tMemSwapped:    25,\n\t\t\t\t\t\tMemConsumed:   131,\n\t\t\t\t\t\tRecordTime:    timeMap[\"tpast1\"],\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tCPUPercentage: 1.25,\n\t\t\t\t\t\tMemPercentage: 14.15,\n\t\t\t\t\t\tMemShared:     107.0,\n\t\t\t\t\t\tMemSwapped:    25,\n\t\t\t\t\t\tMemConsumed:   131,\n\t\t\t\t\t\tRecordTime:    timeMap[\"tfuture1\"],\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\tCPUPercentage: 
1.25,\n\t\t\t\t\t\tMemPercentage: 14.15,\n\t\t\t\t\t\tMemShared:     107.0,\n\t\t\t\t\t\tMemSwapped:    25,\n\t\t\t\t\t\tMemConsumed:   131,\n\t\t\t\t\t\tRecordTime:    timeMap[\"tfuture2\"],\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\t\"[{SuiteID:testSuite2 SuiteName: ParentSuiteID: BlocklistSource: Blocklisted:false StartTime:2021-02-22 16:23:01 +0000 UTC EndTime:2021-02-22 16:23:01.1 +0000 UTC Duration:100 Status: Stats:[{Memory:131 CPU:1.2 Storage:0 RecordTime:2021-02-22 16:23:01 +0000 UTC}] TotalTests:3} {SuiteID:testSuite3 SuiteName: ParentSuiteID: BlocklistSource: Blocklisted:false StartTime:2021-02-22 16:22:05 +0000 UTC EndTime:2021-02-22 16:22:05.2 +0000 UTC Duration:200 Status: Stats:[{Memory:100 CPU:25.4 Storage:250 RecordTime:2021-02-22 16:22:05 +0000 UTC}] TotalTests:5}]\", //nolint\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\ts, err := New(cfg, logger)\n\t\t\tif err != nil {\n\t\t\t\tt.Errorf(\"New() error = %v\", err)\n\t\t\t}\n\t\t\ts.appendStatsToTestSuites(tt.args.testSuiteResults, tt.args.processStats)\n\t\t\tgot := fmt.Sprintf(\"%+v\", tt.args.testSuiteResults)\n\t\t\tif got != tt.want {\n\t\t\t\tt.Errorf(\"ProcStats.appendStatsToTestSuites = \\n%v\\nwant: \\n%v\", got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "pkg/synapse/synapse.go",
    "content": "package synapse\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/tasconfigdownloader\"\n\t\"github.com/cenkalti/backoff/v4\"\n\t\"github.com/denisbrodbeck/machineid\"\n\t\"github.com/gorilla/websocket\"\n\t\"github.com/spf13/viper\"\n)\n\n// All constant related to synapse\nconst (\n\tRepo                             = \"repo\"\n\tBuildID                          = \"build-id\"\n\tJobID                            = \"job-id\"\n\tMode                             = \"mode\"\n\tID                               = \"id\"\n\tDuplicateConnectionErr           = \"Synapse already has an open connection\"\n\tAuthenticationFailed             = \"Synapse authentication failed\"\n\tduplicateConnectionSleepDuration = 15 * time.Second\n)\n\nvar buildAbortMap = make(map[string]bool)\n\ntype synapse struct {\n\tconn                     *websocket.Conn\n\trunner                   core.DockerRunner\n\tsecretsManager           core.SecretsManager\n\tlogger                   lumber.Logger\n\tMsgErrChan               chan struct{}\n\tMsgChan                  chan []byte\n\tConnectionAborted        chan struct{}\n\tInvalidConnectionRequest chan struct{}\n\tLogoutRequired           bool\n\ttasConfigDownloader      *tasconfigdownloader.TASConfigDownloader\n}\n\n// New returns new instance of synapse\nfunc New(\n\trunner core.DockerRunner,\n\tlogger lumber.Logger,\n\tsecretsManager core.SecretsManager,\n\ttasConfigDownloader *tasconfigdownloader.TASConfigDownloader,\n) core.SynapseManager {\n\treturn &synapse{\n\t\trunner:                   runner,\n\t\tlogger:                   logger,\n\t\tsecretsManager:           secretsManager,\n\t\tMsgErrChan:               make(chan struct{}),\n\t\tInvalidConnectionRequest: make(chan 
struct{}),\n\t\tMsgChan:                  make(chan []byte, 1024),\n\t\tConnectionAborted:        make(chan struct{}, 10),\n\t\tLogoutRequired:           true,\n\t\ttasConfigDownloader:      tasConfigDownloader,\n\t}\n}\n\nfunc (s *synapse) InitiateConnection(\n\tctx context.Context,\n\twg *sync.WaitGroup,\n\tconnectionFailed chan struct{}) {\n\tdefer wg.Done()\n\tgo s.openAndMaintainConnection(ctx, connectionFailed)\n\t<-ctx.Done()\n\tif s.LogoutRequired {\n\t\ts.logout()\n\t}\n\ts.runner.KillRunningDocker(context.TODO())\n\ts.logger.Debugf(\"exiting synapse\")\n}\n\n/*\nopenAndMaintainConnection tries to create and mantain connection with\nexponential backoff factor\n*/\nfunc (s *synapse) openAndMaintainConnection(ctx context.Context, connectionFailed chan struct{}) {\n\t// setup exponential backoff for retrying control websocket connection\n\texponentialBackoff := backoff.NewExponentialBackOff()\n\texponentialBackoff.InitialInterval = 500 * time.Millisecond\n\texponentialBackoff.RandomizationFactor = 0.05\n\texponentialBackoff.MaxElapsedTime = 10 * time.Minute\n\ts.logger.Debugf(\"starting socket connection at URL %s\", global.SocketURL[viper.GetString(\"env\")])\n\toperation := func() error {\n\t\ts.logger.Debugf(\"trying to connect to TAS server\")\n\t\tselect {\n\t\tcase <-ctx.Done():\n\t\t\treturn nil\n\t\tdefault:\n\t\t\tconn, _, err := websocket.DefaultDialer.Dial(global.SocketURL[viper.GetString(\"env\")], nil)\n\t\t\tif err != nil {\n\t\t\t\ts.logger.Errorf(\"error connecting synapse to TAS %+v\", err)\n\t\t\t\treturn err\n\t\t\t}\n\t\t\ts.conn = conn\n\t\t\ts.logger.Debugf(\"synapse connected to TAS server\")\n\t\t\ts.login()\n\t\t\tif !s.connectionHandler(ctx, conn, connectionFailed) {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\ts.MsgErrChan = make(chan struct{})\n\t\t\t// re-listen for any connection breaks\n\t\t\tgo s.openAndMaintainConnection(ctx, connectionFailed)\n\t\t\treturn nil\n\t\t}\n\t}\n\tif err := backoff.Retry(operation, exponentialBackoff); err 
!= nil {\n\t\ts.logger.Errorf(\"Unable to establish connection with lambdatest server. exiting...\")\n\t\tconnectionFailed <- struct{}{}\n\t\ts.LogoutRequired = false\n\t}\n}\n\n/*\n connectionHandler handles the connection by listening to any connection closer\n also it returns boolean value which represents whether we can retry to connect\n*/\nfunc (s *synapse) connectionHandler(ctx context.Context, conn *websocket.Conn, connectionFailed chan struct{}) bool {\n\tnormalCloser := make(chan struct{})\n\tctxDone := false\n\tdefer func() {\n\t\t// if gracefully terminated, wait for logout message to be sent\n\t\tif !ctxDone {\n\t\t\tconn.Close()\n\t\t}\n\t\ts.ConnectionAborted <- struct{}{}\n\t}()\n\n\tgo s.messageReader(normalCloser, conn)\n\tgo s.messageWriter(conn)\n\tselect {\n\tcase <-ctx.Done():\n\t\tctxDone = true\n\t\treturn false\n\tcase <-normalCloser:\n\t\treturn false\n\tcase <-s.InvalidConnectionRequest:\n\t\tconnectionFailed <- struct{}{}\n\t\ts.LogoutRequired = false\n\t\treturn false\n\tcase <-s.MsgErrChan:\n\t\ts.logger.Errorf(\"Connection between synpase and lambdatest break\")\n\t\treturn true\n\t}\n}\n\n/*\nmessageReader reads websocket messages and acts upon it\n*/\nfunc (s *synapse) messageReader(normalCloser chan struct{}, conn *websocket.Conn) {\n\tconn.SetReadLimit(global.MaxMessageSize)\n\tif err := conn.SetReadDeadline(time.Now().Add(global.PingWait)); err != nil {\n\t\ts.logger.Errorf(\"Error in setting read deadline , error: %v\", err)\n\t\ts.MsgErrChan <- struct{}{}\n\t\tclose(s.MsgErrChan)\n\t\treturn\n\t}\n\tconn.SetPingHandler(func(string) error {\n\t\tif err := conn.WriteMessage(websocket.PongMessage, nil); err != nil {\n\t\t\ts.logger.Errorf(\"Error in writing pong msg , error: %v\", err)\n\t\t\treturn err\n\t\t}\n\t\tif err := conn.SetReadDeadline(time.Now().Add(global.PingWait)); err != nil {\n\t\t\ts.logger.Errorf(\"Error in setting read deadline , error: %v\", err)\n\n\t\t\treturn err\n\t\t}\n\t\treturn 
nil\n\t})\n\tduplicateConnectionChan := make(chan struct{})\n\tfor {\n\t\tselect {\n\t\tcase <-duplicateConnectionChan:\n\t\t\ts.logger.Errorf(\"Duplicate connection detected .. will retry after certain time\")\n\t\t\ttime.Sleep(duplicateConnectionSleepDuration)\n\t\t\ts.MsgErrChan <- struct{}{}\n\t\t\tclose(s.MsgErrChan)\n\t\t\tclose(duplicateConnectionChan)\n\t\t\treturn\n\t\tdefault:\n\t\t\t_, msg, err := conn.ReadMessage()\n\t\t\tif err != nil {\n\t\t\t\tif websocket.IsCloseError(err, websocket.CloseNormalClosure) {\n\t\t\t\t\ts.logger.Debugf(\"Normal closure occurred...........\")\n\t\t\t\t\tnormalCloser <- struct{}{}\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\ts.logger.Errorf(\"disconnecting from lambdatest server. error in reading message %v\", err)\n\t\t\t\ts.MsgErrChan <- struct{}{}\n\t\t\t\tclose(s.MsgErrChan)\n\t\t\t\treturn\n\t\t\t}\n\t\t\ts.processMessage(msg, duplicateConnectionChan)\n\t\t}\n\t}\n}\n\n// processMessage process messages received via websocket\nfunc (s *synapse) processMessage(msg []byte, duplicateConnectionChan chan struct{}) {\n\tvar message core.Message\n\terr := json.Unmarshal(msg, &message)\n\tif err != nil {\n\t\ts.logger.Errorf(\"error unmarshaling message\")\n\t}\n\n\tswitch message.Type {\n\tcase core.MsgError:\n\t\ts.logger.Debugf(\"error message received from server\")\n\t\tgo s.processErrorMessage(message, duplicateConnectionChan)\n\tcase core.MsgInfo:\n\t\ts.logger.Debugf(\"info message received from server\")\n\tcase core.MsgTask:\n\t\ts.logger.Debugf(\"task message received from server\")\n\t\tgo s.processTask(message)\n\tcase core.MsgYMLParsingRequest:\n\t\ts.logger.Debugf(\"yml parsing request received from server\")\n\t\tgo s.processYMLParsingRequest(message)\n\tcase core.MsgBuildAbort:\n\t\ts.logger.Debugf(\"abort-build message received from server\")\n\t\tgo s.processAbortBuild(message)\n\tdefault:\n\t\ts.logger.Errorf(\"message type not found\")\n\t}\n}\n\n// processErrorMessage handles error messages\nfunc (s *synapse) 
processErrorMessage(message core.Message, duplicateConnectionChan chan struct{}) {\n\terrMsg := string(message.Content)\n\ts.logger.Errorf(\"error message received from server, error %s \", errMsg)\n\tif errMsg == AuthenticationFailed {\n\t\ts.InvalidConnectionRequest <- struct{}{}\n\t}\n\tif errMsg == DuplicateConnectionErr {\n\t\tduplicateConnectionChan <- struct{}{}\n\t}\n}\n\n// processAbortBuild handles aborting a running build\nfunc (s *synapse) processAbortBuild(message core.Message) {\n\tbuildID := string(message.Content)\n\tbuildAbortMap[buildID] = true\n\ts.logger.Debugf(\"message received to abort build %s\", buildID)\n\tif err := s.runner.KillContainerForBuildID(buildID); err != nil {\n\t\ts.logger.Errorf(\"error while terminating container for buildID: %s, error: %v\", buildID, err)\n\t\treturn\n\t}\n}\n\n// processTask handles task type message\nfunc (s *synapse) processTask(message core.Message) {\n\tvar runnerOpts core.RunnerOptions\n\terr := json.Unmarshal(message.Content, &runnerOpts)\n\tif err != nil {\n\t\ts.logger.Errorf(\"error unmarshaling core.task\")\n\t}\n\n\t// sending job started updates\n\tif runnerOpts.PodType == core.NucleusPod {\n\t\tjobInfo := CreateJobInfo(core.JobStarted, &runnerOpts, \"\")\n\t\ts.logger.Infof(\"Sending update to neuron %+v\", jobInfo)\n\t\tresourceStatsMessage := CreateJobUpdateMessage(jobInfo)\n\t\ts.writeMessageToBuffer(&resourceStatsMessage)\n\t}\n\t// mounting secrets to container\n\trunnerOpts.HostVolumePath = fmt.Sprintf(\"/tmp/synapse/data/%s\", runnerOpts.ContainerName)\n\n\ts.runAndUpdateJobStatus(&runnerOpts)\n}\n\n// runAndUpdateJobStatus intiate and sends jobs status\nfunc (s *synapse) runAndUpdateJobStatus(runnerOpts *core.RunnerOptions) {\n\t// starting container\n\tstatusChan := make(chan core.ContainerStatus)\n\tdefer close(statusChan)\n\ts.logger.Debugf(\"starting container %s for build %s...\", runnerOpts.ContainerName, runnerOpts.Label[BuildID])\n\tgo s.runner.Initiate(context.TODO(), 
runnerOpts, statusChan)\n\n\tstatus := <-statusChan\n\t// post job completion steps\n\ts.logger.Debugf(\"jobID %s, buildID %s  status  %+v\", runnerOpts.Label[JobID], runnerOpts.Label[BuildID], status)\n\n\ts.sendResourceUpdates(core.ResourceRelease, runnerOpts, runnerOpts.Label[JobID], runnerOpts.Label[BuildID])\n\tjobStatus := core.JobFailed\n\tif status.Done {\n\t\tjobStatus = core.JobCompleted\n\t}\n\tif buildAbortMap[runnerOpts.Label[BuildID]] {\n\t\tjobStatus = core.JobAborted\n\t}\n\tjobInfo := CreateJobInfo(jobStatus, runnerOpts, status.Error.Message)\n\ts.logger.Infof(\"Sending update to neuron %+v\", jobInfo)\n\tresourceStatsMessage := CreateJobUpdateMessage(jobInfo)\n\ts.writeMessageToBuffer(&resourceStatsMessage)\n}\n\n// login write login message to lambdatest server\nfunc (s *synapse) login() {\n\tcpu, ram := s.runner.GetInfo(context.TODO())\n\tid, err := machineid.ProtectedID(\"synapaseMeta\")\n\tif err != nil {\n\t\ts.logger.Fatalf(\"Error while generating unique id\")\n\t}\n\tlambdatestConfig := s.secretsManager.GetLambdatestSecrets()\n\tloginDetails := core.LoginDetails{\n\t\tName:           s.secretsManager.GetSynapseName(),\n\t\tSecretKey:      lambdatestConfig.SecretKey,\n\t\tCPU:            cpu,\n\t\tRAM:            ram,\n\t\tSynapseID:      id,\n\t\tSynapseVersion: global.SynapseBinaryVersion,\n\t}\n\ts.logger.Infof(\"Login synapse with id %s\", loginDetails.SynapseID)\n\n\tloginMessage := CreateLoginMessage(loginDetails)\n\ts.writeMessageToBuffer(&loginMessage)\n}\n\n// logout writes logout message to lambdatest server\nfunc (s *synapse) logout() {\n\ts.logger.Infof(\"Logging out from lambdatest server\")\n\tlogoutMessage := CreateLogoutMessage()\n\tmessageJson, err := json.Marshal(logoutMessage)\n\n\tif err != nil {\n\t\ts.logger.Errorf(\"error marshaling message\")\n\t\treturn\n\t}\n\tif err := s.conn.WriteMessage(websocket.TextMessage, messageJson); err != nil {\n\t\ts.logger.Errorf(\"error sending message to the server, error %v\", 
err)\n\n\t}\n}\n\n// sendResourceUpdates sends resource status of synapse\nfunc (s *synapse) sendResourceUpdates(\n\tstatus core.StatType,\n\trunnerOpts *core.RunnerOptions,\n\tjobID, buildID string,\n) {\n\tspecs := GetResources(runnerOpts.Tier)\n\tresourceStats := core.ResourceStats{\n\t\tStatus: status,\n\t\tCPU:    specs.CPU,\n\t\tRAM:    specs.RAM,\n\t}\n\ts.logger.Debugf(\"sending resource update for jobID %s buildID %s to lambdatest %+v\", jobID, buildID, resourceStats)\n\tresourceStatsMessage := CreateResourceStatsMessage(resourceStats)\n\ts.writeMessageToBuffer(&resourceStatsMessage)\n}\n\n// writeMessageToBuffer  writes all message to buffer channel\nfunc (s *synapse) writeMessageToBuffer(message *core.Message) {\n\tmessageJSON, err := json.Marshal(message)\n\tif err != nil {\n\t\ts.logger.Errorf(\"error marshaling message\")\n\t\treturn\n\t}\n\ts.MsgChan <- messageJSON\n}\n\n// messageWriter writes the messages to open websocket\nfunc (s *synapse) messageWriter(conn *websocket.Conn) {\n\tfor {\n\t\tselect {\n\t\tcase <-s.ConnectionAborted:\n\t\t\treturn\n\t\tcase messageJson := <-s.MsgChan:\n\t\t\tif err := conn.WriteMessage(websocket.TextMessage, messageJson); err != nil {\n\t\t\t\ts.logger.Errorf(\"error sending message to the server error %v\", err)\n\t\t\t\ts.MsgChan <- messageJson\n\t\t\t\ts.MsgErrChan <- struct{}{}\n\t\t\t\tclose(s.MsgErrChan)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n}\n\nfunc (s *synapse) processYMLParsingRequest(message core.Message) {\n\tvar parsingReqMsg *core.YMLParsingRequestMessage\n\tvar writeMsg core.Message\n\tdefer s.writeMessageToBuffer(&writeMsg)\n\tif err := json.Unmarshal(message.Content, &parsingReqMsg); err != nil {\n\t\ts.logger.Errorf(\"error in unmarshaling message for yml parsing request, error %v \", err)\n\n\t\twriteMsg = createYMlParsingResultMessage(core.YMLParsingResultMessage{\n\t\t\tOrgID:    parsingReqMsg.OrgID,\n\t\t\tBuildID:  parsingReqMsg.BuildID,\n\t\t\tErrorMsg: 
err.Error(),\n\t\t})\n\t\treturn\n\t}\n\toauth := s.secretsManager.GetOauthToken()\n\n\ttasOutput, err := s.tasConfigDownloader.GetTASConfig(context.TODO(), parsingReqMsg.GitProvider,\n\t\tparsingReqMsg.CommitID,\n\t\tparsingReqMsg.RepoSlug, parsingReqMsg.TasFileName, oauth,\n\t\tparsingReqMsg.Event, parsingReqMsg.LicenseTier)\n\tif err != nil {\n\t\ts.logger.Errorf(\"error occurred while fetching tas config file for buildID %s orgID %s, error %v\",\n\t\t\tparsingReqMsg.BuildID, parsingReqMsg.OrgID, err)\n\t\twriteMsg = createYMlParsingResultMessage(core.YMLParsingResultMessage{\n\t\t\tOrgID:    parsingReqMsg.OrgID,\n\t\t\tBuildID:  parsingReqMsg.BuildID,\n\t\t\tErrorMsg: err.Error(),\n\t\t})\n\t\treturn\n\t}\n\twriteMsg = createYMlParsingResultMessage(core.YMLParsingResultMessage{\n\t\tOrgID:     parsingReqMsg.OrgID,\n\t\tBuildID:   parsingReqMsg.BuildID,\n\t\tYMLOutput: *tasOutput,\n\t})\n}\n"
  },
  {
    "path": "pkg/synapse/utils.go",
    "content": "package synapse\n\nimport (\n\t\"encoding/json\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n)\n\n// CreateLoginMessage creates message of type login\nfunc CreateLoginMessage(loginDetails core.LoginDetails) core.Message {\n\tloginDetailsJson, err := json.Marshal(loginDetails)\n\tif err != nil {\n\t\treturn core.Message{}\n\t}\n\treturn core.Message{\n\t\tType:    core.MsgLogin,\n\t\tContent: loginDetailsJson,\n\t\tSuccess: true,\n\t}\n}\n\n// CreateLogoutMessage creates message of type logout\nfunc CreateLogoutMessage() core.Message {\n\treturn core.Message{\n\t\tType:    core.MsgLogout,\n\t\tContent: []byte(\"\"),\n\t\tSuccess: true,\n\t}\n}\n\n// CreateJobInfo creates jobInfo based on status and runner\nfunc CreateJobInfo(status core.StatusType, runnerOpts *core.RunnerOptions, message string) core.JobInfo {\n\tjobInfo := core.JobInfo{\n\t\tStatus:  status,\n\t\tJobID:   runnerOpts.Label[JobID],\n\t\tBuildID: runnerOpts.Label[BuildID],\n\t\tID:      runnerOpts.Label[ID],\n\t\tMode:    runnerOpts.Label[Mode],\n\t\tMessage: message,\n\t}\n\treturn jobInfo\n}\n\n// CreateJobUpdateMessage creates message of type job updates\nfunc CreateJobUpdateMessage(jobInfo core.JobInfo) core.Message {\n\n\tjobInfoJson, err := json.Marshal(jobInfo)\n\tif err != nil {\n\t\treturn core.Message{}\n\t}\n\treturn core.Message{\n\t\tType:    core.MsgJobInfo,\n\t\tContent: []byte(jobInfoJson),\n\t\tSuccess: true,\n\t}\n}\n\n// CreateResourceStatsMessage creates message of type resource stats\nfunc CreateResourceStatsMessage(resourceStats core.ResourceStats) core.Message {\n\tresourceStatsJson, err := json.Marshal(resourceStats)\n\tif err != nil {\n\t\treturn core.Message{}\n\t}\n\treturn core.Message{\n\t\tType:    core.MsgResourceStats,\n\t\tContent: resourceStatsJson,\n\t\tSuccess: true,\n\t}\n}\n\n// GetResources returns dummy resources based on pod type\nfunc GetResources(tierOpts core.Tier) core.Specs {\n\tif val, ok := core.TierOpts[tierOpts]; ok 
{\n\t\treturn val\n\t}\n\treturn core.Specs{CPU: 0, RAM: 0}\n}\n\n// createYMlParsingResultMessage creates message for YML parsing result\nfunc createYMlParsingResultMessage(ymlParsingOutput core.YMLParsingResultMessage) core.Message {\n\tymlParsingOutputJSON, err := json.Marshal(ymlParsingOutput)\n\tif err != nil {\n\t\treturn core.Message{}\n\t}\n\treturn core.Message{\n\t\tType:    core.MsgYMLParsingResult,\n\t\tContent: ymlParsingOutputJSON,\n\t\tSuccess: true,\n\t}\n}\n"
  },
  {
    "path": "pkg/synapse/utils_test.go",
    "content": "package synapse\n\nimport (\n\t\"encoding/json\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/stretchr/testify/assert\"\n)\n\nfunc TestCreateLoginMessage(t *testing.T) {\n\tloginDetails := core.LoginDetails{\n\t\tSecretKey: \"dummysecretkey\",\n\t\tCPU:       4,\n\t\tRAM:       4096,\n\t}\n\tloginMessage := CreateLoginMessage(loginDetails)\n\tloginDetailsJSON, err := json.Marshal(loginDetails)\n\tif err != nil {\n\t\tt.Errorf(\"error in marshaling login details: %v\", err)\n\t}\n\tassert.Equal(t, loginDetailsJSON, loginMessage.Content)\n\tassert.Equal(t, core.MsgLogin, loginMessage.Type)\n}\n\nfunc TestCreateLogoutMessage(t *testing.T) {\n\tlogoutMessage := CreateLogoutMessage()\n\tassert.Empty(t, logoutMessage.Content)\n\tassert.Equal(t, core.MsgLogout, logoutMessage.Type)\n}\n\nfunc TestCreateJobUpdateMessage(t *testing.T) {\n\tjobInfo := core.JobInfo{\n\t\tStatus:  core.JobCompleted,\n\t\tJobID:   \"dummyjobid\",\n\t\tID:      \"dummyid\",\n\t\tMode:    \"nucleus\",\n\t\tBuildID: \"dummybuildid\",\n\t}\n\tjobInfoMessage := CreateJobUpdateMessage(jobInfo)\n\tjobInfoJSON, err := json.Marshal(jobInfo)\n\tif err != nil {\n\t\tt.Errorf(\"error in marshaling job info: %v\", err)\n\t}\n\tassert.Equal(t, jobInfoJSON, jobInfoMessage.Content)\n\tassert.Equal(t, core.MsgJobInfo, jobInfoMessage.Type)\n}\n\nfunc TestCreateResourceStatsMessage(t *testing.T) {\n\tresourceStats := core.ResourceStats{\n\t\tStatus: core.ResourceRelease,\n\t\tCPU:    2,\n\t\tRAM:    2000,\n\t}\n\tresourceStatsMessage := CreateResourceStatsMessage(resourceStats)\n\tresourceStatsJSON, err := json.Marshal(resourceStats)\n\tif err != nil {\n\t\tt.Errorf(\"error in marshaling job info: %v\", err)\n\t}\n\tassert.Equal(t, resourceStatsJSON, resourceStatsMessage.Content)\n\tassert.Equal(t, core.MsgResourceStats, resourceStatsMessage.Type)\n}\n"
  },
  {
    "path": "pkg/tasconfigdownloader/setup.go",
    "content": "package tasconfigdownloader\n\nimport (\n\t\"context\"\n\t\"errors\"\n\t\"fmt\"\n\t\"os\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/gitmanager\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/tasconfigmanager\"\n)\n\nconst ymlVersionMismtachRemarks = \"the yml structure is invalid, please check the TAS yml documentation : %s\"\n\ntype TASConfigDownloader struct {\n\tlogger           lumber.Logger\n\tgitmanager       core.GitManager\n\ttasconfigmanager core.TASConfigManager\n}\n\nfunc New(logger lumber.Logger) *TASConfigDownloader {\n\treturn &TASConfigDownloader{\n\t\tlogger:           logger,\n\t\tgitmanager:       gitmanager.NewGitManager(logger, nil),\n\t\ttasconfigmanager: tasconfigmanager.NewTASConfigManager(logger),\n\t}\n}\n\nfunc (t *TASConfigDownloader) GetTASConfig(ctx context.Context, gitProvider, commitID, repoSlug,\n\tfilePath string, oauth *core.Oauth, eventType core.EventType, licenseTier core.Tier) (*core.TASConfigDownloaderOutput, error) {\n\tymlPath, err := t.gitmanager.DownloadFileByCommit(ctx, gitProvider, repoSlug, commitID, filePath, oauth)\n\tif err != nil {\n\t\tt.logger.Errorf(\"error occurred while downloading file %s from %s for commitID %s, error %v\", filePath, repoSlug, commitID, err)\n\t\treturn nil, err\n\t}\n\n\tversion, err := t.tasconfigmanager.GetVersion(ymlPath)\n\tif err != nil {\n\t\tt.logger.Errorf(\"error reading version for tas config file %s, error %v\", ymlPath, err)\n\t\treturn nil, err\n\t}\n\n\ttasConfig, err := t.tasconfigmanager.LoadAndValidate(ctx, version, ymlPath, eventType, licenseTier, filePath)\n\tif err != nil {\n\t\tif supportedVersion := t.checkYmlValidityForOtherVersion(ctx, version, ymlPath, eventType,\n\t\t\tlicenseTier, filePath); supportedVersion != -1 {\n\t\t\terrMsg := fmt.Sprintf(ymlVersionMismtachRemarks, 
global.TASYmlConfigurationDocLink)\n\t\t\tt.logger.Errorf(\"error while parsing yml for commitID %s, error: %s\", commitID, errMsg)\n\t\t\treturn nil, errors.New(errMsg)\n\t\t}\n\t\tt.logger.Errorf(\"error while parsing yml for commitID %s error %v\", commitID, err)\n\t\treturn nil, err\n\t}\n\tif err := os.Remove(ymlPath); err != nil {\n\t\tt.logger.Errorf(\"failed to delete file %s , error %v\", ymlPath, err)\n\t\treturn nil, err\n\t}\n\treturn &core.TASConfigDownloaderOutput{Version: version, TASConfig: tasConfig}, nil\n}\n\nfunc (t *TASConfigDownloader) checkYmlValidityForOtherVersion(ctx context.Context,\n\tversion int,\n\tymlPath string,\n\teventType core.EventType,\n\tlicenseTier core.Tier, filePath string) int {\n\tfor _, supportedVersion := range global.ValidYMLVersions {\n\t\tif version == supportedVersion {\n\t\t\tcontinue\n\t\t}\n\t\tif _, err := t.tasconfigmanager.LoadAndValidate(ctx, supportedVersion, ymlPath, eventType, licenseTier, filePath); err == nil {\n\t\t\treturn supportedVersion\n\t\t}\n\t}\n\treturn -1\n}\n"
  },
  {
    "path": "pkg/tasconfigmanager/setup.go",
    "content": "// Package tasconfigmanager is used for fetching and validating the tas config file\npackage tasconfigmanager\n\nimport (\n\t\"context\"\n\t\"errors\"\n\t\"fmt\"\n\t\"os\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/utils\"\n)\n\nconst packageJSON = \"package.json\"\n\nvar tierEnumMapping = map[core.Tier]int{\n\tcore.XSmall: 1,\n\tcore.Small:  2,\n\tcore.Medium: 3,\n\tcore.Large:  4,\n\tcore.XLarge: 5,\n}\n\n// tasConfigManager represents an instance of TASConfigManager instance\ntype tasConfigManager struct {\n\tlogger lumber.Logger\n}\n\n// NewTASConfigManager creates and returns a new TASConfigManager instance\nfunc NewTASConfigManager(logger lumber.Logger) core.TASConfigManager {\n\treturn &tasConfigManager{logger: logger}\n}\n\nfunc (tc *tasConfigManager) LoadAndValidate(ctx context.Context,\n\tversion int,\n\tpath string,\n\teventType core.EventType,\n\tlicenseTier core.Tier, tasFilePathInRepo string) (interface{}, error) {\n\tyamlFile, err := os.ReadFile(path)\n\tif err != nil {\n\t\tif errors.Is(err, os.ErrNotExist) {\n\t\t\treturn nil, errs.New(fmt.Sprintf(\"Configuration file not found at path: %s\", tasFilePathInRepo))\n\t\t}\n\t\ttc.logger.Errorf(\"Error while reading file, error %v\", err)\n\t\treturn nil, errs.New(fmt.Sprintf(\"Error while reading configuration file at path: %s\", tasFilePathInRepo))\n\t}\n\tif version < global.NewTASVersion {\n\t\treturn tc.validateYMLV1(ctx, yamlFile, eventType, licenseTier, tasFilePathInRepo)\n\t}\n\treturn tc.validateYMLV2(ctx, yamlFile, eventType, licenseTier, tasFilePathInRepo)\n}\n\nfunc (tc *tasConfigManager) validateYMLV1(ctx context.Context,\n\tyamlFile []byte,\n\teventType core.EventType,\n\tlicenseTier core.Tier,\n\tfilePath string) (*core.TASConfig, error) {\n\ttasConfig, err 
:= utils.ValidateStructTASYmlV1(ctx, yamlFile, filePath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif tasConfig.CoverageThreshold == nil {\n\t\ttasConfig.CoverageThreshold = new(core.CoverageThreshold)\n\t}\n\n\tswitch eventType {\n\tcase core.EventPullRequest:\n\t\tif tasConfig.Premerge == nil {\n\t\t\treturn nil, errs.New(fmt.Sprintf(\"`preMerge` test cases are not configured in `%s` configuration file.\", filePath))\n\t\t}\n\tcase core.EventPush:\n\t\tif tasConfig.Postmerge == nil {\n\t\t\treturn nil, errs.New(fmt.Sprintf(\"`postMerge` test cases are not configured in `%s` configuration file.\", filePath))\n\t\t}\n\t}\n\tif err := isValidLicenseTier(tasConfig.Tier, licenseTier); err != nil {\n\t\ttc.logger.Errorf(\"LicenseTier validation failed. error: %v\", err)\n\t\treturn nil, err\n\t}\n\treturn tasConfig, nil\n}\n\nfunc isValidLicenseTier(yamlTier, licenseTier core.Tier) error {\n\tif tierEnumMapping[yamlTier] > tierEnumMapping[licenseTier] {\n\t\treturn errs.New(\n\t\t\tfmt.Sprintf(\n\t\t\t\t\"Sorry, the requested tier `%s` is not supported under the current plan. 
Please upgrade your plan.\",\n\t\t\t\tyamlTier))\n\t}\n\treturn nil\n}\n\nfunc (tc *tasConfigManager) validateYMLV2(ctx context.Context,\n\tyamlFile []byte,\n\teventType core.EventType,\n\tlicenseTier core.Tier,\n\tyamlFilePath string) (*core.TASConfigV2, error) {\n\ttasConfig, err := utils.ValidateStructTASYmlV2(ctx, yamlFile, yamlFilePath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif tasConfig.CoverageThreshold == nil {\n\t\ttasConfig.CoverageThreshold = new(core.CoverageThreshold)\n\t}\n\n\tswitch eventType {\n\tcase core.EventPullRequest:\n\t\tif tasConfig.PreMerge == nil {\n\t\t\treturn nil, fmt.Errorf(\"`preMerge` is missing in tas configuration file %s\", yamlFilePath)\n\t\t}\n\t\tsubModuleMap := map[string]bool{}\n\t\tfor i := 0; i < len(tasConfig.PreMerge.SubModules); i++ {\n\t\t\tif err := utils.ValidateSubModule(&tasConfig.PreMerge.SubModules[i]); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif _, ok := subModuleMap[tasConfig.PreMerge.SubModules[i].Name]; ok {\n\t\t\t\treturn nil, fmt.Errorf(\"duplicate subModule name found in `preMerge` in tas configuration file %s\", yamlFilePath)\n\t\t\t}\n\t\t\tsubModuleMap[tasConfig.PreMerge.SubModules[i].Name] = true\n\t\t}\n\n\tcase core.EventPush:\n\t\tif tasConfig.PostMerge == nil {\n\t\t\treturn nil, fmt.Errorf(\"`postMerge` is missing in tas configuration file %s\", yamlFilePath)\n\t\t}\n\t\tsubModuleMap := map[string]bool{}\n\n\t\tfor i := 0; i < len(tasConfig.PostMerge.SubModules); i++ {\n\t\t\tif err := utils.ValidateSubModule(&tasConfig.PostMerge.SubModules[i]); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif _, ok := subModuleMap[tasConfig.PostMerge.SubModules[i].Name]; ok {\n\t\t\t\treturn nil, fmt.Errorf(\"duplicate subModule name found in `postMerge` in tas configuration file %s\", yamlFilePath)\n\t\t\t}\n\t\t\tsubModuleMap[tasConfig.PostMerge.SubModules[i].Name] = true\n\t\t}\n\t}\n\tif err := isValidLicenseTier(tasConfig.Tier, licenseTier); err != nil 
{\n\t\ttc.logger.Errorf(\"LicenseTier validation failed. error: %v\", err)\n\t\treturn nil, err\n\t}\n\n\treturn tasConfig, nil\n}\n\nfunc (tc *tasConfigManager) GetVersion(path string) (int, error) {\n\tyamlFile, err := os.ReadFile(path)\n\tif err != nil {\n\t\tif errors.Is(err, os.ErrNotExist) {\n\t\t\treturn 0, errs.New(fmt.Sprintf(\"Configuration file not found at path: %s\", path))\n\t\t}\n\t\ttc.logger.Errorf(\"Error while reading file, error %v\", err)\n\t\treturn 0, errs.New(fmt.Sprintf(\"Error while reading configuration file at path: %s\", path))\n\t}\n\tversionYml, err := utils.GetVersion(yamlFile)\n\tif err != nil {\n\t\ttc.logger.Errorf(\"error while reading tas yml version : %v\", err)\n\t\treturn 0, err\n\t}\n\treturn versionYml, nil\n}\n\nfunc (tc *tasConfigManager) GetTasConfigFilePath(payload *core.Payload) (string, error) {\n\t// load tas yaml file\n\tfilePath, err := utils.GetTASFilePath(payload.TasFileName)\n\tif err != nil {\n\t\ttc.logger.Errorf(\"Unable to load tas yaml file, error: %v\", err)\n\t\treturn \"\", err\n\t}\n\treturn filePath, nil\n}\n"
  },
  {
    "path": "pkg/tasconfigmanager/setup_test.go",
    "content": "package tasconfigmanager\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"path\"\n\t\"reflect\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/testutils\"\n\t\"github.com/stretchr/testify/assert\"\n)\n\nfunc assertTasConfigV1(got, want *core.TASConfig) error {\n\tif got.SmartRun != want.SmartRun {\n\t\treturn fmt.Errorf(\"Mismatch in smart run got %t, want %t\", got.SmartRun, want.SmartRun)\n\t}\n\tif got.Framework != want.Framework {\n\t\treturn fmt.Errorf(\"Mismatch in framework , got %s , want %s\", got.Framework, want.Framework)\n\t}\n\tif got.ConfigFile != want.ConfigFile {\n\t\treturn fmt.Errorf(\"Mismatch in configFile , got %s , want %s\", got.ConfigFile, want.ConfigFile)\n\t}\n\tif got.NodeVersion != want.NodeVersion {\n\t\treturn fmt.Errorf(\"Mismatch in nodeVersion , got %s, want %s\", got.NodeVersion, want.NodeVersion)\n\t}\n\tif got.Tier != want.Tier {\n\t\treturn fmt.Errorf(\"Mismatch in tier , got %s, want %s\", got.Tier, want.Tier)\n\t}\n\tif got.SplitMode != want.SplitMode {\n\t\treturn fmt.Errorf(\"Mismatch in split mode , got %s, want %s\", got.SplitMode, want.SplitMode)\n\t}\n\tif got.Version != want.Version {\n\t\treturn fmt.Errorf(\"Mismatch in version , got %s, want %s\", got.Version, want.Version)\n\t}\n\tif !reflect.DeepEqual(*got.Premerge, *want.Premerge) {\n\t\treturn fmt.Errorf(\"Mismmatch in pre merge pattern , got %+v, want %+v\", *got.Premerge, *want.Premerge)\n\t}\n\tif !reflect.DeepEqual(*got.Postmerge, *want.Postmerge) {\n\t\treturn fmt.Errorf(\"Mismmatch in post merge pattern , got %+v, want %+v\", *got.Postmerge, *want.Postmerge)\n\t}\n\tif !reflect.DeepEqual(*got.Prerun, *want.Prerun) {\n\t\treturn fmt.Errorf(\"Mismmatch in preRun , got %+v, want %+v\", *got.Prerun, *want.Prerun)\n\t}\n\tif !reflect.DeepEqual(*got.Postrun, *want.Postrun) {\n\t\treturn fmt.Errorf(\"Mismmatch in preRun , got %+v, want %+v\", *got.Postrun, *want.Postrun)\n\t}\n\treturn 
nil\n}\n\nfunc assertTasConfigV2(got, want *core.TASConfigV2) error {\n\tif got.SmartRun != want.SmartRun {\n\t\treturn fmt.Errorf(\"Mismatch in smart run got %t, want %t\", got.SmartRun, want.SmartRun)\n\t}\n\n\tif got.Tier != want.Tier {\n\t\treturn fmt.Errorf(\"Mismatch in tier , got %s, want %s\", got.Tier, want.Tier)\n\t}\n\tif got.SplitMode != want.SplitMode {\n\t\treturn fmt.Errorf(\"Mismatch in split mode , got %s, want %s\", got.SplitMode, want.SplitMode)\n\t}\n\tif got.Version != want.Version {\n\t\treturn fmt.Errorf(\"Mismatch in version , got %s, want %s\", got.Version, want.Version)\n\t}\n\tif err := assertMergeV2(got.PreMerge, want.PreMerge, \"preMerge\"); err != nil {\n\t\treturn err\n\t}\n\n\treturn assertMergeV2(got.PostMerge, want.PostMerge, \"postMerge\")\n}\n\nfunc assertMergeV2(got, want *core.MergeV2, mode string) error {\n\tif !assert.ObjectsAreEqualValues(got.PreRun, want.PreRun) {\n\t\treturn fmt.Errorf(\"Mismatch in %s preRun , got %+v, want %+v\", mode, got.PreRun, want.PreRun)\n\t}\n\n\tif !reflect.DeepEqual(got.EnvMap, want.EnvMap) {\n\t\treturn fmt.Errorf(\"Mismatch in %s env , got %+v, want %+v\", mode, got.EnvMap, want.EnvMap)\n\t}\n\treturn nil\n}\n\nfunc TestLoadAndValidateV1(t *testing.T) {\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't initialize logger, error: %v\", err)\n\t}\n\n\ttasConfigManager := NewTASConfigManager(logger)\n\tctx := context.TODO()\n\ttests := []struct {\n\t\tName      string\n\t\tFilePath  string\n\t\tEventType core.EventType\n\t\tTier      core.Tier\n\t\twant      *core.TASConfig\n\t\twantErr   error\n\t}{\n\t\t{\n\t\t\t\"Invalid yml file for tas version 1\",\n\t\t\tpath.Join(\"../../\", \"testutils/testdata/tasyml/junk.yml\"),\n\t\t\tcore.EventPush,\n\t\t\tcore.Small,\n\t\t\tnil,\n\t\t\tfmt.Errorf(\"`%s` configuration file contains invalid format. 
Please correct the `%s` file\",\n\t\t\t\t\"../../testutils/testdata/tasyml/junk.yml\",\n\t\t\t\t\"../../testutils/testdata/tasyml/junk.yml\"),\n\t\t},\n\t\t{\n\t\t\t\"Valid Config\",\n\t\t\tpath.Join(\"../../\", \"testutils/testdata/tasyml/validwithCacheKey.yml\"),\n\t\t\tcore.EventPush,\n\t\t\tcore.Small,\n\t\t\t&core.TASConfig{\n\t\t\t\tSmartRun:  true,\n\t\t\t\tFramework: \"jest\",\n\t\t\t\tPostmerge: &core.Merge{\n\t\t\t\t\tEnvMap:   map[string]string{\"NODE_ENV\": \"development\"},\n\t\t\t\t\tPatterns: []string{\"{packages,scripts}/**/__tests__/*{.js,.coffee,[!d].ts}\"},\n\t\t\t\t},\n\t\t\t\tPremerge: &core.Merge{\n\t\t\t\t\tEnvMap:   map[string]string{\"NODE_ENV\": \"development\"},\n\t\t\t\t\tPatterns: []string{\"{packages,scripts}/**/__tests__/*{.js,.coffee,[!d].ts}\"},\n\t\t\t\t},\n\t\t\t\tPrerun:      &core.Run{EnvMap: map[string]string{\"NODE_ENV\": \"development\"}, Commands: []string{\"yarn\"}},\n\t\t\t\tPostrun:     &core.Run{Commands: []string{\"node --version\"}},\n\t\t\t\tConfigFile:  \"scripts/jest/config.source-www.js\",\n\t\t\t\tNodeVersion: \"14.17.6\",\n\t\t\t\tTier:        \"small\",\n\t\t\t\tSplitMode:   core.TestSplit,\n\t\t\t\tVersion:     \"1.0\",\n\t\t\t\tCache: &core.Cache{\n\t\t\t\t\tKey:   \"xyz\",\n\t\t\t\t\tPaths: []string{\"abcd\"},\n\t\t\t\t},\n\t\t\t},\n\t\t\tnil,\n\t\t},\n\t\t{\n\t\t\t\"PreMerge is empty in tas yml for PR\",\n\t\t\tpath.Join(\"../../\", \"testutils/testdata/tasyml/pre_merge_emptyv1.yml\"),\n\t\t\tcore.EventPullRequest,\n\t\t\tcore.Small,\n\t\t\tnil,\n\t\t\tfmt.Errorf(\"`preMerge` test cases are not configured in `%s` configuration file.\",\n\t\t\t\t\"../../testutils/testdata/tasyml/pre_merge_emptyv1.yml\"),\n\t\t},\n\t\t{\n\t\t\t\"post merge is empty in tas yml for push event \",\n\t\t\tpath.Join(\"../../\", \"testutils/testdata/tasyml/postmerge_emptyv1.yml\"),\n\t\t\tcore.EventPush,\n\t\t\tcore.Small,\n\t\t\tnil,\n\t\t\tfmt.Errorf(\"`postMerge` test cases are not configured in `%s` configuration 
file.\",\n\t\t\t\t\"../../testutils/testdata/tasyml/postmerge_emptyv1.yml\"),\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\ttas, err := tasConfigManager.LoadAndValidate(ctx, 1, tt.FilePath, tt.EventType, core.Small, tt.FilePath)\n\t\tif err != nil {\n\t\t\tassert.Equal(t, err.Error(), tt.wantErr.Error(), \"error mismatch\")\n\t\t} else {\n\t\t\ttasConfig := tas.(*core.TASConfig)\n\t\t\terr = assertTasConfigV1(tasConfig, tt.want)\n\t\t\tif err != nil {\n\t\t\t\tt.Errorf(err.Error())\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n}\n\n// nolint\nfunc TestLoadAndValidateV2(t *testing.T) {\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't initialize logger, error: %v\", err)\n\t}\n\n\ttasConfigManager := NewTASConfigManager(logger)\n\tctx := context.TODO()\n\ttests := []struct {\n\t\tName      string\n\t\tFilePath  string\n\t\tEventType core.EventType\n\t\tTier      core.Tier\n\t\twant      *core.TASConfigV2\n\t\twantErr   error\n\t}{\n\t\t{\n\t\t\t\"Invalid yml file for tas version 1\",\n\t\t\tpath.Join(\"../../\", \"testutils/testdata/tasyml/junk.yml\"),\n\t\t\tcore.EventPush,\n\t\t\tcore.Small,\n\t\t\tnil,\n\t\t\tfmt.Errorf(\"`%s` configuration file contains invalid format. 
Please correct the `%s` file\",\n\t\t\t\t\"../../testutils/testdata/tasyml/junk.yml\",\n\t\t\t\t\"../../testutils/testdata/tasyml/junk.yml\"),\n\t\t},\n\t\t{\n\t\t\t\"PreMerge is missing in tas yml file for pull_request event\",\n\t\t\tpath.Join(\"../../\", \"testutils/testdata/tasyml/premerge_emptyv2.yaml\"),\n\t\t\tcore.EventPullRequest,\n\t\t\tcore.Small,\n\t\t\tnil,\n\t\t\tfmt.Errorf(\"`preMerge` is missing in tas configuration file %s\",\n\t\t\t\t\"../../testutils/testdata/tasyml/premerge_emptyv2.yaml\"),\n\t\t},\n\t\t{\n\t\t\t\"PostMerge is missing in tas yml file for push event\",\n\t\t\tpath.Join(\"../../\", \"testutils/testdata/tasyml/postmerge_emptyv2.yaml\"),\n\t\t\tcore.EventPush,\n\t\t\tcore.Small,\n\t\t\tnil,\n\t\t\tfmt.Errorf(\"`postMerge` is missing in tas configuration file %s\",\n\t\t\t\t\"../../testutils/testdata/tasyml/postmerge_emptyv2.yaml\"),\n\t\t},\n\t\t{\n\t\t\t\"Duplicate submodule name in preMerge\",\n\t\t\tpath.Join(\"../../\", \"testutils/testdata/tasyml/duplicate_submodule_premerge.yaml\"),\n\t\t\tcore.EventPullRequest,\n\t\t\tcore.Small,\n\t\t\tnil,\n\t\t\tfmt.Errorf(\"duplicate subModule name found in `preMerge` in tas configuration file %s\",\n\t\t\t\t\"../../testutils/testdata/tasyml/duplicate_submodule_premerge.yaml\"),\n\t\t},\n\t\t{\n\t\t\t\"Duplicate submodule name in postMerge\",\n\t\t\tpath.Join(\"../../\", \"testutils/testdata/tasyml/duplicate_submodule_postmerge.yaml\"),\n\t\t\tcore.EventPush,\n\t\t\tcore.Small,\n\t\t\tnil,\n\t\t\tfmt.Errorf(\"duplicate subModule name found in `postMerge` in tas configuration file %s\",\n\t\t\t\t\"../../testutils/testdata/tasyml/duplicate_submodule_postmerge.yaml\"),\n\t\t},\n\t\t{\n\t\t\t\"Valid Config\",\n\t\t\t\"../../testutils/testdata/tasyml/valid_with_cachekeyV2.yml\",\n\t\t\tcore.EventPush,\n\t\t\tcore.Small,\n\t\t\t&core.TASConfigV2{\n\t\t\t\tSmartRun:  true,\n\t\t\t\tTier:      \"small\",\n\t\t\t\tSplitMode: core.TestSplit,\n\t\t\t\tPostMerge: &core.MergeV2{\n\t\t\t\t\tSubModules: 
[]core.SubModule{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"some-module-1\",\n\t\t\t\t\t\t\tPath: \"./somepath\",\n\t\t\t\t\t\t\tPatterns: []string{\n\t\t\t\t\t\t\t\t\"./x/y/z\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tFramework:  \"mocha\",\n\t\t\t\t\t\t\tConfigFile: \"x/y/z\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tPreMerge: &core.MergeV2{\n\t\t\t\t\tSubModules: []core.SubModule{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"some-module-1\",\n\t\t\t\t\t\t\tPath: \"./somepath\",\n\t\t\t\t\t\t\tPatterns: []string{\n\t\t\t\t\t\t\t\t\"./x/y/z\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tFramework:  \"jasmine\",\n\t\t\t\t\t\t\tConfigFile: \"/x/y/z\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tParallelism: 1,\n\t\t\t\tVersion:     \"2.0.1\",\n\t\t\t\tCache: &core.Cache{\n\t\t\t\t\tKey:   \"xyz\",\n\t\t\t\t\tPaths: []string{\"abcd\"},\n\t\t\t\t},\n\t\t\t},\n\t\t\tnil,\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\ttas, err := tasConfigManager.LoadAndValidate(ctx, 2, tt.FilePath, tt.EventType, core.Small, tt.FilePath)\n\t\tif err != nil {\n\t\t\tassert.Equal(t, err.Error(), tt.wantErr.Error(), \"error mismatch\")\n\t\t} else {\n\t\t\ttasConfig := tas.(*core.TASConfigV2)\n\t\t\terr = assertTasConfigV2(tasConfig, tt.want)\n\t\t\tif err != nil {\n\t\t\t\tt.Errorf(err.Error())\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "pkg/task/task.go",
    "content": "package task\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"net/http\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/utils\"\n)\n\n// task represents each instance of nucleus spawned by neuron\ntype task struct {\n\trequests core.Requests\n\tendpoint string\n\tlogger   lumber.Logger\n}\n\n// New returns new task\nfunc New(requests core.Requests, logger lumber.Logger) (core.Task, error) {\n\treturn &task{\n\t\trequests: requests,\n\t\tlogger:   logger,\n\t\tendpoint: global.NeuronHost + \"/task\",\n\t}, nil\n}\n\nfunc (t *task) UpdateStatus(ctx context.Context, payload *core.TaskPayload) error {\n\tt.logger.Debugf(\"sending status update of task: %s to %s for repository: %s\", payload.TaskID, payload.Status, payload.RepoLink)\n\treqBody, err := json.Marshal(payload)\n\tif err != nil {\n\t\tt.logger.Errorf(\"error while json marshal %v\", err)\n\t\treturn err\n\t}\n\tquery, headers := utils.GetDefaultQueryAndHeaders()\n\tif _, _, err := t.requests.MakeAPIRequest(ctx, http.MethodPut, t.endpoint, reqBody, query, headers); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n\n}\n"
  },
  {
    "path": "pkg/task/task_test.go",
    "content": "package task\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"net/http/httptest\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/requestutils\"\n\t\"github.com/LambdaTest/test-at-scale/testutils\"\n\t\"github.com/cenkalti/backoff/v4\"\n)\n\nvar noContext = context.Background()\n\nconst (\n\ttaskE  = \"/task\"\n\tnon200 = \"non 200 status code\"\n)\n\nfunc TestTask_UpdateStatus(t *testing.T) {\n\tcheck := func(t *testing.T, st int) {\n\t\tserver := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\tif r.URL.Path != taskE {\n\t\t\t\tt.Errorf(\"Expected to request '/task', got: %v\", r.URL)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tw.WriteHeader(st)\n\t\t\t_, err := w.Write([]byte(`{\"value\":\"fixed\"}`))\n\t\t\tif err != nil {\n\t\t\t\tfmt.Printf(\"Could not write data in httptest server, error: %v\", err)\n\t\t\t}\n\t\t}))\n\t\tdefer server.Close()\n\n\t\tlogger, err := lumber.NewLogger(lumber.LoggingConfig{ConsoleLevel: lumber.Debug}, true, 1)\n\t\trequests := requestutils.New(logger, global.DefaultAPITimeout, &backoff.StopBackOff{})\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Logger can't be established\")\n\t\t}\n\n\t\ttaskPayload, err := testutils.GetTaskPayload()\n\t\tif err != nil {\n\t\t\tt.Errorf(\"Couldn't get task payload, received: %v\", err)\n\t\t}\n\t\t_, err2 := New(requests, logger)\n\t\tif err2 != nil {\n\t\t\tt.Errorf(\"New task couldn't initialized, received: %v\", err)\n\t\t}\n\t\ttk := &task{\n\t\t\trequests: requests,\n\t\t\tlogger:   logger,\n\t\t\tendpoint: server.URL + taskE,\n\t\t}\n\n\t\tupdateStatusErr := tk.UpdateStatus(noContext, taskPayload)\n\n\t\tif st != 200 {\n\t\t\texpectedErr := non200\n\t\t\tif updateStatusErr == nil {\n\t\t\t\tt.Errorf(\"Expected: %s, Received: %s\", expectedErr, updateStatusErr)\n\t\t\t}\n\t\t\treturn\n\t\t}\n\t\tif updateStatusErr != 
nil {\n\t\t\tt.Errorf(\"Received: %v\", updateStatusErr)\n\t\t}\n\t}\n\n\tt.Run(\"TestUpdateStatus check for statusOK\", func(t *testing.T) {\n\t\tcheck(t, 200)\n\t})\n\tt.Run(\"TestUpdateStatus check for non statusOK\", func(t *testing.T) {\n\t\tcheck(t, 404)\n\t})\n}\n\nfunc TestTask_UpdateStatusForError(t *testing.T) {\n\tcheckErr := func(t *testing.T, st int) {\n\t\tserver := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\tif r.URL.Path != taskE {\n\t\t\t\tt.Errorf(\"Expected to request '/task', got: %v\", r.URL)\n\t\t\t}\n\t\t\tw.WriteHeader(st)\n\t\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\t}))\n\t\tdefer server.Close()\n\n\t\tlogger, err := lumber.NewLogger(lumber.LoggingConfig{ConsoleLevel: lumber.Debug}, true, 1)\n\t\trequests := requestutils.New(logger, global.DefaultAPITimeout, &backoff.StopBackOff{})\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Logger can't be established\")\n\t\t}\n\n\t\ttaskPayload, err := testutils.GetTaskPayload()\n\t\tif err != nil {\n\t\t\tt.Errorf(\"Couldn't get task payload, received: %v\", err)\n\t\t}\n\t\ttk := &task{\n\t\t\trequests: requests,\n\t\t\tlogger:   logger,\n\t\t\tendpoint: server.URL + taskE,\n\t\t}\n\n\t\tupdateStatusErr := tk.UpdateStatus(noContext, taskPayload)\n\n\t\tif st != 200 {\n\t\t\texpectedErr := non200\n\t\t\tif expectedErr != updateStatusErr.Error() {\n\t\t\t\tt.Errorf(\"Expected: %s, Received: %s\", expectedErr, updateStatusErr)\n\t\t\t}\n\t\t\treturn\n\t\t}\n\t\tif updateStatusErr != nil {\n\t\t\tt.Errorf(\"Received: %v\", updateStatusErr)\n\t\t}\n\t}\n\tt.Run(\"TestUpdateStatus check for error\", func(t *testing.T) {\n\t\tcheckErr(t, 404) // statusNotFound\n\t})\n}\n"
  },
  {
    "path": "pkg/testdiscoveryservice/testdiscovery.go",
    "content": "// Package testdiscoveryservice is used for discover tests\npackage testdiscoveryservice\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"net/http\"\n\t\"os/exec\"\n\t\"strings\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/logstream\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/utils\"\n)\n\ntype testDiscoveryService struct {\n\tlogger            lumber.Logger\n\texecManager       core.ExecutionManager\n\ttdResChan         chan core.DiscoveryResult\n\trequests          core.Requests\n\tdiscoveryEndpoint string\n}\n\n// NewTestDiscoveryService creates and returns a new testDiscoveryService instance\nfunc NewTestDiscoveryService(ctx context.Context,\n\ttdResChan chan core.DiscoveryResult,\n\texecManager core.ExecutionManager,\n\trequests core.Requests,\n\tlogger lumber.Logger) core.TestDiscoveryService {\n\treturn &testDiscoveryService{\n\t\tlogger:            logger,\n\t\texecManager:       execManager,\n\t\ttdResChan:         tdResChan,\n\t\trequests:          requests,\n\t\tdiscoveryEndpoint: global.NeuronHost + \"/test-list\",\n\t}\n}\n\nfunc (tds *testDiscoveryService) Discover(ctx context.Context, discoveryArgs *core.DiscoveyArgs) (*core.DiscoveryResult, error) {\n\tconfigFilePath, err := utils.GetConfigFileName(discoveryArgs.Payload.TasFileName)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\timpactAll := tds.shouldImpactAll(discoveryArgs.SmartRun, configFilePath, discoveryArgs.Diff)\n\n\targs := utils.GetArgs(\"discover\", discoveryArgs.FrameWork, discoveryArgs.FrameWorkVersion,\n\t\tdiscoveryArgs.TestConfigFile, discoveryArgs.TestPattern)\n\n\tif !impactAll {\n\t\tif len(discoveryArgs.Diff) == 0 && discoveryArgs.DiffExists {\n\t\t\t// empty diff; in PR, a commit added and then reverted to cause an overall empty PR diff\n\t\t\targs = append(args, global.ArgDiff)\n\t\t} else 
{\n\t\t\tfor k, v := range discoveryArgs.Diff {\n\t\t\t\t// in changed files we only have added or modified files.\n\t\t\t\tif v != core.FileRemoved {\n\t\t\t\t\targs = append(args, global.ArgDiff, k)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\ttds.logger.Debugf(\"Discovering tests at paths %+v\", discoveryArgs.TestPattern)\n\n\tcmd := exec.CommandContext(ctx, global.FrameworkRunnerMap[discoveryArgs.FrameWork], args...) //nolint:gosec\n\tcmd.Dir = discoveryArgs.CWD\n\tenvVars, err := tds.execManager.GetEnvVariables(discoveryArgs.EnvMap, discoveryArgs.SecretData)\n\tif err != nil {\n\t\ttds.logger.Errorf(\"failed to parse env variables, error: %v\", err)\n\t\treturn nil, err\n\t}\n\tcmd.Env = envVars\n\tlogWriter := lumber.NewWriter(tds.logger)\n\tdefer logWriter.Close()\n\tmaskWriter := logstream.NewMasker(logWriter, discoveryArgs.SecretData)\n\tcmd.Stdout = maskWriter\n\tcmd.Stderr = maskWriter\n\n\ttds.logger.Debugf(\"Executing test discovery command: %s\", cmd.String())\n\tif err := cmd.Run(); err != nil {\n\t\ttds.logger.Errorf(\"command %s of type %s failed with error: %v\", cmd.String(), core.Discovery, err)\n\t\treturn nil, err\n\t}\n\n\ttestDiscoveryResult := <-tds.tdResChan\n\treturn &testDiscoveryResult, nil\n}\n\nfunc (tds *testDiscoveryService) shouldImpactAll(smartRun bool, configFilePath string, diff map[string]int) bool {\n\timpactAll := !smartRun\n\tif _, ok := diff[configFilePath]; ok {\n\t\timpactAll = true\n\t}\n\tfor diffFile := range diff {\n\t\tif strings.HasSuffix(diffFile, global.PackageJSON) {\n\t\t\timpactAll = true\n\t\t\tbreak\n\t\t}\n\t}\n\treturn impactAll\n}\n\nfunc (tds *testDiscoveryService) SendResult(ctx context.Context, testDiscoveryResult *core.DiscoveryResult) error {\n\treqBody, err := json.Marshal(testDiscoveryResult)\n\tif err != nil {\n\t\ttds.logger.Errorf(\"error while json marshal %v\", err)\n\t\treturn err\n\t}\n\tquery, headers := utils.GetDefaultQueryAndHeaders()\n\tif _, _, err := tds.requests.MakeAPIRequest(ctx, 
http.MethodPost, tds.discoveryEndpoint, reqBody, query, headers); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}\n"
  },
  {
    "path": "pkg/testdiscoveryservice/testdiscovery_test.go",
    "content": "// Package testdiscoveryservice is used for discover tests\npackage testdiscoveryservice\n\nimport (\n\t\"context\"\n\t\"reflect\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/mocks\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/requestutils\"\n\t\"github.com/LambdaTest/test-at-scale/testutils\"\n\t\"github.com/cenkalti/backoff/v4\"\n\t\"github.com/stretchr/testify/mock\"\n)\n\ntype argsV1 struct {\n\tctx           context.Context\n\tdiscoveryArgs core.DiscoveyArgs\n}\n\ntype testV1 struct {\n\tname           string\n\targs           argsV1\n\twantErr        bool\n\twantEnvMap     map[string]string\n\twantSecretData map[string]string\n}\n\nfunc Test_testDiscoveryService_Discover(t *testing.T) {\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't initialize logger, error: %v\", err)\n\t}\n\trequests := requestutils.New(logger, global.DefaultAPITimeout, &backoff.StopBackOff{})\n\ttdResChan := make(chan core.DiscoveryResult)\n\tglobal.TestEnv = true\n\tdefer func() { global.TestEnv = false }()\n\n\tvar PassedEnvMap map[string]string        // envMap which should be passed to call execManager.GetEnvVariables\n\tvar PassedSecretDataMap map[string]string // secretData map which should be passed to call execManager.GetEnvVariables\n\n\texecManager := new(mocks.ExecutionManager)\n\texecManager.On(\"GetEnvVariables\", mock.AnythingOfType(\"map[string]string\"), mock.AnythingOfType(\"map[string]string\")).Return(\n\t\tfunc(envMap, secretData map[string]string) []string {\n\t\t\tPassedEnvMap = envMap\n\t\t\tPassedSecretDataMap = secretData\n\t\t\treturn []string{\"success\", \"ss\"}\n\t\t},\n\t\tfunc(envMap, secretData map[string]string) error {\n\t\t\tPassedEnvMap = envMap\n\t\t\tPassedSecretDataMap = secretData\n\t\t\treturn nil\n\t\t},\n\t)\n\ttds := NewTestDiscoveryService(context.TODO(), tdResChan, 
execManager, requests, logger)\n\ttests := getTestCases()\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\t_, err := tds.Discover(tt.args.ctx, &tt.args.discoveryArgs)\n\t\t\tif !reflect.DeepEqual(PassedEnvMap, tt.wantEnvMap) || !reflect.DeepEqual(PassedSecretDataMap, tt.wantSecretData) {\n\t\t\t\tt.Errorf(\"expected Envmap: %+v, received: %+v\\nexpected SecretDataMap: %+v, received: %+v\\n\",\n\t\t\t\t\ttt.wantEnvMap, PassedEnvMap, tt.wantSecretData, PassedSecretDataMap)\n\t\t\t}\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"got error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc getTestCases() []*testV1 {\n\ttestCases := []*testV1{\n\t\t{\"Test Discover with Premerge pattern\",\n\n\t\t\targsV1{\n\t\t\t\tctx: context.TODO(),\n\t\t\t\tdiscoveryArgs: core.DiscoveyArgs{\n\t\t\t\t\tTestPattern: []string{\"./test/**/*.spec.ts\"},\n\t\t\t\t\tPayload: &core.Payload{\n\t\t\t\t\t\tEventType:   core.EventPullRequest,\n\t\t\t\t\t\tTasFileName: \"../../tesutils/testdata/tas.yaml\",\n\t\t\t\t\t},\n\t\t\t\t\tEnvMap:         map[string]string{\"env\": \"repo\"},\n\t\t\t\t\tSecretData:     map[string]string{\"secret\": \"data\"},\n\t\t\t\t\tTestConfigFile: \"\",\n\t\t\t\t\tFrameWork:      \"jest\",\n\t\t\t\t\tSmartRun:       false,\n\t\t\t\t\tDiff:           map[string]int{},\n\t\t\t\t\tDiffExists:     true,\n\t\t\t\t},\n\t\t\t},\n\t\t\ttrue,\n\t\t\tmap[string]string{\"env\": \"repo\"},\n\t\t\tmap[string]string{\"secret\": \"data\"},\n\t\t},\n\t\t{\"Test Discover with Postmerge pattern\",\n\t\t\targsV1{\n\t\t\t\tctx: context.TODO(),\n\t\t\t\tdiscoveryArgs: core.DiscoveyArgs{\n\t\t\t\t\tTestPattern: []string{\"./test/**/*.spec.ts\"},\n\t\t\t\t\tEnvMap:      map[string]string{\"env\": \"RepoName\"},\n\t\t\t\t\tPayload: &core.Payload{\n\t\t\t\t\t\tEventType:   \"push\",\n\t\t\t\t\t\tTasFileName: \"../../tesutils/testdata/tas.yaml\",\n\t\t\t\t\t},\n\t\t\t\t\tSecretData:     map[string]string{\"this is\": \"a 
secret\"},\n\t\t\t\t\tFrameWork:      \"mocha\",\n\t\t\t\t\tTestConfigFile: \"\",\n\t\t\t\t\tSmartRun:       false,\n\t\t\t\t\tDiff:           map[string]int{},\n\t\t\t\t\tDiffExists:     false,\n\t\t\t\t},\n\t\t\t},\n\t\t\ttrue,\n\t\t\tmap[string]string{\"env\": \"RepoName\"},\n\t\t\tmap[string]string{\"this is\": \"a secret\"},\n\t\t},\n\t\t{\"Test Discover not to execute discoverAll\",\n\t\t\targsV1{\n\t\t\t\tctx: context.TODO(),\n\t\t\t\tdiscoveryArgs: core.DiscoveyArgs{\n\t\t\t\t\tTestPattern: []string{\"./test/**/*.spec.ts\"},\n\t\t\t\t\tEnvMap:      map[string]string{\"env\": \"RepoName\"},\n\t\t\t\t\tPayload: &core.Payload{\n\t\t\t\t\t\tEventType:                  \"push\",\n\t\t\t\t\t\tTasFileName:                \"../../tesutils/testdata/tas.yaml\",\n\t\t\t\t\t\tParentCommitCoverageExists: true,\n\t\t\t\t\t},\n\t\t\t\t\tSecretData: map[string]string{\"secret\": \"data\"},\n\t\t\t\t\tFrameWork:  \"jasmine\",\n\t\t\t\t},\n\t\t\t},\n\t\t\ttrue,\n\t\t\tmap[string]string{\"env\": \"RepoName\"},\n\t\t\tmap[string]string{\"secret\": \"data\"},\n\t\t},\n\t}\n\treturn testCases\n}\n"
  },
  {
    "path": "pkg/testexecutionservice/testexecution.go",
    "content": "// Package testexecutionservice is used for executing tests\npackage testexecutionservice\n\nimport (\n\t\"context\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"io\"\n\t\"net/http\"\n\t\"os\"\n\t\"os/exec\"\n\t\"path/filepath\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/logstream\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/service/teststats\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/utils\"\n)\n\nconst locatorFile = \"locators\"\n\ntype testExecutionService struct {\n\tlogger         lumber.Logger\n\tazureClient    core.AzureClient\n\tcfg            *config.NucleusConfig\n\tts             *teststats.ProcStats\n\texecManager    core.ExecutionManager\n\trequests       core.Requests\n\tserverEndpoint string\n}\n\n// NewTestExecutionService creates and returns a new TestExecutionService instance\nfunc NewTestExecutionService(cfg *config.NucleusConfig,\n\trequests core.Requests,\n\texecManager core.ExecutionManager,\n\tazureClient core.AzureClient,\n\tts *teststats.ProcStats,\n\tlogger lumber.Logger) core.TestExecutionService {\n\treturn &testExecutionService{cfg: cfg,\n\t\trequests:       requests,\n\t\tserverEndpoint: global.NeuronHost + \"/report\",\n\t\texecManager:    execManager,\n\t\tazureClient:    azureClient,\n\t\tts:             ts,\n\t\tlogger:         logger}\n}\n\n// Run executes the test files\nfunc (tes *testExecutionService) Run(ctx context.Context,\n\ttestExecutionArgs *core.TestExecutionArgs) (*core.ExecutionResults, error) {\n\tazureReader, azureWriter := io.Pipe()\n\tdefer azureWriter.Close()\n\n\terrChan := testExecutionArgs.LogWriterStrategy.Write(ctx, azureReader)\n\tdefer tes.closeAndWriteLog(azureWriter, errChan)\n\tlogWriter := lumber.NewWriter(tes.logger)\n\tdefer logWriter.Close()\n\tmultiWriter := 
io.MultiWriter(logWriter, azureWriter)\n\tmaskWriter := logstream.NewMasker(multiWriter, testExecutionArgs.SecretData)\n\n\targs, err := tes.buildCmdArgs(ctx, testExecutionArgs.TestConfigFile,\n\t\ttestExecutionArgs.FrameWork, testExecutionArgs.FrameWorkVersion, testExecutionArgs.Payload, testExecutionArgs.TestPattern)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tpayload := testExecutionArgs.Payload\n\tcollectCoverage := payload.CollectCoverage\n\tcommandArgs := args\n\tenvVars, err := tes.execManager.GetEnvVariables(testExecutionArgs.EnvMap, testExecutionArgs.SecretData)\n\tif err != nil {\n\t\ttes.logger.Errorf(\"failed to parse env variables, error: %v\", err)\n\t\treturn nil, err\n\t}\n\n\texecutionResults := &core.ExecutionResults{\n\t\tTaskID:   payload.TaskID,\n\t\tBuildID:  payload.BuildID,\n\t\tRepoID:   payload.RepoID,\n\t\tOrgID:    payload.OrgID,\n\t\tCommitID: payload.BuildTargetCommit,\n\t\tTaskType: payload.TaskType,\n\t}\n\tfor i := 1; i <= tes.cfg.ConsecutiveRuns; i++ {\n\t\tvar cmd *exec.Cmd\n\t\tif testExecutionArgs.FrameWork == \"jasmine\" || testExecutionArgs.FrameWork == \"mocha\" {\n\t\t\tif collectCoverage {\n\t\t\t\tcmd = exec.CommandContext(ctx, \"nyc\", commandArgs...)\n\t\t\t} else {\n\t\t\t\tcmd = exec.CommandContext(ctx, commandArgs[0], commandArgs[1:]...) //nolint:gosec\n\t\t\t}\n\t\t} else {\n\t\t\tcmd = exec.CommandContext(ctx, commandArgs[0], commandArgs[1:]...) 
//nolint:gosec\n\t\t\tif collectCoverage {\n\t\t\t\tenvVars = append(envVars, \"TAS_COLLECT_COVERAGE=true\")\n\t\t\t}\n\t\t}\n\t\tcmd.Dir = testExecutionArgs.CWD\n\t\tcmd.Env = envVars\n\t\tcmd.Stdout = maskWriter\n\t\tcmd.Stderr = maskWriter\n\t\ttes.logger.Debugf(\"Executing test execution command: %s\", cmd.String())\n\t\tif err := cmd.Start(); err != nil {\n\t\t\ttes.logger.Errorf(\"failed to execute test %s %v\", cmd.String(), err)\n\t\t\treturn nil, err\n\t\t}\n\t\tpid := int32(cmd.Process.Pid)\n\t\ttes.logger.Debugf(\"execution command started with pid %d\", pid)\n\n\t\tif err := tes.ts.CaptureTestStats(pid, tes.cfg.CollectStats); err != nil {\n\t\t\ttes.logger.Errorf(\"failed to find process for command %s with pid %d %v\", cmd.String(), pid, err)\n\t\t\treturn nil, err\n\t\t}\n\t\terr := cmd.Wait()\n\t\tresult := <-tes.ts.ExecutionResultOutputChannel\n\t\tif err != nil {\n\t\t\ttes.logger.Errorf(\"error in test execution: %+v\", err)\n\t\t\t// returning error when result is nil to throw execution errors like heap out of memory\n\t\t\tif result == nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t\tif result != nil {\n\t\t\texecutionResults.Results = append(executionResults.Results, result.Results...)\n\t\t}\n\t}\n\treturn executionResults, nil\n}\n\nfunc getPatternAndEnvV1(payload *core.Payload, tasConfig *core.TASConfig) (target []string, envMap map[string]string) {\n\tif payload.EventType == core.EventPullRequest {\n\t\ttarget = tasConfig.Premerge.Patterns\n\t\tenvMap = tasConfig.Premerge.EnvMap\n\t} else {\n\t\ttarget = tasConfig.Postmerge.Patterns\n\t\tenvMap = tasConfig.Postmerge.EnvMap\n\t}\n\treturn target, envMap\n}\n\nfunc (tes *testExecutionService) SendResults(ctx context.Context,\n\tpayload *core.ExecutionResults) (resp *core.TestReportResponsePayload, err error) {\n\treqBody, err := json.Marshal(payload)\n\tif err != nil {\n\t\ttes.logger.Errorf(\"failed to marshal request body %v\", err)\n\t\treturn nil, err\n\t}\n\tquery, headers := 
utils.GetDefaultQueryAndHeaders()\n\trespBody, _, err := tes.requests.MakeAPIRequest(ctx, http.MethodPost, tes.serverEndpoint, reqBody, query, headers)\n\tif err != nil {\n\t\ttes.logger.Errorf(\"error while sending reports %v\", err)\n\t\treturn nil, err\n\t}\n\terr = json.Unmarshal(respBody, &resp)\n\tif err != nil {\n\t\ttes.logger.Errorf(\"failed to unmarshal response body %v\", err)\n\t\treturn nil, err\n\t}\n\tif resp.TaskStatus == \"\" {\n\t\treturn nil, errors.New(\"empty task status\")\n\t}\n\treturn resp, nil\n}\n\nfunc (tes *testExecutionService) getLocatorsFile(ctx context.Context, locatorAddress string) (string, error) {\n\tresp, err := tes.azureClient.FindUsingSASUrl(ctx, locatorAddress)\n\tif err != nil {\n\t\ttes.logger.Errorf(\"Error while downloading locatorFile, error %v\", err)\n\t\treturn \"\", err\n\t}\n\tdefer resp.Close()\n\n\tlocatorFilePath := filepath.Join(os.TempDir(), locatorFile)\n\tout, err := os.Create(locatorFilePath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer out.Close()\n\n\tif _, err := io.Copy(out, resp); err != nil {\n\t\treturn \"\", err\n\t}\n\treturn locatorFilePath, err\n}\n\nfunc (tes *testExecutionService) closeAndWriteLog(azureWriter *io.PipeWriter, errChan <-chan error) {\n\tazureWriter.Close()\n\tif err := <-errChan; err != nil {\n\t\ttes.logger.Errorf(\"failed to upload logs for test execution, error: %v\", err)\n\t}\n}\n\nfunc (tes *testExecutionService) buildCmdArgs(ctx context.Context,\n\ttestConfigFile string,\n\tframeWork string,\n\tframeworkVersion int,\n\tpayload *core.Payload,\n\ttarget []string) ([]string, error) {\n\targs := []string{global.FrameworkRunnerMap[frameWork]}\n\n\targs = append(args, utils.GetArgs(\"execute\", frameWork, frameworkVersion, testConfigFile, target)...)\n\n\tif payload.LocatorAddress != \"\" {\n\t\tlocatorFile, err := tes.getLocatorsFile(ctx, payload.LocatorAddress)\n\t\ttes.logger.Debugf(\"locators : %v\\n\", locatorFile)\n\t\tif err != nil 
{\n\t\t\ttes.logger.Errorf(\"failed to get locator file, error: %v\", err)\n\t\t\treturn nil, err\n\t\t}\n\n\t\targs = append(args, global.ArgLocator, locatorFile)\n\t}\n\n\treturn args, nil\n}\n"
  },
  {
    "path": "pkg/testexecutionservice/testexecution_test.go",
    "content": "// Package testexecutionservice is used for executing tests\npackage testexecutionservice\n\nimport (\n\t\"context\"\n\t\"io\"\n\t\"io/ioutil\"\n\t\"reflect\"\n\t\"strings\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/mocks\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/requestutils\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/service/teststats\"\n\t\"github.com/LambdaTest/test-at-scale/testutils\"\n\t\"github.com/cenkalti/backoff/v4\"\n\t\"github.com/stretchr/testify/mock\"\n)\n\n// These tests are meant to be run on a Linux machine\n\nfunc TestNewTestExecutionService(t *testing.T) {\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't initialise logger, error: %v\", err)\n\t}\n\tcfg := new(config.NucleusConfig)\n\tcfg.ConsecutiveRuns = 1\n\tcfg.CollectStats = true\n\tvar ts *teststats.ProcStats\n\tazureClient := new(mocks.AzureClient)\n\texecManager := new(mocks.ExecutionManager)\n\trequests := requestutils.New(logger, global.DefaultAPITimeout, &backoff.StopBackOff{})\n\n\ttype args struct {\n\t\texecManager core.ExecutionManager\n\t\tazureClient core.AzureClient\n\t\tts          *teststats.ProcStats\n\t\tlogger      lumber.Logger\n\t}\n\ttests := []struct {\n\t\tname string\n\t\targs args\n\t\twant *testExecutionService\n\t}{\n\t\t{\"TestNewTestExecutionService\",\n\t\t\targs{execManager, azureClient, ts, logger},\n\t\t\t&testExecutionService{logger, azureClient, cfg, ts, execManager, requests, global.NeuronHost + \"/report\"}},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tif got := NewTestExecutionService(cfg, requests, tt.args.execManager,\n\t\t\t\ttt.args.azureClient, tt.args.ts, tt.args.logger); !reflect.DeepEqual(got, tt.want) 
{\n\t\t\t\tt.Errorf(\"NewTestExecutionService() = %v, want %v\", got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc Test_testExecutionService_GetLocatorsFile(t *testing.T) {\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't initialise logger, error: %v\", err)\n\t}\n\tvar ts *teststats.ProcStats\n\tazureClient := new(mocks.AzureClient)\n\texecManager := new(mocks.ExecutionManager)\n\tazureClient.On(\"GetSASURL\",\n\t\tmock.AnythingOfType(\"*context.emptyCtx\"),\n\t\tmock.AnythingOfType(\"string\"),\n\t\tmock.AnythingOfType(\"core.ContainerType\"),\n\t).Return(\"sasURL\", nil)\n\tazureClient.On(\"FindUsingSASUrl\",\n\t\tmock.AnythingOfType(\"*context.emptyCtx\"),\n\t\tmock.AnythingOfType(\"string\"),\n\t).Return(io.NopCloser(strings.NewReader(\"Hello, world!\")), nil)\n\n\ttype fields struct {\n\t\tlogger      lumber.Logger\n\t\tazureClient core.AzureClient\n\t\tts          *teststats.ProcStats\n\t\texecManager core.ExecutionManager\n\t}\n\ttype args struct {\n\t\tctx            context.Context\n\t\tlocatorAddress string\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\tfields  fields\n\t\targs    args\n\t\twant    string\n\t\twantErr bool\n\t}{\n\t\t{\"Test GetLocatorsFile\",\n\t\t\tfields{\n\t\t\t\tlogger:      logger,\n\t\t\t\tazureClient: azureClient,\n\t\t\t\tts:          ts,\n\t\t\t\texecManager: execManager,\n\t\t\t},\n\t\t\targs{\n\t\t\t\tctx:            context.TODO(),\n\t\t\t\tlocatorAddress: \"locAddr\",\n\t\t\t},\n\t\t\t\"/tmp/locators\",\n\t\t\tfalse,\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\ttes := &testExecutionService{\n\t\t\t\tlogger:      tt.fields.logger,\n\t\t\t\tazureClient: tt.fields.azureClient,\n\t\t\t\tts:          tt.fields.ts,\n\t\t\t\texecManager: tt.fields.execManager,\n\t\t\t}\n\t\t\tgot, err := tes.getLocatorsFile(tt.args.ctx, tt.args.locatorAddress)\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"testExecutionService.GetLocatorsFile() error = 
%v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif got != tt.want {\n\t\t\t\tt.Errorf(\"testExecutionService.GetLocatorsFile() = %v, want %v\", got, tt.want)\n\t\t\t}\n\t\t\tfile, err := ioutil.ReadFile(got)\n\t\t\tif err != nil {\n\t\t\t\tt.Errorf(\"testExecutionService.GetLocatorsFile() error in opening file = %v\", err)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif string(file) != \"Hello, world!\" {\n\t\t\t\tt.Errorf(\"testExecutionService.GetLocatorsFile() = %v, want %v\", string(file), \"Hello, world!\")\n\t\t\t}\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "pkg/tests/testutils.go",
    "content": "package tests\n\nimport (\n\t\"github.com/LambdaTest/test-at-scale/config\"\n)\n\n// MockConfig creates new dummy config\nfunc MockConfig() *config.SynapseConfig {\n\tcfg := config.SynapseConfig{\n\t\tLogFile: \"./synapsetest.go\",\n\t\tVerbose: true,\n\t\tLambdatest: config.LambdatestConfig{\n\t\t\tSecretKey: \"dummysecretkey\",\n\t\t},\n\t\tGit: config.GitConfig{\n\t\t\tToken:     \"dummytoken\",\n\t\t\tTokenType: \"Bearer\",\n\t\t},\n\t\tContainerRegistry: config.ContainerRegistryConfig{\n\t\t\tMode:       config.PublicMode,\n\t\t\tPullPolicy: config.PullAlways,\n\t\t},\n\t}\n\treturn &cfg\n}\n"
  },
  {
    "path": "pkg/urlmanager/urlmanager.go",
    "content": "package urlmanager\n\nimport (\n\t\"fmt\"\n\t\"net/url\"\n\t\"strings\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n)\n\n// GetCloneURL returns repo clone url for given git provider\nfunc GetCloneURL(gitprovider, repoLink, repo, commitID, forkSlug, repoSlug string) (string, error) {\n\tif global.TestEnv {\n\t\treturn global.TestServer, nil\n\t}\n\tswitch gitprovider {\n\tcase core.GitHub:\n\t\treturn fmt.Sprintf(\"%s/%s/zipball/%s\", global.APIHostURLMap[gitprovider], repoSlug, commitID), nil\n\tcase core.GitLab:\n\t\treturn fmt.Sprintf(\"%s/-/archive/%s/%s-%s.zip\", repoLink, commitID, repo, commitID), nil\n\n\tcase core.Bitbucket:\n\t\tif forkSlug != \"\" {\n\t\t\tforkLink := strings.Replace(repoLink, repoSlug, forkSlug, -1)\n\t\t\treturn fmt.Sprintf(\"%s/get/%s.zip\", forkLink, commitID), nil\n\t\t}\n\n\t\treturn fmt.Sprintf(\"%s/get/%s.zip\", repoLink, commitID), nil\n\n\tdefault:\n\t\treturn \"\", errs.ErrUnsupportedGitProvider\n\t}\n}\n\n// GetCommitDiffURL returns commit diff url for given git provider\nfunc GetCommitDiffURL(gitprovider, path, baseCommit, targetCommit, forkSlug string) (string, error) {\n\tif global.TestEnv {\n\t\treturn global.TestServer, nil\n\t}\n\tswitch gitprovider {\n\tcase core.GitHub:\n\t\treturn fmt.Sprintf(\"%s%s/compare/%s...%s\", global.APIHostURLMap[gitprovider], path, baseCommit, targetCommit), nil\n\n\tcase core.GitLab:\n\t\tencodedPath := url.QueryEscape(path[1:])\n\t\treturn fmt.Sprintf(\"%s/%s/repository/compare?from=%s&to=%s\",\n\t\t\tglobal.APIHostURLMap[gitprovider], encodedPath, baseCommit, targetCommit), nil\n\n\tcase core.Bitbucket:\n\t\tif forkSlug != \"\" {\n\t\t\treturn fmt.Sprintf(\"%s/repositories%s/diff/%s..%s\",\n\t\t\t\tglobal.APIHostURLMap[gitprovider], path, fmt.Sprintf(\"%s:%s\", forkSlug, targetCommit), baseCommit), nil\n\t\t}\n\t\treturn 
fmt.Sprintf(\"%s/repositories%s/diff/%s..%s\", global.APIHostURLMap[gitprovider], path, targetCommit, baseCommit), nil\n\n\tdefault:\n\t\treturn \"\", errs.ErrUnsupportedGitProvider\n\t}\n}\n\n// GetPullRequestDiffURL returns PR Diff url for given git provider\nfunc GetPullRequestDiffURL(gitprovider, path string, prNumber int) (string, error) {\n\tif global.TestEnv {\n\t\treturn global.TestServer, nil\n\t}\n\tswitch gitprovider {\n\tcase core.GitHub:\n\t\treturn fmt.Sprintf(\"%s%s/pulls/%d\", global.APIHostURLMap[gitprovider], path, prNumber), nil\n\n\tcase core.GitLab:\n\t\tencodedPath := url.QueryEscape(path[1:])\n\t\treturn fmt.Sprintf(\"%s/%s/merge_requests/%d/changes\", global.APIHostURLMap[gitprovider], encodedPath, prNumber), nil\n\n\tcase core.Bitbucket:\n\t\treturn fmt.Sprintf(\"%s/repositories%s/pullrequests/%d/diff\", global.APIHostURLMap[gitprovider], path, prNumber), nil\n\n\tdefault:\n\t\treturn \"\", errs.ErrUnsupportedGitProvider\n\t}\n}\n\n// GetFileDownloadURL returns download URL for file in repo\nfunc GetFileDownloadURL(gitprovider, commitID, repoSlug, filePath string) (string, error) {\n\tif global.TestEnv {\n\t\treturn global.TestServer, nil\n\t}\n\tswitch gitprovider {\n\tcase core.GitHub:\n\t\treturn fmt.Sprintf(\"https://raw.githubusercontent.com/%s/%s/%s\", repoSlug, commitID, filePath), nil\n\tcase core.GitLab:\n\t\trepoSlug = url.PathEscape(repoSlug)\n\t\tfilePath = url.PathEscape(filePath)\n\t\treturn fmt.Sprintf(\"%s/%s/repository/files/%s/raw?ref=%s\", global.APIHostURLMap[gitprovider], repoSlug, filePath, commitID), nil\n\tcase core.Bitbucket:\n\t\t// TODO: check for fork PR\n\t\treturn fmt.Sprintf(\"%s/repositories/%s/src/%s/%s\", global.APIHostURLMap[gitprovider], repoSlug, commitID, filePath), nil\n\tdefault:\n\t\treturn \"\", nil\n\t}\n}\n"
  },
  {
    "path": "pkg/urlmanager/urlmanager_test.go",
    "content": "package urlmanager\n\nimport (\n\t\"net/url\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n)\n\nfunc TestGetCloneURL(t *testing.T) {\n\ttype args struct {\n\t\tgitprovider string\n\t\trepoLink    string\n\t\trepo        string\n\t\tcommitID    string\n\t\trepoSlug    string\n\t\tforkSlug    string\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\targs    args\n\t\twant    string\n\t\twantErr bool\n\t}{\n\t\t{\n\t\t\t\"For github as git provider\",\n\t\t\targs{\"github\", \"https://github.com/nexe\", \"nexe\", \"abc\", \"nexe\", \"nexe/nexe\"},\n\t\t\t\"https://api.github.com/repos/nexe/nexe/zipball/abc\",\n\t\t\tfalse,\n\t\t},\n\t\t{\n\t\t\t\"For non-github and gitlab as git provider\",\n\t\t\targs{\"gittest\", \"https://github.com/nexe\", \"nexe\", \"abc\", \"nexe\", \"\"},\n\t\t\t\"\",\n\t\t\ttrue,\n\t\t},\n\t\t{\n\t\t\t\"For gitlab as git provider\",\n\t\t\targs{\"gitlab\", \"https://gitlab.com/nexe\", \"nexe\", \"abc\", \"nexe\", \"\"},\n\t\t\t\"https://gitlab.com/nexe/-/archive/abc/nexe-abc.zip\",\n\t\t\tfalse,\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tgot, err := GetCloneURL(tt.args.gitprovider, tt.args.repoLink, tt.args.repo, tt.args.commitID, tt.args.repoSlug, tt.args.forkSlug)\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"GetCloneURL() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif got != tt.want {\n\t\t\t\tt.Errorf(\"GetCloneURL() = %v, want %v\", got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc TestGetCommitDiffURL(t *testing.T) {\n\ttype args struct {\n\t\tgitprovider  string\n\t\tpath         string\n\t\tbaseCommit   string\n\t\ttargetCommit string\n\t\tforkSlug     string\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\targs    args\n\t\twant    string\n\t\twantErr bool\n\t}{\n\t\t{\n\t\t\t\"For github as git provider\",\n\t\t\targs{\"github\", \"/tests/nexe\", \"abc\", \"xyz\", 
\"\"},\n\t\t\t\"https://api.github.com/repos/tests/nexe/compare/abc...xyz\",\n\t\t\tfalse,\n\t\t},\n\t\t{\n\t\t\t\"For non-github and gitlab as git provider\",\n\t\t\targs{\"gittest\", \"tests/nexe\", \"abc\", \"xyz\", \"\"},\n\t\t\t\"\",\n\t\t\ttrue,\n\t\t},\n\t\t{\n\t\t\t\"For gitlab as git provider\",\n\t\t\targs{\"gitlab\", \"/tests/nexe\", \"abc\", \"xyz\", \"\"},\n\t\t\tglobal.APIHostURLMap[\"gitlab\"] + \"/\" + url.QueryEscape(\"tests/nexe\") + \"/repository/compare?from=abc&to=xyz\",\n\t\t\tfalse,\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tgot, err := GetCommitDiffURL(tt.args.gitprovider, tt.args.path, tt.args.baseCommit, tt.args.targetCommit, tt.args.forkSlug)\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"GetCommitDiffURL() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif got != tt.want {\n\t\t\t\tt.Errorf(\"GetCommitDiffURL() = %v, want %v\", got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc TestGetPullRequestDiffURL(t *testing.T) {\n\ttype args struct {\n\t\tgitprovider string\n\t\tpath        string\n\t\tprNumber    int\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\targs    args\n\t\twant    string\n\t\twantErr bool\n\t}{\n\t\t{\n\t\t\t\"For github as git provider\",\n\t\t\targs{\"github\", \"/tests/nexe\", 2},\n\t\t\t\"https://api.github.com/repos/tests/nexe/pulls/2\",\n\t\t\tfalse,\n\t\t},\n\t\t{\n\t\t\t\"For non-github and gitlab as git provider\",\n\t\t\targs{\"gittest\", \"tests/nexe\", 2},\n\t\t\t\"\",\n\t\t\ttrue},\n\t\t{\n\t\t\t\"For gitlab as git provider\",\n\t\t\targs{\"gitlab\", \"/tests/nexe\", 2},\n\t\t\tglobal.APIHostURLMap[\"gitlab\"] + \"/\" + url.QueryEscape(\"tests/nexe\") + \"/merge_requests/2/changes\",\n\t\t\tfalse,\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tgot, err := GetPullRequestDiffURL(tt.args.gitprovider, tt.args.path, tt.args.prNumber)\n\t\t\tif (err != nil) != tt.wantErr 
{\n\t\t\t\tt.Errorf(\"GetPullRequestDiffURL() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif got != tt.want {\n\t\t\t\tt.Errorf(\"GetPullRequestDiffURL() = %v, want %v\", got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "pkg/utils/utils.go",
    "content": "package utils\n\nimport (\n\t\"context\"\n\t\"crypto/md5\"\n\t\"fmt\"\n\t\"io\"\n\t\"os\"\n\t\"path/filepath\"\n\t\"reflect\"\n\t\"strconv\"\n\t\"strings\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/global\"\n\t\"github.com/bmatcuk/doublestar/v4\"\n\t\"github.com/go-playground/locales/en\"\n\tut \"github.com/go-playground/universal-translator\"\n\t\"github.com/go-playground/validator/v10\"\n\ten_translations \"github.com/go-playground/validator/v10/translations/en\"\n\t\"github.com/google/uuid\"\n\t\"gopkg.in/yaml.v3\"\n)\n\nconst (\n\tnamespaceSeparator = \".\"\n\temptyTagName       = \"-\"\n\tyamlTagName        = \"yaml\"\n\trequiredTagName    = \"required\"\n\tv1                 = 1\n\tv2                 = 2\n)\n\n// Min returns the smaller of x or y.\nfunc Min(x, y int) int {\n\tif x > y {\n\t\treturn y\n\t}\n\treturn x\n}\n\n// ComputeChecksum compute the md5 hash for the given filename\nfunc ComputeChecksum(filename string) (string, error) {\n\tchecksum := \"\"\n\n\tfile, err := os.Open(filename)\n\tif err != nil {\n\t\treturn checksum, err\n\t}\n\n\tdefer file.Close()\n\n\thash := md5.New()\n\tif _, err := io.Copy(hash, file); err != nil {\n\t\treturn checksum, err\n\t}\n\n\tchecksum = fmt.Sprintf(\"%x\", hash.Sum(nil))\n\treturn checksum, nil\n}\n\n// InterfaceToMap converts interface{} to map[string]string\nfunc InterfaceToMap(in interface{}) map[string]string {\n\tresult := make(map[string]string)\n\tfor key, value := range in.(map[string]interface{}) {\n\t\tresult[key] = value.(string)\n\t}\n\treturn result\n}\n\n// CreateDirectory creates directory recursively if does not exists\nfunc CreateDirectory(path string) error {\n\tif _, err := os.Lstat(path); os.IsNotExist(err) {\n\t\tif err := os.MkdirAll(path, global.DirectoryPermissions); err != nil {\n\t\t\treturn errs.ERR_DIR_CRT(err.Error())\n\t\t}\n\t}\n\treturn nil\n}\n\n// 
DeleteDirectory deletes directory and all its children\nfunc DeleteDirectory(path string) error {\n\tif err := os.RemoveAll(path); err != nil {\n\t\treturn errs.ErrDirDel(err.Error())\n\t}\n\treturn nil\n}\n\n// WriteFileToDirectory writes `data` file to `filename`/`path`\nfunc WriteFileToDirectory(path, filename string, data []byte) error {\n\tlocation := fmt.Sprintf(\"%s/%s\", path, filename)\n\tif err := os.WriteFile(location, data, global.FilePermissions); err != nil {\n\t\treturn errs.ERR_FIL_CRT(err.Error())\n\t}\n\treturn nil\n}\n\n// GetOutboundIP returns preferred outbound ip of this container\nfunc GetOutboundIP() string {\n\treturn global.SynapseContainerURL\n}\n\n// GetConfigFileName returns the name of the configuration file\nfunc GetConfigFileName(path string) (string, error) {\n\tif global.TestEnv {\n\t\treturn path, nil\n\t}\n\text := filepath.Ext(path)\n\t// Add support for both yaml extensions\n\tif ext == \".yaml\" || ext == \".yml\" {\n\t\tmatches, _ := doublestar.Glob(os.DirFS(global.RepoDir), strings.TrimSuffix(path, ext)+\".{yml,yaml}\")\n\t\tif len(matches) == 0 {\n\t\t\treturn \"\", errs.New(\n\t\t\t\tfmt.Sprintf(\n\t\t\t\t\t\"`%s` configuration file not found at the root of your project. Please make sure you have placed it correctly.\",\n\t\t\t\t\tpath))\n\t\t}\n\t\t// If there are files with the both extensions, pick the first match\n\t\tpath = matches[0]\n\t}\n\treturn path, nil\n}\n\nfunc ValidateStructTASYmlV1(ctx context.Context, ymlContent []byte, ymlFilename string) (*core.TASConfig, error) {\n\tvalidate, err := getValidator()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\ttasConfig := &core.TASConfig{SmartRun: true, Tier: core.Small, SplitMode: core.TestSplit, Version: global.DefaultTASVersion}\n\tif err := yaml.Unmarshal(ymlContent, tasConfig); err != nil {\n\t\treturn nil, fmt.Errorf(\"`%s` configuration file contains invalid format. 
Please correct the `%s` file\", ymlFilename, ymlFilename)\n\t}\n\tif err := validateStruct(validate, tasConfig, ymlFilename); err != nil {\n\t\treturn nil, err\n\t}\n\treturn tasConfig, nil\n}\n\n// configureValidator configure the struct validator\nfunc configureValidator(validate *validator.Validate, trans ut.Translator) {\n\tvalidate.RegisterTagNameFunc(func(fld reflect.StructField) string {\n\t\t// nolint: gomnd\n\t\tname := strings.SplitN(fld.Tag.Get(yamlTagName), \",\", 2)[0]\n\t\tif name == emptyTagName {\n\t\t\treturn fld.Name\n\t\t}\n\t\treturn name\n\t})\n\n\t// nolint: errcheck\n\tvalidate.RegisterTranslation(requiredTagName, trans, func(ut ut.Translator) error {\n\t\treturn ut.Add(requiredTagName, \"{0} field is required!\", true)\n\t}, func(ut ut.Translator, fe validator.FieldError) string {\n\t\ti := strings.Index(fe.Namespace(), namespaceSeparator)\n\t\tt, _ := ut.T(requiredTagName, fe.Namespace()[i+1:])\n\t\treturn t\n\t})\n}\n\n// GetVersion returns version of tas yml file\nfunc GetVersion(ymlContent []byte) (int, error) {\n\ttasVersion := &core.TasVersion{Version: global.DefaultTASVersion}\n\tif err := yaml.Unmarshal(ymlContent, tasVersion); err != nil {\n\t\treturn 0, fmt.Errorf(\"error in unmarshling tas yml file\")\n\t}\n\tmajorVersion := strings.Split(tasVersion.Version, \".\")[0]\n\n\tversion, err := strconv.Atoi(majorVersion)\n\tif err != nil {\n\t\treturn version, errs.New(\"error while parsing version for tas yml\")\n\t}\n\treturn version, err\n}\n\n// ValidateStructTASYmlV2 validates tas configuration file\nfunc ValidateStructTASYmlV2(ctx context.Context, ymlContent []byte, ymlFileName string) (*core.TASConfigV2, error) {\n\ttasConfig := &core.TASConfigV2{SmartRun: true, Tier: core.Small, SplitMode: core.TestSplit}\n\tif err := yaml.Unmarshal(ymlContent, tasConfig); err != nil {\n\t\treturn nil, fmt.Errorf(\"`%s` configuration file contains invalid format. 
Please correct the `%s` file\", ymlFileName, ymlFileName)\n\t}\n\tvalidate, err := getValidator()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif err := validateStruct(validate, tasConfig, ymlFileName); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn tasConfig, nil\n}\n\nfunc getValidator() (*validator.Validate, error) {\n\tenObj := en.New()\n\tuni := ut.New(enObj, enObj)\n\ttrans, _ := uni.GetTranslator(\"en\")\n\tvalidate := validator.New()\n\tif err := en_translations.RegisterDefaultTranslations(validate, trans); err != nil {\n\t\treturn nil, err\n\t}\n\tconfigureValidator(validate, trans)\n\treturn validate, nil\n}\n\nfunc validateStruct(validate *validator.Validate, config interface{}, ymlFilename string) error {\n\tvalidateErr := validate.Struct(config)\n\tif validateErr != nil {\n\t\t// translate all error at once\n\t\tvalidationErrs := validateErr.(validator.ValidationErrors)\n\t\terr := new(errs.ErrInvalidConf)\n\t\terr.Message = errs.New(\n\t\t\tfmt.Sprintf(\n\t\t\t\t\"Invalid values provided for the following fields in the `%s` configuration file: \\n\",\n\t\t\t\tymlFilename),\n\t\t).Error()\n\t\tfor _, e := range validationErrs {\n\t\t\t// can translate each error one at a time.\n\t\t\terr.Fields = append(err.Fields, e.Field())\n\t\t\terr.Values = append(err.Values, e.Value())\n\t\t}\n\t\treturn err\n\t}\n\treturn nil\n}\n\n// ValidateSubModule validates submodule\nfunc ValidateSubModule(module *core.SubModule) error {\n\tif module.Name == \"\" {\n\t\treturn errs.New(\"module name is not defined\")\n\t}\n\tif module.Path == \"\" {\n\t\treturn errs.New(fmt.Sprintf(\"module path is not defined for module %s \", module.Name))\n\t}\n\tif len(module.Patterns) == 0 {\n\t\treturn errs.New(fmt.Sprintf(\"module %s pattern length is 0\", module.Name))\n\t}\n\n\treturn nil\n}\n\n// GetDefaultQueryAndHeaders returns the query and headers that should be supplied with each request made to TAS Server\nfunc GetDefaultQueryAndHeaders() (query 
map[string]interface{}, headers map[string]string) {\n\tquery = map[string]interface{}{\n\t\t\"repoID\":  os.Getenv(\"REPO_ID\"),\n\t\t\"buildID\": os.Getenv(\"BUILD_ID\"),\n\t\t\"orgID\":   os.Getenv(\"ORG_ID\"),\n\t\t\"taskID\":  os.Getenv(\"TASK_ID\"),\n\t}\n\theaders = map[string]string{\n\t\t\"Authorization\": fmt.Sprintf(\"Bearer %s\", os.Getenv(\"TOKEN\")),\n\t}\n\treturn query, headers\n}\n\nfunc GetArgs(command string, frameWork string, frameworkVersion int,\n\tconfigFile string,\n\ttarget []string) []string {\n\tlanguage := global.FrameworkLanguageMap[frameWork]\n\n\targs := []string{}\n\tif language == \"java\" {\n\t\targs = append(args, \"-jar\", \"/test-at-scale-java.jar\",\n\t\t\tglobal.ArgCommand, command, global.ArgFrameworVersion,\n\t\t\tstrconv.Itoa(frameworkVersion))\n\t} else {\n\t\targs = append(args, global.ArgCommand, command)\n\t}\n\n\tif configFile != \"\" {\n\t\targs = append(args, global.ArgConfig, configFile)\n\t}\n\n\tfor _, pattern := range target {\n\t\targs = append(args, global.ArgPattern, pattern)\n\t}\n\n\treturn args\n}\n\n// GetTASFilePath returns tas file path\nfunc GetTASFilePath(path string) (string, error) {\n\tpath, err := GetConfigFileName(path)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tfilePath := fmt.Sprintf(\"%s/%s\", global.RepoDir, path)\n\treturn filePath, nil\n}\n\n// GenerateUUID generates uuid v4\nfunc GenerateUUID() string {\n\tuuidV4 := uuid.New() // panics on error\n\treturn strings.Map(func(r rune) rune {\n\t\tif r == '-' {\n\t\t\treturn -1\n\t\t}\n\t\treturn r\n\t}, uuidV4.String())\n}\n\n// ValidateStructTASYml validates the TAS config for all supported version\nfunc ValidateStructTASYml(ctx context.Context, ymlContent []byte, ymlFilename string) (interface{}, error) {\n\tversion, err := GetVersion(ymlContent)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tswitch version {\n\tcase v1:\n\t\treturn ValidateStructTASYmlV1(ctx, ymlContent, ymlFilename)\n\tcase v2:\n\t\treturn ValidateStructTASYmlV2(ctx, 
ymlContent, ymlFilename)\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"version %d is not supported \", version)\n\t}\n}\n"
  },
  {
    "path": "pkg/utils/utils_test.go",
    "content": "package utils\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"os\"\n\t\"path/filepath\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/testutils\"\n\t\"github.com/stretchr/testify/assert\"\n)\n\nconst (\n\tdirectory = \"../../testutils/testdirectory\"\n)\n\nfunc TestMin(t *testing.T) {\n\ttype args struct {\n\t\tx int\n\t\ty int\n\t}\n\ttests := []struct {\n\t\tname string\n\t\targs args\n\t\twant int\n\t}{\n\t\t{\"x: 5, y: -1\", args{5, -1}, -1},\n\t\t{\"x: 0, y: 0\", args{0, 0}, 0},\n\t\t{\"x: -293836, y: 0\", args{-293836, 0}, -293836},\n\t\t{\"x: 2545, y: 374\", args{2545, 374}, 374},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tif got := Min(tt.args.x, tt.args.y); got != tt.want {\n\t\t\t\tt.Errorf(\"Min() = %v, want %v\", got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc TestComputeChecksum(t *testing.T) {\n\t_, err := os.Create(\"dummy_file\")\n\tif err != nil {\n\t\tfmt.Printf(\"Error in creating file, error: %v\", err)\n\t}\n\ttype args struct {\n\t\tfilename string\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\targs    args\n\t\twant    string\n\t\twantErr bool\n\t}{\n\t\t{\"dummy_file_name\", args{\"dummy_file_name\"}, \"\", true},\n\t\t{\"dummy_file\", args{\"dummy_file\"}, \"d41d8cd98f00b204e9800998ecf8427e\", false},\n\t}\n\tdefer removeCreatedFile(\"dummy_file\")\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tgot, err := ComputeChecksum(tt.args.filename)\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"ComputeChecksum() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif got != tt.want {\n\t\t\t\tt.Errorf(\"ComputeChecksum() = %v, want %v\", got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc TestCreateDirectory(t *testing.T) {\n\tnewDir := \"../../testutils/nonExistingDir\"\n\texistDir := directory\n\ttype args struct 
{\n\t\tpath string\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\targs    args\n\t\twantErr bool\n\t}{\n\t\t{\"Existing directory: ../../testutils/testdirecotry\", args{existDir}, false},\n\t\t{\"Non-existing directory: ../../testutils/nonExistingDir\", args{newDir}, false},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tif err := CreateDirectory(tt.args.path); (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"CreateDirectory() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif tt.args.path == newDir {\n\t\t\t\tif _, err := os.Lstat(newDir); err != nil {\n\t\t\t\t\tt.Errorf(\"Directory did not exist, error: %v\", err)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tdefer removeCreatedFile(newDir)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc TestWriteFileToDirectory(t *testing.T) {\n\tpath := directory\n\tfilename := \"writeFileToDirectory\"\n\tdata := []byte(\"Hello world!\")\n\terr := WriteFileToDirectory(path, filename, data)\n\tif err != nil {\n\t\tt.Errorf(\"Error: %v\", err)\n\t\treturn\n\t}\n\tdefer removeCreatedFile(filepath.Join(path, filename))\n\tcheckData, err := os.ReadFile(filepath.Join(path, filename))\n\tif err != nil {\n\t\tt.Errorf(\"Error: %v\", err)\n\t\treturn\n\t}\n\tif string(checkData) != \"Hello world!\" {\n\t\tt.Errorf(\"expected file contents: Hello world!, got: %s\", string(checkData))\n\t}\n}\n\nfunc TestGetOutboundIP(t *testing.T) {\n\ttests := []struct {\n\t\tname string\n\t\twant string\n\t}{\n\t\t{\"Test1\", \"http://synapse:8000\"},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tif got := GetOutboundIP(); got != tt.want {\n\t\t\t\tt.Errorf(\"GetOutboundIP() = %v, want %v\", got, tt.want)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc TestValidateStructv1(t *testing.T) {\n\tctx := context.TODO()\n\ttests := []struct {\n\t\tname     string\n\t\tfilename string\n\t\twantErr  error\n\t\twant     *core.TASConfig\n\t}{\n\t\t{\n\t\t\t\"Junk characters 
File\",\n\t\t\t\"testutils/testdata/tasyml/junk.yml\",\n\t\t\t// nolint:lll\n\t\t\tfmt.Errorf(\"`testutils/testdata/tasyml/junk.yml` configuration file contains invalid format. Please correct the `testutils/testdata/tasyml/junk.yml` file\"),\n\t\t\tnil,\n\t\t},\n\t\t{\n\t\t\t\"Invalid Types\",\n\t\t\t\"testutils/testdata/tasyml/invalid_types.yml\",\n\t\t\t// nolint:lll\n\t\t\tfmt.Errorf(\"`testutils/testdata/tasyml/invalid_types.yml` configuration file contains invalid format. Please correct the `testutils/testdata/tasyml/invalid_types.yml` file\"),\n\t\t\tnil,\n\t\t},\n\t\t{\n\t\t\t\"Invalid Field Values\",\n\t\t\t\"testutils/testdata/tasyml/invalid_fields.yml\",\n\t\t\terrs.ErrInvalidConf{\n\t\t\t\t// nolint:lll\n\t\t\t\tMessage: \"Invalid values provided for the following fields in the `testutils/testdata/tasyml/invalid_fields.yml` configuration file: \\n\",\n\t\t\t\tFields:  []string{\"framework\", \"nodeVersion\"},\n\t\t\t\tValues:  []interface{}{\"hello\", \"test\"}},\n\t\t\tnil,\n\t\t},\n\t\t{\n\t\t\t\"Valid Config\",\n\t\t\t\"testutils/testdata/tasyml/valid.yml\",\n\t\t\tnil,\n\t\t\t&core.TASConfig{\n\t\t\t\tSmartRun:  true,\n\t\t\t\tFramework: \"jest\",\n\t\t\t\tPostmerge: &core.Merge{\n\t\t\t\t\tEnvMap:   map[string]string{\"NODE_ENV\": \"development\"},\n\t\t\t\t\tPatterns: []string{\"{packages,scripts}/**/__tests__/*{.js,.coffee,[!d].ts}\"},\n\t\t\t\t},\n\t\t\t\tPremerge: &core.Merge{\n\t\t\t\t\tEnvMap:   map[string]string{\"NODE_ENV\": \"development\"},\n\t\t\t\t\tPatterns: []string{\"{packages,scripts}/**/__tests__/*{.js,.coffee,[!d].ts}\"},\n\t\t\t\t},\n\t\t\t\tPrerun:      &core.Run{EnvMap: map[string]string{\"NODE_ENV\": \"development\"}, Commands: []string{\"yarn\"}},\n\t\t\t\tPostrun:     &core.Run{Commands: []string{\"node --version\"}},\n\t\t\t\tConfigFile:  \"scripts/jest/config.source-www.js\",\n\t\t\t\tNodeVersion: \"14.17.6\",\n\t\t\t\tTier:        \"small\",\n\t\t\t\tSplitMode:   core.TestSplit,\n\t\t\t\tVersion:     
\"1.0\",\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\t\"Valid Config - Only Framework\",\n\t\t\t\"testutils/testdata/tasyml/framework_only_required.yml\",\n\t\t\tnil,\n\t\t\t&core.TASConfig{\n\t\t\t\tSmartRun:  true,\n\t\t\t\tFramework: \"mocha\",\n\t\t\t\tTier:      \"small\",\n\t\t\t\tSplitMode: core.TestSplit,\n\t\t\t\tVersion:   \"1.2\",\n\t\t\t},\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tymlContent, err := testutils.LoadFile(tt.filename)\n\t\t\tif err != nil {\n\t\t\t\tt.Errorf(\"Error loading testfile %s\", tt.filename)\n\t\t\t\treturn\n\t\t\t}\n\t\t\ttasConfig, errV := ValidateStructTASYmlV1(ctx, ymlContent, tt.filename)\n\t\t\tif errV != nil {\n\t\t\t\tassert.Equal(t, errV.Error(), tt.wantErr.Error(), \"Error mismatch\")\n\t\t\t\treturn\n\t\t\t}\n\t\t\tassert.Equal(t, tt.want, tasConfig, \"Struct mismatch\")\n\t\t})\n\t}\n}\n\nfunc removeCreatedFile(path string) {\n\terr := os.RemoveAll(path)\n\tif err != nil {\n\t\tfmt.Println(\"error in removing!!\")\n\t}\n}\nfunc TestValidateStructv2(t *testing.T) {\n\tctx := context.TODO()\n\ttests := []struct {\n\t\tname     string\n\t\tfilename string\n\t\twantErr  error\n\t\twant     *core.TASConfigV2\n\t}{\n\t\t{\n\t\t\t\"Junk characters File\",\n\t\t\t\"testutils/testdata/tasyml/junk.yml\",\n\t\t\t// nolint:lll\n\t\t\tfmt.Errorf(\"`testutils/testdata/tasyml/junk.yml` configuration file contains invalid format. Please correct the `testutils/testdata/tasyml/junk.yml` file\"),\n\t\t\tnil,\n\t\t},\n\t\t{\n\t\t\t\"Invalid Types\",\n\t\t\t\"testutils/testdata/tasyml/invalid_typesv2.yml\",\n\t\t\t// nolint:lll\n\t\t\tfmt.Errorf(\"`testutils/testdata/tasyml/invalid_typesv2.yml` configuration file contains invalid format. 
Please correct the `testutils/testdata/tasyml/invalid_typesv2.yml` file\"),\n\t\t\tnil,\n\t\t},\n\n\t\t{\n\t\t\t\"Valid Config\",\n\t\t\t\"testutils/testdata/tasyml/validV2.yml\",\n\t\t\tnil,\n\t\t\t&core.TASConfigV2{\n\t\t\t\tSmartRun:  true,\n\t\t\t\tTier:      \"small\",\n\t\t\t\tSplitMode: core.TestSplit,\n\t\t\t\tPostMerge: &core.MergeV2{\n\t\t\t\t\tSubModules: []core.SubModule{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"some-module-1\",\n\t\t\t\t\t\t\tPath: \"./somepath\",\n\t\t\t\t\t\t\tPatterns: []string{\n\t\t\t\t\t\t\t\t\"./x/y/z\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tFramework:  \"mocha\",\n\t\t\t\t\t\t\tConfigFile: \"x/y/z\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tPreMerge: &core.MergeV2{\n\t\t\t\t\tSubModules: []core.SubModule{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"some-module-1\",\n\t\t\t\t\t\t\tPath: \"./somepath\",\n\t\t\t\t\t\t\tPatterns: []string{\n\t\t\t\t\t\t\t\t\"./x/y/z\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\tFramework:  \"jasmine\",\n\t\t\t\t\t\t\tConfigFile: \"/x/y/z\",\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tParallelism: 1,\n\t\t\t\tVersion:     \"2.0.1\",\n\t\t\t},\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tymlContent, err := testutils.LoadFile(tt.filename)\n\t\t\tif err != nil {\n\t\t\t\tt.Errorf(\"Error loading testfile %s\", tt.filename)\n\t\t\t\treturn\n\t\t\t}\n\t\t\ttasConfig, errV := ValidateStructTASYmlV2(ctx, ymlContent, tt.filename)\n\t\t\tif errV != nil {\n\t\t\t\tassert.Equal(t, errV.Error(), tt.wantErr.Error(), \"Error mismatch\")\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tassert.Equal(t, tt.want, tasConfig, \"Struct mismatch\")\n\t\t})\n\t}\n}\n\nfunc TestGetVersion(t *testing.T) {\n\ttests := []struct {\n\t\tname     string\n\t\tfilename string\n\t\twantErr  error\n\t\twant     int\n\t}{\n\t\t{\n\t\t\t\"Test with invalid version type\",\n\t\t\t\"testutils/testdata/tasyml/invalidVersion.yml\",\n\t\t\tfmt.Errorf(\"error while parsing version for tas 
yml\"),\n\t\t\t0,\n\t\t},\n\t\t{\n\t\t\t\"Test valid yml type for tas version 1\",\n\t\t\t\"testutils/testdata/tasyml/valid.yml\",\n\t\t\tnil,\n\t\t\t1,\n\t\t},\n\t\t{\n\t\t\t\"Test valid yml type for tas version 2\",\n\t\t\t\"testutils/testdata/tasyml/validV2.yml\",\n\t\t\tnil,\n\t\t\t2,\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tymlContent, err := testutils.LoadFile(tt.filename)\n\t\t\tif err != nil {\n\t\t\t\tt.Errorf(\"Error loading testfile %s\", tt.filename)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tversion, errV := GetVersion(ymlContent)\n\t\t\tif errV != nil {\n\t\t\t\tassert.Equal(t, errV.Error(), tt.wantErr.Error(), \"Error mismatch\")\n\t\t\t\treturn\n\t\t\t}\n\t\t\tassert.Equal(t, tt.want, version, \"value mismatch\")\n\t\t})\n\t}\n}\n\nfunc TestValidateSubModule(t *testing.T) {\n\ttests := []struct {\n\t\tname      string\n\t\tsubModule core.SubModule\n\t\twantErr   error\n\t}{\n\t\t{\n\t\t\t\"Test submodule if name is empty\",\n\t\t\tcore.SubModule{\n\t\t\t\tPath:     \"/x/y\",\n\t\t\t\tPatterns: []string{\"/a/c\"},\n\t\t\t},\n\n\t\t\terrs.New(\"module name is not defined\"),\n\t\t},\n\t\t{\n\t\t\t\"Test submodule if path is empty\",\n\t\t\tcore.SubModule{\n\t\t\t\tName:     \"some name\",\n\t\t\t\tPatterns: []string{\"/a/c\"},\n\t\t\t},\n\n\t\t\terrs.New(\"module path is not defined for module some name \"),\n\t\t},\n\t\t{\n\t\t\t\"Test submodule if pattern length is empty\",\n\t\t\tcore.SubModule{\n\t\t\t\tName: \"some-name\",\n\t\t\t\tPath: \"/x/y\",\n\t\t\t},\n\n\t\t\terrs.New(\"module some-name pattern length is 0\"),\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tgotErr := ValidateSubModule(&tt.subModule)\n\t\t\tassert.Equal(t, tt.wantErr, gotErr, \"Error mismatch\")\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "pkg/zstd/zstd.go",
    "content": "package zstd\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"io/ioutil\"\n\t\"os\"\n\t\"os/exec\"\n\t\"path/filepath\"\n\t\"strings\"\n\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n)\n\ntype zstdCompressor struct {\n\tlogger      lumber.Logger\n\texecManager core.ExecutionManager\n\texecPath    string\n}\n\nconst (\n\tmanifestFileName = \"manifest.txt\"\n\texecutableName   = \"tar\"\n)\n\n//New return zStandard compression manager\nfunc New(execManager core.ExecutionManager, logger lumber.Logger) (core.ZstdCompressor, error) {\n\tpath, err := exec.LookPath(executableName)\n\tif err != nil {\n\t\tlogger.Errorf(\"failed to find path for tar, error:%v\", err)\n\t\treturn nil, err\n\t}\n\n\treturn &zstdCompressor{logger: logger, execManager: execManager, execPath: path}, nil\n}\n\nfunc (z *zstdCompressor) createManifestFile(workingDir string, fileNames ...string) error {\n\treturn ioutil.WriteFile(filepath.Join(os.TempDir(), manifestFileName), []byte(strings.Join(fileNames, \"\\n\")), 0660)\n}\n\n// Compress compress the list of files\nfunc (z *zstdCompressor) Compress(ctx context.Context, compressedFileName string, preservePath bool, workingDirectory string, filesToCompress ...string) error {\n\tif err := z.createManifestFile(workingDirectory, filesToCompress...); err != nil {\n\t\tz.logger.Errorf(\"failed to create manifest file %v\", err)\n\t\treturn err\n\t}\n\tcommand := fmt.Sprintf(\"%s --posix -I 'zstd -5 -T0' -cf %s -C %s -T %s\", z.execPath, compressedFileName, workingDirectory, filepath.Join(os.TempDir(), manifestFileName))\n\tif preservePath {\n\t\tcommand = fmt.Sprintf(\"%s -P\", command)\n\t}\n\tcommands := []string{command}\n\tif err := z.execManager.ExecuteInternalCommands(ctx, core.Zstd, commands, workingDirectory, nil, nil); err != nil {\n\t\tz.logger.Errorf(\"error while zstd compression %v\", err)\n\t\treturn err\n\t}\n\treturn nil\n}\n\n//Decompress performs the decompression 
operation for the given file\nfunc (z *zstdCompressor) Decompress(ctx context.Context, filePath string, preservePath bool, workingDirectory string) error {\n\tcommand := fmt.Sprintf(\"%s --posix -I 'zstd -d' -xf %s -C %s\", z.execPath, filePath, workingDirectory)\n\tif preservePath {\n\t\tcommand = fmt.Sprintf(\"%s -P\", command)\n\t}\n\tcommands := []string{command}\n\tif err := z.execManager.ExecuteInternalCommands(ctx, core.Zstd, commands, workingDirectory, nil, nil); err != nil {\n\t\tz.logger.Errorf(\"error while zstd decompression %v\", err)\n\t\treturn err\n\t}\n\treturn nil\n}\n"
  },
  {
    "path": "pkg/zstd/zstd_test.go",
    "content": "package zstd\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"os\"\n\t\"path/filepath\"\n\t\"reflect\"\n\t\"testing\"\n\n\t\"github.com/LambdaTest/test-at-scale/mocks\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n\t\"github.com/LambdaTest/test-at-scale/testutils\"\n\t\"github.com/stretchr/testify/mock\"\n)\n\nconst tarPath = \"tar\"\n\nfunc TestNew(t *testing.T) {\n\texecManager := new(mocks.ExecutionManager)\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't initialize logger, error: %v\", err)\n\t}\n\n\t_, err2 := New(execManager, logger)\n\tif err2 != nil {\n\t\tt.Errorf(\"Couldn't initialize a new zstdCompressor, error: %v\", err2)\n\t}\n}\n\nfunc Test_zstdCompressor_createManifestFile(t *testing.T) {\n\texecManager := new(mocks.ExecutionManager)\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't initialize logger, error: %v\", err)\n\t}\n\n\tpath := tarPath\n\n\ttype fields struct {\n\t\tlogger      lumber.Logger\n\t\texecManager core.ExecutionManager\n\t\texecPath    string\n\t}\n\ttype args struct {\n\t\tworkingDir string\n\t\tfileNames  []string\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\tfields  fields\n\t\targs    args\n\t\twantErr bool\n\t}{\n\t\t{\n\t\t\t\"Test createManifestFile\",\n\t\t\tfields{logger: logger, execManager: execManager, execPath: path},\n\t\t\targs{\"./\", []string{\"file1\", \"file2\"}},\n\t\t\tfalse,\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tz := &zstdCompressor{\n\t\t\t\tlogger:      tt.fields.logger,\n\t\t\t\texecManager: tt.fields.execManager,\n\t\t\t\texecPath:    tt.fields.execPath,\n\t\t\t}\n\t\t\tif err := z.createManifestFile(tt.args.workingDir, tt.args.fileNames...); (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"zstdCompressor.createManifestFile() error = %v, wantErr %v\", err, 
tt.wantErr)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc Test_zstdCompressor_Compress(t *testing.T) {\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't initialize logger, error: %v\", err)\n\t}\n\tpath := tarPath\n\t// ReceivedStringArg will have args passed to ExecuteInternalCommands\n\tvar ReceivedArgs []string\n\texecManager := new(mocks.ExecutionManager)\n\texecManager.On(\"ExecuteInternalCommands\",\n\t\tmock.AnythingOfType(\"*context.emptyCtx\"),\n\t\tmock.AnythingOfType(\"core.CommandType\"),\n\t\tmock.AnythingOfType(\"[]string\"),\n\t\tmock.AnythingOfType(\"string\"),\n\t\tmock.AnythingOfType(\"map[string]string\"),\n\t\tmock.AnythingOfType(\"map[string]string\"),\n\t).Return(\n\t\tfunc(ctx context.Context, commandType core.CommandType, commands []string, cwd string, envMap, secretData map[string]string) error {\n\t\t\tReceivedArgs = commands\n\t\t\treturn nil\n\t\t},\n\t)\n\texecManagerErr := new(mocks.ExecutionManager)\n\texecManagerErr.On(\"ExecuteInternalCommands\",\n\t\tmock.AnythingOfType(\"*context.emptyCtx\"),\n\t\tmock.AnythingOfType(\"core.CommandType\"),\n\t\tmock.AnythingOfType(\"[]string\"),\n\t\tmock.AnythingOfType(\"string\"),\n\t\tmock.AnythingOfType(\"map[string]string\"),\n\t\tmock.AnythingOfType(\"map[string]string\"),\n\t).Return(\n\t\tfunc(ctx context.Context, commandType core.CommandType, commands []string,\n\t\t\tcwd string, envMap, secretData map[string]string) error {\n\t\t\tReceivedArgs = commands\n\t\t\treturn errs.New(\"error from mocked interface\")\n\t\t},\n\t)\n\n\ttype fields struct {\n\t\tlogger      lumber.Logger\n\t\texecManager core.ExecutionManager\n\t\texecPath    string\n\t}\n\ttype args struct {\n\t\tctx                context.Context\n\t\tcompressedFileName string\n\t\tpreservePath       bool\n\t\tworkingDirectory   string\n\t\tfilesToCompress    []string\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\tfields  fields\n\t\targs    args\n\t\twantErr bool\n\t}{\n\t\t{\n\t\t\t\"Test Compress 
for success, with preservePath=true\",\n\t\t\tfields{logger: logger, execManager: execManager, execPath: path},\n\t\t\targs{context.TODO(), \"compressedFileName\", true, \"./\", []string{\"f1\", \"f2\"}},\n\t\t\tfalse,\n\t\t},\n\t\t{\n\t\t\t\"Test Compress for success, with preservePath=false\",\n\t\t\tfields{logger: logger, execManager: execManager, execPath: path},\n\t\t\targs{context.TODO(), \"compressedFileName\", false, \"./\", []string{\"f1\", \"f2\"}},\n\t\t\tfalse,\n\t\t},\n\t\t{\n\t\t\t\"Test Compress for error\",\n\t\t\tfields{logger: logger, execManager: execManagerErr, execPath: path},\n\t\t\targs{context.TODO(), \"compressedFileName\", true, \"./\", []string{\"f1\", \"f2\"}},\n\t\t\ttrue,\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tz := &zstdCompressor{\n\t\t\t\tlogger:      tt.fields.logger,\n\t\t\t\texecManager: tt.fields.execManager,\n\t\t\t\texecPath:    tt.fields.execPath,\n\t\t\t}\n\t\t\terr := z.Compress(tt.args.ctx, tt.args.compressedFileName, tt.args.preservePath, tt.args.workingDirectory, tt.args.filesToCompress...)\n\t\t\tif (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"zstdCompressor.Compress() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tcommand := fmt.Sprintf(\"%s --posix -I 'zstd -5 -T0' -cf compressedFileName -C ./ -T %s\",\n\t\t\t\tz.execPath, filepath.Join(os.TempDir(), manifestFileName))\n\t\t\tif tt.args.preservePath {\n\t\t\t\tcommand = fmt.Sprintf(\"%s -P\", command)\n\t\t\t}\n\t\t\tcommands := []string{command}\n\t\t\tif !reflect.DeepEqual(ReceivedArgs, commands) {\n\t\t\t\tt.Errorf(\"Expected commands: %v, got: %v\", commands, ReceivedArgs)\n\t\t\t}\n\t\t})\n\t}\n}\n\nfunc Test_zstdCompressor_Decompress(t *testing.T) {\n\tlogger, err := testutils.GetLogger()\n\tif err != nil {\n\t\tt.Errorf(\"Couldn't initialize logger, error: %v\", err)\n\t}\n\n\tpath := tarPath\n\n\t// ReceivedStringArg will have args passed to ExecuteInternalCommands\n\tvar 
ReceivedArgs []string\n\texecManager := new(mocks.ExecutionManager)\n\texecManager.On(\"ExecuteInternalCommands\",\n\t\tmock.AnythingOfType(\"*context.emptyCtx\"),\n\t\tmock.AnythingOfType(\"core.CommandType\"),\n\t\tmock.AnythingOfType(\"[]string\"),\n\t\tmock.AnythingOfType(\"string\"),\n\t\tmock.AnythingOfType(\"map[string]string\"),\n\t\tmock.AnythingOfType(\"map[string]string\")).Return(\n\t\tfunc(ctx context.Context, commandType core.CommandType, commands []string,\n\t\t\tcwd string, envMap, secretData map[string]string) error {\n\t\t\tReceivedArgs = commands\n\t\t\treturn nil\n\t\t})\n\n\texecManagerErr := new(mocks.ExecutionManager)\n\texecManagerErr.On(\"ExecuteInternalCommands\",\n\t\tmock.AnythingOfType(\"*context.emptyCtx\"),\n\t\tmock.AnythingOfType(\"core.CommandType\"),\n\t\tmock.AnythingOfType(\"[]string\"),\n\t\tmock.AnythingOfType(\"string\"),\n\t\tmock.AnythingOfType(\"map[string]string\"),\n\t\tmock.AnythingOfType(\"map[string]string\"),\n\t).Return(\n\t\tfunc(ctx context.Context, commandType core.CommandType, commands []string,\n\t\t\tcwd string, envMap, secretData map[string]string) error {\n\t\t\tReceivedArgs = commands\n\t\t\treturn errs.New(\"error from mocked interface\")\n\t\t})\n\n\ttype fields struct {\n\t\tlogger      lumber.Logger\n\t\texecManager core.ExecutionManager\n\t\texecPath    string\n\t}\n\ttype args struct {\n\t\tctx              context.Context\n\t\tfilePath         string\n\t\tpreservePath     bool\n\t\tworkingDirectory string\n\t}\n\ttests := []struct {\n\t\tname    string\n\t\tfields  fields\n\t\targs    args\n\t\twantErr bool\n\t}{\n\t\t{\n\t\t\t\"Tests Decompress for success with preservePath=true\",\n\t\t\tfields{logger: logger, execManager: execManager, execPath: path},\n\t\t\targs{ctx: context.TODO(), filePath: \"./\", preservePath: true, workingDirectory: \"./\"},\n\t\t\tfalse,\n\t\t},\n\n\t\t{\n\t\t\t\"Tests Decompress for success with preservePath=false\",\n\t\t\tfields{logger: logger, execManager: execManager, 
execPath: path},\n\t\t\targs{ctx: context.TODO(), filePath: \"./\", preservePath: false, workingDirectory: \"./\"},\n\t\t\tfalse,\n\t\t},\n\n\t\t{\n\t\t\t\"Tests Decompress for error\",\n\t\t\tfields{logger: logger, execManager: execManagerErr, execPath: path},\n\t\t\targs{ctx: context.TODO(), filePath: \"./\", preservePath: true, workingDirectory: \"./\"},\n\t\t\ttrue,\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tz := &zstdCompressor{\n\t\t\t\tlogger:      tt.fields.logger,\n\t\t\t\texecManager: tt.fields.execManager,\n\t\t\t\texecPath:    tt.fields.execPath,\n\t\t\t}\n\t\t\tif err := z.Decompress(tt.args.ctx, tt.args.filePath, tt.args.preservePath, tt.args.workingDirectory); (err != nil) != tt.wantErr {\n\t\t\t\tt.Errorf(\"zstdCompressor.Decompress() error = %v, wantErr %v\", err, tt.wantErr)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tcommand := fmt.Sprintf(\"%s --posix -I 'zstd -d' -xf ./ -C ./\", z.execPath)\n\n\t\t\tif tt.args.preservePath {\n\t\t\t\tcommand = fmt.Sprintf(\"%s -P\", command)\n\t\t\t}\n\t\t\tcommands := []string{command}\n\t\t\tif !reflect.DeepEqual(ReceivedArgs, commands) {\n\t\t\t\tt.Errorf(\"Expected args: %v, got: %v\", commands, ReceivedArgs)\n\t\t\t}\n\t\t})\n\t}\n}\n"
  },
  {
    "path": "runner.conf",
    "content": "root:              .\ntmp_path:          ./builds\nbuild_name:         lt\nvalid_ext:         .go\nno_rebuild_ext:    .tpl, .tmpl, .html\nignored:           assets, tmp, vendor\nbuild_delay:       600\ncolors:            1\nlog_color_main:    cyan\nlog_color_build:   yellow\nlog_color_runner:  green\nlog_color_watcher: magenta\nlog_color_app:"
  },
  {
    "path": "sample-tas.yaml",
    "content": "# supported frameworks: mocha|jest|jasmine\nframework: mocha\n# supported tiers: xmall|small|medium|large|xlarge\ntier: xsmall\nblocklist:\n  # format: \"<filename>##<suit-name>##<suit-name>##<test-name>\"\n  - \"src/test/api.js\"\n  - \"src/test/api1.js##this is a test-suite\"\n  - \"src/test/api2.js##this is a test-suite##this is a test-case\"\npostMerge:\n  # env vars provided at the time of discovering and executing the post-merge tests\n  env:\n    REPONAME: nexe\n    AWS_KEY: ${{ secrets.AWS_KEY }}\n  # glob-pattern for identifying the test files\n  pattern:\n    - \"./test/**/*.spec.ts\"\n  # strategy for trigerring builds for post-merge\n  strategy:\n    threshold: 1\n    name: after_n_commits\npreMerge:\n  pattern:\n    - \"./test/**/*.spec.ts\"\npreRun:\n  # set of commands to run before running the tests like `yarn install`, `yarn build`\n  command:\n    - npm ci\n    - docker build --build-arg NPM_TOKEN=${{ secrets.NPM_TOKEN }} --tag=nucleus\npostRun:\n  # set of commands to run after running the tests\n  command:\n    - node --version\n# path to your custom configuration file required by framework\nconfigFile: mocharc.yml\n# provide the version of nodejs required for your project\nnodeVersion: 14.17.2\nversion: 2.0\n"
  },
  {
    "path": "scripts/.eslintrc.json",
    "content": "{\n    \"env\": {\n        \"commonjs\": true,\n        \"es2021\": true,\n        \"node\": true\n    },\n    \"extends\": [\n        \"google\"\n    ],\n    \"parserOptions\": {\n        \"ecmaVersion\": 12\n    },\n    \"rules\": {\n        \"require-jsdoc\":\"off\",\n        \"max-len\":\"off\"\n    }\n}\n"
  },
  {
    "path": "scripts/custom-reporter.js",
    "content": "\"use strict\";\nconst { ReportBase } = require(\"istanbul-lib-report\");\n\nfunction nodeMissing(metrics, fileCoverage) {\n  const isEmpty = metrics.isEmpty();\n  const lines = isEmpty ? 0 : metrics.lines.pct;\n  let coveredLines;\n\n  if (lines === 100) {\n    const branches = fileCoverage.getBranchCoverageByLine();\n    coveredLines = Object.entries(branches).map(([key, { coverage }]) => [\n      key,\n      coverage === 100,\n    ]);\n  } else {\n    coveredLines = Object.entries(fileCoverage.getLineCoverage());\n  }\n\n  let newRange = true;\n  const ranges = coveredLines\n    .reduce((acum, [line, hit]) => {\n      if (hit) newRange = true;\n      else {\n        line = parseInt(line);\n        if (newRange) {\n          acum.push([line]);\n          newRange = false;\n        } else acum[acum.length - 1][1] = line;\n      }\n\n      return acum;\n    }, [])\n    .map((range) => {\n      const { length } = range;\n\n      if (length === 1) return range[0];\n\n      return `${range[0]}-${range[1]}`;\n    });\n\n  return [].concat(...ranges).join(\",\");\n}\n\nclass JsonSummaryReport extends ReportBase {\n  constructor(opts) {\n    super();\n\n    const { maxCols } = opts;\n\n    this.maxCols = maxCols != null ? 
maxCols : process.stdout.columns || 80;\n    this.file = opts.file || \"coverage-merged.json\";\n    this.contentWriter = null;\n    this.first = true;\n  }\n\n  onStart(root, context) {\n    this.contentWriter = context.writer.writeFile(this.file);\n    this.contentWriter.write(\"{\");\n  }\n\n  writeSummary(filePath, sc, uncovered) {\n    const cw = this.contentWriter;\n    if (this.first) {\n      this.first = false;\n    } else {\n      cw.write(\",\");\n    }\n    if (uncovered) {\n      sc.data.uncovered_lines = uncovered;\n    }\n    cw.write(JSON.stringify(filePath));\n    cw.write(\": \");\n    cw.write(JSON.stringify(sc));\n    cw.println(\"\");\n  }\n\n  onSummary(node) {\n    if (!node.isRoot()) {\n      return;\n    }\n    this.writeSummary(\"total\", node.getCoverageSummary());\n  }\n\n  onDetail(node) {\n    const metrics = node.getCoverageSummary();\n    const fileCoverage = node.getFileCoverage();\n    let missingLines;\n    if (!node.isSummary()) {\n      missingLines = nodeMissing(metrics, fileCoverage);\n    }\n    this.writeSummary(fileCoverage.path, metrics, missingLines);\n  }\n\n  onEnd() {\n    const cw = this.contentWriter;\n    cw.println(\"}\");\n    cw.close();\n  }\n}\nmodule.exports = JsonSummaryReport;\n"
  },
  {
    "path": "scripts/mapCoverage.js",
    "content": "const istanbulCoverage = require('istanbul-lib-coverage');\nconst istanbulReport = require('istanbul-lib-report');\nconst istanbulReports = require('istanbul-reports');\nconst libSourceMaps = require('istanbul-lib-source-maps');\nconst map = istanbulCoverage.createCoverageMap();\nconst parser = require('yargs-parser');\nconst argv = parser(process.argv.slice(2));\n\nif (!!!argv.commitDir || !!!argv.coverageFiles) {\n  console.error('error while running merging coverage files');\n  process.exit(-1);\n}\n\n\nconst mapFileCoverage = (fileCoverage) => {\n  fileCoverage.path = fileCoverage.path.replace(\n      /(.*packages\\/.*\\/)(build)(\\/.*)/,\n      '$1src$3',\n  );\n  return fileCoverage;\n};\n\n\nfor (const coverageFile of argv.coverageFiles.split(' ')) {\n  console.log(coverageFile);\n  try {\n    const coverageJSON = require(coverageFile);\n    Object.keys(coverageJSON).forEach((filename) =>\n      map.addFileCoverage(mapFileCoverage(coverageJSON[filename])),\n    );\n  } catch (err) {\n    console.error('error while loading ' + coverageFile + err);\n    process.exit(-1);\n  }\n}\n\nconst checkCoverage = (summary, thresholds, file) => {\n  console.log(thresholds);\n  console.log(summary);\n  Object.keys(thresholds).forEach((key) => {\n    if (summary[key]) {\n      const coverage = summary[key].pct;\n      if (coverage < thresholds[key]) {\n        if (file) {\n          console.error('ERROR: Coverage for ' + key + ' (' + coverage + '%) does not meet threshold (' + thresholds[key] + '%) for ' + file);\n        } else {\n          console.error('ERROR: Coverage for ' + key + ' (' + coverage + '%) does not meet global threshold (' + thresholds[key] + '%)');\n        }\n      }\n    }\n  });\n};\n(async () => {\n  const sourceMapStore = libSourceMaps.createSourceMapStore();\n  const transformedMap = await sourceMapStore.transformCoverage(map);\n  const context = istanbulReport.createContext({coverageMap: transformedMap, dir: argv.commitDir});\n  
[{name: '/scripts/custom-reporter.js', file: 'coverage-merged.json'}, {name: 'text'}].forEach((reporter) =>\n    istanbulReports.create(reporter.name, {file: reporter.file}).execute(context),\n  );\n\n  if (argv.coverageManifest) {\n    const manifestFile = require(argv.coverageManifest);\n    const thresholds = manifestFile.coverage_threshold;\n    if (thresholds) {\n      if (thresholds.perfile) {\n        transformedMap.files().forEach((file) => {\n          checkCoverage(transformedMap.fileCoverageFor(file).toSummary(), thresholds, file);\n        });\n      } else {\n        checkCoverage(transformedMap.getCoverageSummary(), thresholds);\n      }\n    }\n  }\n})();\n"
  },
  {
    "path": "scripts/package.json",
    "content": "{\n  \"name\": \"scripts\",\n  \"version\": \"1.0.0\",\n  \"description\": \"JS scripts for nucleus\",\n  \"dependencies\": {\n    \"@babel/core\": \"^7.14.3\",\n    \"@babel/node\": \"^7.14.2\",\n    \"istanbul-lib-coverage\": \"^3.2.0\",\n    \"istanbul-lib-report\": \"^3.0.0\",\n    \"istanbul-lib-source-maps\": \"^4.0.1\",\n    \"istanbul-reports\": \"^3.0.5\",\n    \"yargs-parser\": \"^20.2.7\"\n  },\n  \"license\": \"ISC\"\n}\n"
  },
  {
    "path": "testutils/constants.go",
    "content": "package testutils\n\n// Various constant defined for to obtain dummy data for tests\nconst (\n\tApplicationConfigPath = \"/testutils/testdata/sample_config.json\"    // AplicationConfigPath points to dummy config file in json format for NucleusConfig\n\tTaskPayloadPath       = \"/testutils/testdata/taskPayload.json\"      // TaskPayloadPath points to json file containing dummy TaskPayload\n\tPayloadPath           = \"/testutils/testdata/payload.json\"          // PayloadPath points to json file containing dummy PayloadPath\n\tGitlabCommitDiff      = \"/testutils/testdata/gitlabCommitDiff.json\" // GitLabCommitDiff points to json file containing dummy GitLabCommitDiff\n)\n"
  },
  {
    "path": "testutils/testdata/compare/abc...xyz",
    "content": "  }\n  const step = compiler.log.step('Bundling Resources...')\n  let count = 0\n\n  const testCommitChangeM = \"Added 1 line in steps.ts\"\n  // workaround for https://github.com/sindresorhus/globby/issues/127\n  // and https://github.com/mrmlnc/fast-glob#pattern-syntax\n  const resourcesWithForwardSlashes = resources.map((r) => r.replace(/\\\\/g, '/'))"
  },
  {
    "path": "testutils/testdata/coverage/coverage-final.json",
    "content": "{\n    \"cover1\" : \"f1\"\n}"
  },
  {
    "path": "testutils/testdata/coverage/sample/coverage-final.json",
    "content": "{\n    \"build_id\" : \"dummyBuildID\",\n    \"repo_id\" : \"dummyRepoID\",\n    \"commit_id\" : \"dummyCommitID\",\n    \"blob_link\" : \"dummy://BlobLink.com\",\n    \"total\" : \"80%\"\n}"
  },
  {
    "path": "testutils/testdata/gitlabCommitDiff.json",
    "content": "{\"commit\":{\"id\":\"2295d352f6073101497f9bf4e4981c7ae72706a3\",\"short_id\":\"2295d352\",\"created_at\":\"2021-11-08T21:10:05.000+00:00\",\"parent_ids\":[\"f18d1ffec0ecaae592a0ccd708ce77146f5f37e3\"],\"title\":\"Add latest changes from gitlab-org/gitlab@master\",\"message\":\"Add latest changes from gitlab-org/gitlab@master\\n\",\"author_name\":\"GitLab Bot\",\"author_email\":\"gitlab-bot@gitlab.com\",\"authored_date\":\"2021-11-08T21:10:05.000+00:00\",\"committer_name\":\"GitLab Bot\",\"committer_email\":\"gitlab-bot@gitlab.com\",\"committed_date\":\"2021-11-08T21:10:05.000+00:00\",\"trailers\":{},\"web_url\":\"https://gitlab.com/gitlab-org/gitlab-foss/-/commit/2295d352f6073101497f9bf4e4981c7ae72706a3\"},\"commits\":[{\"id\":\"6a380347147d1a55afbc6a1c16e04b567ab90d86\",\"short_id\":\"6a380347\",\"created_at\":\"2021-11-08T12:12:07.000+00:00\",\"parent_ids\":[\"4901ff1764398bb017487d4a5104b74bc284f33a\"],\"title\":\"Add latest changes from gitlab-org/gitlab@master\",\"message\":\"Add latest changes from gitlab-org/gitlab@master\\n\",\"author_name\":\"GitLab Bot\",\"author_email\":\"gitlab-bot@gitlab.com\",\"authored_date\":\"2021-11-08T12:12:07.000+00:00\",\"committer_name\":\"GitLab Bot\",\"committer_email\":\"gitlab-bot@gitlab.com\",\"committed_date\":\"2021-11-08T12:12:07.000+00:00\",\"trailers\":{},\"web_url\":\"https://gitlab.com/gitlab-org/gitlab-foss/-/commit/6a380347147d1a55afbc6a1c16e04b567ab90d86\"},{\"id\":\"05db4ead6d5c73cf62ad95d80ccac415bc3bf3cd\",\"short_id\":\"05db4ead\",\"created_at\":\"2021-11-08T15:13:35.000+00:00\",\"parent_ids\":[\"6a380347147d1a55afbc6a1c16e04b567ab90d86\"],\"title\":\"Add latest changes from gitlab-org/gitlab@master\",\"message\":\"Add latest changes from gitlab-org/gitlab@master\\n\",\"author_name\":\"GitLab Bot\",\"author_email\":\"gitlab-bot@gitlab.com\",\"authored_date\":\"2021-11-08T15:13:35.000+00:00\",\"committer_name\":\"GitLab 
Bot\",\"committer_email\":\"gitlab-bot@gitlab.com\",\"committed_date\":\"2021-11-08T15:13:35.000+00:00\",\"trailers\":{},\"web_url\":\"https://gitlab.com/gitlab-org/gitlab-foss/-/commit/05db4ead6d5c73cf62ad95d80ccac415bc3bf3cd\"},{\"id\":\"f18d1ffec0ecaae592a0ccd708ce77146f5f37e3\",\"short_id\":\"f18d1ffe\",\"created_at\":\"2021-11-08T18:09:52.000+00:00\",\"parent_ids\":[\"05db4ead6d5c73cf62ad95d80ccac415bc3bf3cd\"],\"title\":\"Add latest changes from gitlab-org/gitlab@master\",\"message\":\"Add latest changes from gitlab-org/gitlab@master\\n\",\"author_name\":\"GitLab Bot\",\"author_email\":\"gitlab-bot@gitlab.com\",\"authored_date\":\"2021-11-08T18:09:52.000+00:00\",\"committer_name\":\"GitLab Bot\",\"committer_email\":\"gitlab-bot@gitlab.com\",\"committed_date\":\"2021-11-08T18:09:52.000+00:00\",\"trailers\":{},\"web_url\":\"https://gitlab.com/gitlab-org/gitlab-foss/-/commit/f18d1ffec0ecaae592a0ccd708ce77146f5f37e3\"},{\"id\":\"2295d352f6073101497f9bf4e4981c7ae72706a3\",\"short_id\":\"2295d352\",\"created_at\":\"2021-11-08T21:10:05.000+00:00\",\"parent_ids\":[\"f18d1ffec0ecaae592a0ccd708ce77146f5f37e3\"],\"title\":\"Add latest changes from gitlab-org/gitlab@master\",\"message\":\"Add latest changes from gitlab-org/gitlab@master\\n\",\"author_name\":\"GitLab Bot\",\"author_email\":\"gitlab-bot@gitlab.com\",\"authored_date\":\"2021-11-08T21:10:05.000+00:00\",\"committer_name\":\"GitLab Bot\",\"committer_email\":\"gitlab-bot@gitlab.com\",\"committed_date\":\"2021-11-08T21:10:05.000+00:00\",\"trailers\":{},\"web_url\":\"https://gitlab.com/gitlab-org/gitlab-foss/-/commit/2295d352f6073101497f9bf4e4981c7ae72706a3\"}],\"diffs\":[{\"old_path\":\".gitlab/ci/qa.gitlab-ci.yml\",\"new_path\":\".gitlab/ci/qa.gitlab-ci.yml\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -58,10 +58,13 @@ update-qa-cache:\\n     - tooling/bin/find_change_diffs ${CHANGES_DIFFS_DIR}\\n   script:\\n     - |\\n-      if 
tooling/bin/qa/check_if_only_quarantined_specs ${CHANGES_DIFFS_DIR}; then\\n-        exit 0\\n-      else\\n+      tooling/bin/qa/package_and_qa_check ${CHANGES_DIFFS_DIR} \\u0026\\u0026 exit_code=$?\\n+      if [ $exit_code -eq 0 ]; then\\n         ./scripts/trigger-build omnibus\\n+      elif [ $exit_code -eq 1 ]; then\\n+        exit 1\\n+      else\\n+        echo \\\"Downstream jobs will not be triggered because package_and_qa_check exited with code: $exit_code\\\"\\n       fi\\n   # These jobs often time out, so temporarily use private runners and a long timeout: https://gitlab.com/gitlab-org/gitlab/-/issues/238563\\n   tags:\\n\"},{\"old_path\":\".gitlab/ci/rails.gitlab-ci.yml\",\"new_path\":\".gitlab/ci/rails.gitlab-ci.yml\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -534,6 +534,50 @@ rspec:feature-flags:\\n         run_timed_command \\\"bundle exec scripts/used-feature-flags\\\";\\n       fi\\n \\n+rspec:skipped-flaky-tests-report:\\n+  extends:\\n+    - .default-retry\\n+    - .rails:rules:skipped-flaky-tests-report\\n+  image: ruby:2.7-alpine\\n+  stage: post-test\\n+  # We cannot use needs since it would mean needing 84 jobs (since most are parallelized)\\n+  # so we use `dependencies` here.\\n+  dependencies:\\n+    # FOSS/EE jobs\\n+    - rspec migration pg12\\n+    - rspec unit pg12\\n+    - rspec integration pg12\\n+    - rspec system pg12\\n+    # FOSS/EE minimal jobs\\n+    - rspec migration pg12 minimal\\n+    - rspec unit pg12 minimal\\n+    - rspec integration pg12 minimal\\n+    - rspec system pg12 minimal\\n+    # EE jobs\\n+    - rspec-ee migration pg12\\n+    - rspec-ee unit pg12\\n+    - rspec-ee integration pg12\\n+    - rspec-ee system pg12\\n+    # EE minimal jobs\\n+    - rspec-ee migration pg12 minimal\\n+    - rspec-ee unit pg12 minimal\\n+    - rspec-ee integration pg12 minimal\\n+    - rspec-ee system pg12 minimal\\n+    # Geo jobs\\n+    - rspec-ee 
unit pg12 geo\\n+    - rspec-ee integration pg12 geo\\n+    - rspec-ee system pg12 geo\\n+    # Geo minimal jobs\\n+    - rspec-ee unit pg12 geo minimal\\n+    - rspec-ee integration pg12 geo minimal\\n+    - rspec-ee system pg12 geo minimal\\n+  script:\\n+    - cat rspec_flaky/skipped_flaky_tests_*_report.txt \\u003e\\u003e skipped_flaky_tests_report.txt\\n+  artifacts:\\n+    expire_in: 31d\\n+    paths:\\n+      - skipped_flaky_tests_report.txt\\n+\\n # EE/FOSS: default refs (MRs, default branch, schedules) jobs #\\n #######################################################\\n \\n\"},{\"old_path\":\".gitlab/ci/rules.gitlab-ci.yml\",\"new_path\":\".gitlab/ci/rules.gitlab-ci.yml\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -1352,6 +1352,13 @@\\n       when: never\\n     - changes: *code-backstage-patterns\\n \\n+.rails:rules:skipped-flaky-tests-report:\\n+  rules:\\n+    - \\u003c\\u003c: *if-not-ee\\n+      when: never\\n+    - if: '$SKIP_FLAKY_TESTS_AUTOMATICALLY == \\\"true\\\"'\\n+      changes: *code-backstage-patterns\\n+\\n #########################\\n # Static analysis rules #\\n #########################\\n\"},{\"old_path\":\"CHANGELOG.md\",\"new_path\":\"CHANGELOG.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -2,6 +2,22 @@\\n documentation](doc/development/changelog.md) for instructions on adding your own\\n entry.\\n \\n+## 14.4.2 (2021-11-08)\\n+\\n+### Fixed (3 changes)\\n+\\n+- [Skip retrying for reads on connection errors if primary only](gitlab-org/gitlab@8e1976ed75bd6c606d49c83863cf46bf3c4d5070) ([merge request](gitlab-org/gitlab!73919))\\n+- [Fix error 500 loading branch with UTF-8 characters with performance bar](gitlab-org/gitlab@67ddc428472d57bb3d8a4a84eb0750487a175f75) ([merge request](gitlab-org/gitlab!73919))\\n+- [Skip st_diff callback setting on LegacyDiffNote when 
importing](gitlab-org/gitlab@84f5c66321473cd702b3b671584054fcf3d141ae) ([merge request](gitlab-org/gitlab!73919))\\n+\\n+### Changed (1 change)\\n+\\n+- [Remove skip_legacy_diff_note_callback_on_import from legacy diff note](gitlab-org/gitlab@547a2ec29ea9e9299eab727899c3d90886ffc21c) ([merge request](gitlab-org/gitlab!73919))\\n+\\n+### Performance (1 change)\\n+\\n+- [Prevent Sidekiq size limiter middleware from running multiple times on the same job](gitlab-org/gitlab@294c01be38d400607536fb20a2038e098c0f0e28) ([merge request](gitlab-org/gitlab!73919))\\n+\\n ## 14.4.1 (2021-10-28)\\n \\n ### Security (13 changes)\\n\"},{\"old_path\":\"GITALY_SERVER_VERSION\",\"new_path\":\"GITALY_SERVER_VERSION\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -1 +1 @@\\n-7b9cd199b0851fd1b6615e0798f2aafddafd63cb\\n+460a880c6993ab5f76cac951fccc02efd5cbd444\\n\"},{\"old_path\":\"Gemfile\",\"new_path\":\"Gemfile\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -342,7 +342,7 @@ group :development do\\n   gem 'lefthook', '~\\u003e 0.7.0', require: false\\n   gem 'solargraph', '~\\u003e 0.43', require: false\\n \\n-  gem 'letter_opener_web', '~\\u003e 1.4.1'\\n+  gem 'letter_opener_web', '~\\u003e 2.0.0'\\n \\n   # Better errors handler\\n   gem 'better_errors', '~\\u003e 2.9.0'\\n\"},{\"old_path\":\"Gemfile.lock\",\"new_path\":\"Gemfile.lock\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -700,10 +700,11 @@ GEM\\n     lefthook (0.7.5)\\n     letter_opener (1.7.0)\\n       launchy (~\\u003e 2.2)\\n-    letter_opener_web (1.4.1)\\n-      actionmailer (\\u003e= 3.2)\\n-      letter_opener (~\\u003e 1.0)\\n-      railties (\\u003e= 3.2)\\n+    letter_opener_web (2.0.0)\\n+      actionmailer (\\u003e= 5.2)\\n+      letter_opener (~\\u003e 1.7)\\n+      railties 
(\\u003e= 5.2)\\n+      rexml\\n     libyajl2 (1.2.0)\\n     license_finder (6.0.0)\\n       bundler\\n@@ -1516,7 +1517,7 @@ DEPENDENCIES\\n   kramdown (~\\u003e 2.3.1)\\n   kubeclient (~\\u003e 4.9.2)\\n   lefthook (~\\u003e 0.7.0)\\n-  letter_opener_web (~\\u003e 1.4.1)\\n+  letter_opener_web (~\\u003e 2.0.0)\\n   license_finder (~\\u003e 6.0)\\n   licensee (~\\u003e 9.14.1)\\n   lockbox (~\\u003e 0.6.2)\\n\"},{\"old_path\":\"app/assets/javascripts/analytics/devops_report/components/devops_score.vue\",\"new_path\":\"app/assets/javascripts/analytics/devops_reports/components/devops_score.vue\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"\"},{\"old_path\":\"app/assets/javascripts/analytics/devops_report/components/devops_score_callout.vue\",\"new_path\":\"app/assets/javascripts/analytics/devops_reports/components/devops_score_callout.vue\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"\"},{\"old_path\":\"app/assets/javascripts/analytics/devops_report/components/service_ping_disabled.vue\",\"new_path\":\"app/assets/javascripts/analytics/devops_reports/components/service_ping_disabled.vue\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"\"},{\"old_path\":\"app/assets/javascripts/analytics/devops_report/constants.js\",\"new_path\":\"app/assets/javascripts/analytics/devops_reports/constants.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"\"},{\"old_path\":\"app/assets/javascripts/analytics/devops_report/devops_score.js\",\"new_path\":\"app/assets/javascripts/analytics/devops_reports/devops_score.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"\"},{\"old_path\":\"app/assets/javascripts/analytics/devops_rep
ort/devops_score_disabled_service_ping.js\",\"new_path\":\"app/assets/javascripts/analytics/devops_reports/devops_score_disabled_service_ping.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"\"},{\"old_path\":\"app/assets/javascripts/boards/graphql/group_board_iterations.query.graphql\",\"new_path\":\"app/assets/javascripts/boards/graphql/group_board_iterations.query.graphql\",\"a_mode\":\"100644\",\"b_mode\":\"0\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":true,\"diff\":\"@@ -1,10 +0,0 @@\\n-query GroupBoardIterations($fullPath: ID!, $title: String) {\\n-  group(fullPath: $fullPath) {\\n-    iterations(includeAncestors: true, title: $title) {\\n-      nodes {\\n-        id\\n-        title\\n-      }\\n-    }\\n-  }\\n-}\\n\"},{\"old_path\":\"app/assets/javascripts/boards/graphql/project_board_iterations.query.graphql\",\"new_path\":\"app/assets/javascripts/boards/graphql/project_board_iterations.query.graphql\",\"a_mode\":\"100644\",\"b_mode\":\"0\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":true,\"diff\":\"@@ -1,10 +0,0 @@\\n-query ProjectBoardIterations($fullPath: ID!, $title: String) {\\n-  project(fullPath: $fullPath) {\\n-    iterations(includeAncestors: true, title: $title) {\\n-      nodes {\\n-        id\\n-        title\\n-      }\\n-    }\\n-  }\\n-}\\n\"},{\"old_path\":\"app/assets/javascripts/boards/stores/actions.js\",\"new_path\":\"app/assets/javascripts/boards/stores/actions.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -36,13 +36,11 @@ import {\\n } from '../boards_util';\\n import { gqlClient } from '../graphql';\\n import boardLabelsQuery from '../graphql/board_labels.query.graphql';\\n-import groupBoardIterationsQuery from '../graphql/group_board_iterations.query.graphql';\\n import groupBoardMilestonesQuery from 
'../graphql/group_board_milestones.query.graphql';\\n import groupProjectsQuery from '../graphql/group_projects.query.graphql';\\n import issueCreateMutation from '../graphql/issue_create.mutation.graphql';\\n import issueSetLabelsMutation from '../graphql/issue_set_labels.mutation.graphql';\\n import listsIssuesQuery from '../graphql/lists_issues.query.graphql';\\n-import projectBoardIterationsQuery from '../graphql/project_board_iterations.query.graphql';\\n import projectBoardMilestonesQuery from '../graphql/project_board_milestones.query.graphql';\\n \\n import * as types from './mutation_types';\\n@@ -203,52 +201,6 @@ export default {\\n       });\\n   },\\n \\n-  fetchIterations({ state, commit }, title) {\\n-    commit(types.RECEIVE_ITERATIONS_REQUEST);\\n-\\n-    const { fullPath, boardType } = state;\\n-\\n-    const variables = {\\n-      fullPath,\\n-      title,\\n-    };\\n-\\n-    let query;\\n-    if (boardType === BoardType.project) {\\n-      query = projectBoardIterationsQuery;\\n-    }\\n-    if (boardType === BoardType.group) {\\n-      query = groupBoardIterationsQuery;\\n-    }\\n-\\n-    if (!query) {\\n-      // eslint-disable-next-line @gitlab/require-i18n-strings\\n-      throw new Error('Unknown board type');\\n-    }\\n-\\n-    return gqlClient\\n-      .query({\\n-        query,\\n-        variables,\\n-      })\\n-      .then(({ data }) =\\u003e {\\n-        const errors = data[boardType]?.errors;\\n-        const iterations = data[boardType]?.iterations.nodes;\\n-\\n-        if (errors?.[0]) {\\n-          throw new Error(errors[0]);\\n-        }\\n-\\n-        commit(types.RECEIVE_ITERATIONS_SUCCESS, iterations);\\n-\\n-        return iterations;\\n-      })\\n-      .catch((e) =\\u003e {\\n-        commit(types.RECEIVE_ITERATIONS_FAILURE);\\n-        throw e;\\n-      });\\n-  },\\n-\\n   fetchMilestones({ state, commit }, searchTerm) {\\n     commit(types.RECEIVE_MILESTONES_REQUEST);\\n 
\\n\"},{\"old_path\":\"app/assets/javascripts/boards/stores/mutation_types.js\",\"new_path\":\"app/assets/javascripts/boards/stores/mutation_types.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -41,7 +41,3 @@ export const ADD_LIST_TO_HIGHLIGHTED_LISTS = 'ADD_LIST_TO_HIGHLIGHTED_LISTS';\\n export const REMOVE_LIST_FROM_HIGHLIGHTED_LISTS = 'REMOVE_LIST_FROM_HIGHLIGHTED_LISTS';\\n export const RESET_BOARD_ITEM_SELECTION = 'RESET_BOARD_ITEM_SELECTION';\\n export const SET_ERROR = 'SET_ERROR';\\n-\\n-export const RECEIVE_ITERATIONS_REQUEST = 'RECEIVE_ITERATIONS_REQUEST';\\n-export const RECEIVE_ITERATIONS_SUCCESS = 'RECEIVE_ITERATIONS_SUCCESS';\\n-export const RECEIVE_ITERATIONS_FAILURE = 'RECEIVE_ITERATIONS_FAILURE';\\n\"},{\"old_path\":\"app/assets/javascripts/boards/stores/mutations.js\",\"new_path\":\"app/assets/javascripts/boards/stores/mutations.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -64,20 +64,6 @@ export default {\\n     );\\n   },\\n \\n-  [mutationTypes.RECEIVE_ITERATIONS_REQUEST](state) {\\n-    state.iterationsLoading = true;\\n-  },\\n-\\n-  [mutationTypes.RECEIVE_ITERATIONS_SUCCESS](state, iterations) {\\n-    state.iterations = iterations;\\n-    state.iterationsLoading = false;\\n-  },\\n-\\n-  [mutationTypes.RECEIVE_ITERATIONS_FAILURE](state) {\\n-    state.iterationsLoading = false;\\n-    state.error = __('Failed to load iterations.');\\n-  },\\n-\\n   [mutationTypes.SET_ACTIVE_ID](state, { id, sidebarType }) {\\n     state.activeId = id;\\n     state.sidebarType = sidebarType;\\n\"},{\"old_path\":\"app/assets/javascripts/diffs/components/app.vue\",\"new_path\":\"app/assets/javascripts/diffs/components/app.vue\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -44,6 +44,7 @@ import {\\n   
TRACKING_MULTIPLE_FILES_MODE,\\n } from '../constants';\\n \\n+import { discussionIntersectionObserverHandlerFactory } from '../utils/discussions';\\n import diffsEventHub from '../event_hub';\\n import { reviewStatuses } from '../utils/file_reviews';\\n import { diffsApp } from '../utils/performance';\\n@@ -86,6 +87,9 @@ export default {\\n     ALERT_MERGE_CONFLICT,\\n     ALERT_COLLAPSED_FILES,\\n   },\\n+  provide: {\\n+    discussionObserverHandler: discussionIntersectionObserverHandlerFactory(),\\n+  },\\n   props: {\\n     endpoint: {\\n       type: String,\\n\"},{\"old_path\":\"app/assets/javascripts/diffs/utils/discussions.js\",\"new_path\":\"app/assets/javascripts/diffs/utils/discussions.js\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,76 @@\\n+function normalize(processable) {\\n+  const { entry } = processable;\\n+  const offset = entry.rootBounds.bottom - entry.boundingClientRect.top;\\n+  const direction =\\n+    offset \\u003c 0 ? 'Up' : 'Down'; /* eslint-disable-line @gitlab/require-i18n-strings */\\n+\\n+  return {\\n+    ...processable,\\n+    entry: {\\n+      time: entry.time,\\n+      type: entry.isIntersecting ? 
'intersection' : `scroll${direction}`,\\n+    },\\n+  };\\n+}\\n+\\n+function sort({ entry: alpha }, { entry: beta }) {\\n+  const diff = alpha.time - beta.time;\\n+  let order = 0;\\n+\\n+  if (diff \\u003c 0) {\\n+    order = -1;\\n+  } else if (diff \\u003e 0) {\\n+    order = 1;\\n+  } else if (alpha.type === 'intersection' \\u0026\\u0026 beta.type === 'scrollUp') {\\n+    order = 2;\\n+  } else if (alpha.type === 'scrollUp' \\u0026\\u0026 beta.type === 'intersection') {\\n+    order = -2;\\n+  }\\n+\\n+  return order;\\n+}\\n+\\n+function filter(entry) {\\n+  return entry.type !== 'scrollDown';\\n+}\\n+\\n+export function discussionIntersectionObserverHandlerFactory() {\\n+  let unprocessed = [];\\n+  let timer = null;\\n+\\n+  return (processable) =\\u003e {\\n+    unprocessed.push(processable);\\n+\\n+    if (timer) {\\n+      clearTimeout(timer);\\n+    }\\n+\\n+    timer = setTimeout(() =\\u003e {\\n+      unprocessed\\n+        .map(normalize)\\n+        .filter(filter)\\n+        .sort(sort)\\n+        .forEach((discussionObservationContainer) =\\u003e {\\n+          const {\\n+            entry: { type },\\n+            currentDiscussion,\\n+            isFirstUnresolved,\\n+            isDiffsPage,\\n+            functions: { setCurrentDiscussionId, getPreviousUnresolvedDiscussionId },\\n+          } = discussionObservationContainer;\\n+\\n+          if (type === 'intersection') {\\n+            setCurrentDiscussionId(currentDiscussion.id);\\n+          } else if (type === 'scrollUp') {\\n+            setCurrentDiscussionId(\\n+              isFirstUnresolved\\n+                ? 
null\\n+                : getPreviousUnresolvedDiscussionId(currentDiscussion.id, isDiffsPage),\\n+            );\\n+          }\\n+        });\\n+\\n+      unprocessed = [];\\n+    }, 0);\\n+  };\\n+}\\n\"},{\"old_path\":\"app/assets/javascripts/notes/components/discussion_notes.vue\",\"new_path\":\"app/assets/javascripts/notes/components/discussion_notes.vue\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -1,5 +1,6 @@\\n \\u003cscript\\u003e\\n import { mapGetters, mapActions } from 'vuex';\\n+import { GlIntersectionObserver } from '@gitlab/ui';\\n import { __ } from '~/locale';\\n import PlaceholderNote from '~/vue_shared/components/notes/placeholder_note.vue';\\n import PlaceholderSystemNote from '~/vue_shared/components/notes/placeholder_system_note.vue';\\n@@ -16,7 +17,9 @@ export default {\\n     ToggleRepliesWidget,\\n     NoteEditedText,\\n     DiscussionNotesRepliesWrapper,\\n+    GlIntersectionObserver,\\n   },\\n+  inject: ['discussionObserverHandler'],\\n   props: {\\n     discussion: {\\n       type: Object,\\n@@ -54,7 +57,11 @@ export default {\\n     },\\n   },\\n   computed: {\\n-    ...mapGetters(['userCanReply']),\\n+    ...mapGetters([\\n+      'userCanReply',\\n+      'previousUnresolvedDiscussionId',\\n+      'firstUnresolvedDiscussionId',\\n+    ]),\\n     hasReplies() {\\n       return Boolean(this.replies.length);\\n     },\\n@@ -77,9 +84,20 @@ export default {\\n         url: this.discussion.discussion_path,\\n       };\\n     },\\n+    isFirstUnresolved() {\\n+      return this.firstUnresolvedDiscussionId === this.discussion.id;\\n+    },\\n+  },\\n+  observerOptions: {\\n+    threshold: 0,\\n+    rootMargin: '0px 0px -50% 0px',\\n   },\\n   methods: {\\n-    ...mapActions(['toggleDiscussion', 'setSelectedCommentPositionHover']),\\n+    ...mapActions([\\n+      'toggleDiscussion',\\n+      'setSelectedCommentPositionHover',\\n+      'setCurrentDiscussionId',\\n+  
  ]),\\n     componentName(note) {\\n       if (note.isPlaceholderNote) {\\n         if (note.placeholderType === SYSTEM_NOTE) {\\n@@ -110,6 +128,18 @@ export default {\\n         this.setSelectedCommentPositionHover();\\n       }\\n     },\\n+    observerTriggered(entry) {\\n+      this.discussionObserverHandler({\\n+        entry,\\n+        isFirstUnresolved: this.isFirstUnresolved,\\n+        currentDiscussion: { ...this.discussion },\\n+        isDiffsPage: !this.isOverviewTab,\\n+        functions: {\\n+          setCurrentDiscussionId: this.setCurrentDiscussionId,\\n+          getPreviousUnresolvedDiscussionId: this.previousUnresolvedDiscussionId,\\n+        },\\n+      });\\n+    },\\n   },\\n };\\n \\u003c/script\\u003e\\n@@ -122,33 +152,35 @@ export default {\\n       @mouseleave=\\\"handleMouseLeave(discussion)\\\"\\n     \\u003e\\n       \\u003ctemplate v-if=\\\"shouldGroupReplies\\\"\\u003e\\n-        \\u003ccomponent\\n-          :is=\\\"componentName(firstNote)\\\"\\n-          :note=\\\"componentData(firstNote)\\\"\\n-          :line=\\\"line || diffLine\\\"\\n-          :discussion-file=\\\"discussion.diff_file\\\"\\n-          :commit=\\\"commit\\\"\\n-          :help-page-path=\\\"helpPagePath\\\"\\n-          :show-reply-button=\\\"userCanReply\\\"\\n-          :discussion-root=\\\"true\\\"\\n-          :discussion-resolve-path=\\\"discussion.resolve_path\\\"\\n-          :is-overview-tab=\\\"isOverviewTab\\\"\\n-          @handleDeleteNote=\\\"$emit('deleteNote')\\\"\\n-          @startReplying=\\\"$emit('startReplying')\\\"\\n-        \\u003e\\n-          \\u003ctemplate #discussion-resolved-text\\u003e\\n-            \\u003cnote-edited-text\\n-              v-if=\\\"discussion.resolved\\\"\\n-              :edited-at=\\\"discussion.resolved_at\\\"\\n-              :edited-by=\\\"discussion.resolved_by\\\"\\n-              :action-text=\\\"resolvedText\\\"\\n-              class-name=\\\"discussion-headline-light js-discussion-headline 
discussion-resolved-text\\\"\\n-            /\\u003e\\n-          \\u003c/template\\u003e\\n-          \\u003ctemplate #avatar-badge\\u003e\\n-            \\u003cslot name=\\\"avatar-badge\\\"\\u003e\\u003c/slot\\u003e\\n-          \\u003c/template\\u003e\\n-        \\u003c/component\\u003e\\n+        \\u003cgl-intersection-observer :options=\\\"$options.observerOptions\\\" @update=\\\"observerTriggered\\\"\\u003e\\n+          \\u003ccomponent\\n+            :is=\\\"componentName(firstNote)\\\"\\n+            :note=\\\"componentData(firstNote)\\\"\\n+            :line=\\\"line || diffLine\\\"\\n+            :discussion-file=\\\"discussion.diff_file\\\"\\n+            :commit=\\\"commit\\\"\\n+            :help-page-path=\\\"helpPagePath\\\"\\n+            :show-reply-button=\\\"userCanReply\\\"\\n+            :discussion-root=\\\"true\\\"\\n+            :discussion-resolve-path=\\\"discussion.resolve_path\\\"\\n+            :is-overview-tab=\\\"isOverviewTab\\\"\\n+            @handleDeleteNote=\\\"$emit('deleteNote')\\\"\\n+            @startReplying=\\\"$emit('startReplying')\\\"\\n+          \\u003e\\n+            \\u003ctemplate #discussion-resolved-text\\u003e\\n+              \\u003cnote-edited-text\\n+                v-if=\\\"discussion.resolved\\\"\\n+                :edited-at=\\\"discussion.resolved_at\\\"\\n+                :edited-by=\\\"discussion.resolved_by\\\"\\n+                :action-text=\\\"resolvedText\\\"\\n+                class-name=\\\"discussion-headline-light js-discussion-headline discussion-resolved-text\\\"\\n+              /\\u003e\\n+            \\u003c/template\\u003e\\n+            \\u003ctemplate #avatar-badge\\u003e\\n+              \\u003cslot name=\\\"avatar-badge\\\"\\u003e\\u003c/slot\\u003e\\n+            \\u003c/template\\u003e\\n+          \\u003c/component\\u003e\\n+        \\u003c/gl-intersection-observer\\u003e\\n         \\u003cdiscussion-notes-replies-wrapper 
:is-diff-discussion=\\\"discussion.diff_discussion\\\"\\u003e\\n           \\u003ctoggle-replies-widget\\n             v-if=\\\"hasReplies\\\"\\n\"},{\"old_path\":\"app/assets/javascripts/notes/components/notes_app.vue\",\"new_path\":\"app/assets/javascripts/notes/components/notes_app.vue\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -8,6 +8,7 @@ import TimelineEntryItem from '~/vue_shared/components/notes/timeline_entry_item\\n import OrderedLayout from '~/vue_shared/components/ordered_layout.vue';\\n import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';\\n import draftNote from '../../batch_comments/components/draft_note.vue';\\n+import { discussionIntersectionObserverHandlerFactory } from '../../diffs/utils/discussions';\\n import { getLocationHash, doesHashExistInUrl } from '../../lib/utils/url_utility';\\n import placeholderNote from '../../vue_shared/components/notes/placeholder_note.vue';\\n import placeholderSystemNote from '../../vue_shared/components/notes/placeholder_system_note.vue';\\n@@ -38,6 +39,9 @@ export default {\\n     TimelineEntryItem,\\n   },\\n   mixins: [glFeatureFlagsMixin()],\\n+  provide: {\\n+    discussionObserverHandler: discussionIntersectionObserverHandlerFactory(),\\n+  },\\n   props: {\\n     noteableData: {\\n       type: Object,\\n\"},{\"old_path\":\"app/assets/javascripts/pages/admin/dev_ops_report/index.js\",\"new_path\":\"app/assets/javascripts/pages/admin/dev_ops_report/index.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -1,5 +1,5 @@\\n-import initDevOpsScore from '~/analytics/devops_report/devops_score';\\n-import initDevOpsScoreDisabledServicePing from '~/analytics/devops_report/devops_score_disabled_service_ping';\\n+import initDevOpsScore from '~/analytics/devops_reports/devops_score';\\n+import initDevOpsScoreDisabledServicePing from 
'~/analytics/devops_reports/devops_score_disabled_service_ping';\\n \\n initDevOpsScoreDisabledServicePing();\\n initDevOpsScore();\\n\"},{\"old_path\":\"app/assets/javascripts/runner/components/cells/runner_actions_cell.vue\",\"new_path\":\"app/assets/javascripts/runner/components/cells/runner_actions_cell.vue\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -3,7 +3,7 @@ import { GlButton, GlButtonGroup, GlTooltipDirective } from '@gitlab/ui';\\n import createFlash from '~/flash';\\n import { __, s__ } from '~/locale';\\n import runnerDeleteMutation from '~/runner/graphql/runner_delete.mutation.graphql';\\n-import runnerUpdateMutation from '~/runner/graphql/runner_update.mutation.graphql';\\n+import runnerActionsUpdateMutation from '~/runner/graphql/runner_actions_update.mutation.graphql';\\n import { captureException } from '~/runner/sentry_utils';\\n \\n const i18n = {\\n@@ -71,7 +71,7 @@ export default {\\n             runnerUpdate: { errors },\\n           },\\n         } = await this.$apollo.mutate({\\n-          mutation: runnerUpdateMutation,\\n+          mutation: runnerActionsUpdateMutation,\\n           variables: {\\n             input: {\\n               id: this.runner.id,\\n\"},{\"old_path\":\"app/assets/javascripts/runner/components/cells/runner_type_cell.vue\",\"new_path\":\"app/assets/javascripts/runner/components/cells/runner_status_cell.vue\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"@@ -1,15 +1,15 @@\\n \\u003cscript\\u003e\\n import { GlTooltipDirective } from '@gitlab/ui';\\n-import RunnerTypeBadge from '../runner_type_badge.vue';\\n-import RunnerStateLockedBadge from '../runner_state_locked_badge.vue';\\n-import RunnerStatePausedBadge from '../runner_state_paused_badge.vue';\\n+\\n+import RunnerContactedStateBadge from '../runner_contacted_state_badge.vue';\\n+import RunnerPausedBadge from 
'../runner_paused_badge.vue';\\n+\\n import { I18N_LOCKED_RUNNER_DESCRIPTION, I18N_PAUSED_RUNNER_DESCRIPTION } from '../../constants';\\n \\n export default {\\n   components: {\\n-    RunnerTypeBadge,\\n-    RunnerStateLockedBadge,\\n-    RunnerStatePausedBadge,\\n+    RunnerContactedStateBadge,\\n+    RunnerPausedBadge,\\n   },\\n   directives: {\\n     GlTooltip: GlTooltipDirective,\\n@@ -21,12 +21,6 @@ export default {\\n     },\\n   },\\n   computed: {\\n-    runnerType() {\\n-      return this.runner.runnerType;\\n-    },\\n-    locked() {\\n-      return this.runner.locked;\\n-    },\\n     paused() {\\n       return !this.runner.active;\\n     },\\n@@ -40,8 +34,7 @@ export default {\\n \\n \\u003ctemplate\\u003e\\n   \\u003cdiv\\u003e\\n-    \\u003crunner-type-badge :type=\\\"runnerType\\\" size=\\\"sm\\\" /\\u003e\\n-    \\u003crunner-state-locked-badge v-if=\\\"locked\\\" size=\\\"sm\\\" /\\u003e\\n-    \\u003crunner-state-paused-badge v-if=\\\"paused\\\" size=\\\"sm\\\" /\\u003e\\n+    \\u003crunner-contacted-state-badge :runner=\\\"runner\\\" size=\\\"sm\\\" /\\u003e\\n+    \\u003crunner-paused-badge v-if=\\\"paused\\\" size=\\\"sm\\\" /\\u003e\\n   \\u003c/div\\u003e\\n \\u003c/template\\u003e\\n\"},{\"old_path\":\"app/assets/javascripts/runner/components/cells/runner_summary_cell.vue\",\"new_path\":\"app/assets/javascripts/runner/components/cells/runner_summary_cell.vue\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -1,11 +1,21 @@\\n \\u003cscript\\u003e\\n+import { GlIcon, GlTooltipDirective } from '@gitlab/ui';\\n+\\n import TooltipOnTruncate from '~/vue_shared/components/tooltip_on_truncate.vue';\\n import RunnerName from '../runner_name.vue';\\n+import RunnerTypeBadge from '../runner_type_badge.vue';\\n+\\n+import { I18N_LOCKED_RUNNER_DESCRIPTION } from '../../constants';\\n \\n export default {\\n   components: {\\n+    GlIcon,\\n     TooltipOnTruncate,\\n     
RunnerName,\\n+    RunnerTypeBadge,\\n+  },\\n+  directives: {\\n+    GlTooltip: GlTooltipDirective,\\n   },\\n   props: {\\n     runner: {\\n@@ -14,10 +24,19 @@ export default {\\n     },\\n   },\\n   computed: {\\n+    runnerType() {\\n+      return this.runner.runnerType;\\n+    },\\n+    locked() {\\n+      return this.runner.locked;\\n+    },\\n     description() {\\n       return this.runner.description;\\n     },\\n   },\\n+  i18n: {\\n+    I18N_LOCKED_RUNNER_DESCRIPTION,\\n+  },\\n };\\n \\u003c/script\\u003e\\n \\n@@ -26,6 +45,14 @@ export default {\\n     \\u003cslot :runner=\\\"runner\\\" name=\\\"runner-name\\\"\\u003e\\n       \\u003crunner-name :runner=\\\"runner\\\" /\\u003e\\n     \\u003c/slot\\u003e\\n+\\n+    \\u003crunner-type-badge :type=\\\"runnerType\\\" size=\\\"sm\\\" /\\u003e\\n+    \\u003cgl-icon\\n+      v-if=\\\"locked\\\"\\n+      v-gl-tooltip\\n+      :title=\\\"$options.i18n.I18N_LOCKED_RUNNER_DESCRIPTION\\\"\\n+      name=\\\"lock\\\"\\n+    /\\u003e\\n     \\u003ctooltip-on-truncate class=\\\"gl-display-block\\\" :title=\\\"description\\\" truncate-target=\\\"child\\\"\\u003e\\n       \\u003cdiv class=\\\"gl-text-truncate\\\"\\u003e\\n         {{ description }}\\n\"},{\"old_path\":\"app/assets/javascripts/runner/components/runner_contacted_state_badge.vue\",\"new_path\":\"app/assets/javascripts/runner/components/runner_contacted_state_badge.vue\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,69 @@\\n+\\u003cscript\\u003e\\n+import { GlBadge, GlTooltipDirective } from '@gitlab/ui';\\n+import { s__, sprintf } from '~/locale';\\n+import { getTimeago } from '~/lib/utils/datetime_utility';\\n+import {\\n+  I18N_ONLINE_RUNNER_DESCRIPTION,\\n+  I18N_OFFLINE_RUNNER_DESCRIPTION,\\n+  I18N_NOT_CONNECTED_RUNNER_DESCRIPTION,\\n+  STATUS_ONLINE,\\n+  STATUS_OFFLINE,\\n+  STATUS_NOT_CONNECTED,\\n+} from '../constants';\\n+\\n+export default {\\n+  components: {\\n+    
GlBadge,\\n+  },\\n+  directives: {\\n+    GlTooltip: GlTooltipDirective,\\n+  },\\n+  props: {\\n+    runner: {\\n+      required: true,\\n+      type: Object,\\n+    },\\n+  },\\n+  computed: {\\n+    contactedAtTimeAgo() {\\n+      if (this.runner.contactedAt) {\\n+        return getTimeago().format(this.runner.contactedAt);\\n+      }\\n+      return null;\\n+    },\\n+    badge() {\\n+      switch (this.runner.status) {\\n+        case STATUS_ONLINE:\\n+          return {\\n+            variant: 'success',\\n+            label: s__('Runners|online'),\\n+            tooltip: sprintf(I18N_ONLINE_RUNNER_DESCRIPTION, {\\n+              timeAgo: this.contactedAtTimeAgo,\\n+            }),\\n+          };\\n+        case STATUS_OFFLINE:\\n+          return {\\n+            variant: 'muted',\\n+            label: s__('Runners|offline'),\\n+            tooltip: sprintf(I18N_OFFLINE_RUNNER_DESCRIPTION, {\\n+              timeAgo: this.contactedAtTimeAgo,\\n+            }),\\n+          };\\n+        case STATUS_NOT_CONNECTED:\\n+          return {\\n+            variant: 'muted',\\n+            label: s__('Runners|not connected'),\\n+            tooltip: I18N_NOT_CONNECTED_RUNNER_DESCRIPTION,\\n+          };\\n+        default:\\n+          return null;\\n+      }\\n+    },\\n+  },\\n+};\\n+\\u003c/script\\u003e\\n+\\u003ctemplate\\u003e\\n+  \\u003cgl-badge v-if=\\\"badge\\\" v-gl-tooltip=\\\"badge.tooltip\\\" :variant=\\\"badge.variant\\\" v-bind=\\\"$attrs\\\"\\u003e\\n+    {{ badge.label }}\\n+  \\u003c/gl-badge\\u003e\\n+\\u003c/template\\u003e\\n\"},{\"old_path\":\"app/assets/javascripts/runner/components/runner_list.vue\",\"new_path\":\"app/assets/javascripts/runner/components/runner_list.vue\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -5,7 +5,7 @@ import { __, s__ } from '~/locale';\\n import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';\\n import RunnerActionsCell 
from './cells/runner_actions_cell.vue';\\n import RunnerSummaryCell from './cells/runner_summary_cell.vue';\\n-import RunnerTypeCell from './cells/runner_type_cell.vue';\\n+import RunnerStatusCell from './cells/runner_status_cell.vue';\\n import RunnerTags from './runner_tags.vue';\\n \\n const tableField = ({ key, label = '', width = 10 }) =\\u003e {\\n@@ -36,7 +36,7 @@ export default {\\n     RunnerActionsCell,\\n     RunnerSummaryCell,\\n     RunnerTags,\\n-    RunnerTypeCell,\\n+    RunnerStatusCell,\\n   },\\n   directives: {\\n     GlTooltip: GlTooltipDirective,\\n@@ -63,8 +63,8 @@ export default {\\n     },\\n   },\\n   fields: [\\n-    tableField({ key: 'type', label: __('Type/State') }),\\n-    tableField({ key: 'summary', label: s__('Runners|Runner'), width: 30 }),\\n+    tableField({ key: 'status', label: s__('Runners|Status') }),\\n+    tableField({ key: 'summary', label: s__('Runners|Runner ID'), width: 30 }),\\n     tableField({ key: 'version', label: __('Version') }),\\n     tableField({ key: 'ipAddress', label: __('IP Address') }),\\n     tableField({ key: 'tagList', label: __('Tags'), width: 20 }),\\n@@ -88,8 +88,8 @@ export default {\\n         \\u003cgl-skeleton-loader v-for=\\\"i in 4\\\" :key=\\\"i\\\" /\\u003e\\n       \\u003c/template\\u003e\\n \\n-      \\u003ctemplate #cell(type)=\\\"{ item }\\\"\\u003e\\n-        \\u003crunner-type-cell :runner=\\\"item\\\" /\\u003e\\n+      \\u003ctemplate #cell(status)=\\\"{ item }\\\"\\u003e\\n+        \\u003crunner-status-cell :runner=\\\"item\\\" /\\u003e\\n       \\u003c/template\\u003e\\n \\n       \\u003ctemplate #cell(summary)=\\\"{ item, index 
}\\\"\\u003e\\n\"},{\"old_path\":\"app/assets/javascripts/runner/components/runner_state_paused_badge.vue\",\"new_path\":\"app/assets/javascripts/runner/components/runner_paused_badge.vue\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"\"},{\"old_path\":\"app/assets/javascripts/runner/components/runner_state_locked_badge.vue\",\"new_path\":\"app/assets/javascripts/runner/components/runner_state_locked_badge.vue\",\"a_mode\":\"100644\",\"b_mode\":\"0\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":true,\"diff\":\"@@ -1,25 +0,0 @@\\n-\\u003cscript\\u003e\\n-import { GlBadge, GlTooltipDirective } from '@gitlab/ui';\\n-import { I18N_LOCKED_RUNNER_DESCRIPTION } from '../constants';\\n-\\n-export default {\\n-  components: {\\n-    GlBadge,\\n-  },\\n-  directives: {\\n-    GlTooltip: GlTooltipDirective,\\n-  },\\n-  i18n: {\\n-    I18N_LOCKED_RUNNER_DESCRIPTION,\\n-  },\\n-};\\n-\\u003c/script\\u003e\\n-\\u003ctemplate\\u003e\\n-  \\u003cgl-badge\\n-    v-gl-tooltip=\\\"$options.i18n.I18N_LOCKED_RUNNER_DESCRIPTION\\\"\\n-    variant=\\\"warning\\\"\\n-    v-bind=\\\"$attrs\\\"\\n-  \\u003e\\n-    {{ s__('Runners|locked') }}\\n-  \\u003c/gl-badge\\u003e\\n-\\u003c/template\\u003e\\n\"},{\"old_path\":\"app/assets/javascripts/runner/components/runner_type_alert.vue\",\"new_path\":\"app/assets/javascripts/runner/components/runner_type_alert.vue\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -9,17 +9,14 @@ const ALERT_DATA = {\\n     message: s__(\\n       'Runners|This runner is available to all groups and projects in your GitLab instance.',\\n     ),\\n-    variant: 'success',\\n     anchor: 'shared-runners',\\n   },\\n   [GROUP_TYPE]: {\\n     message: s__('Runners|This runner is available to all projects and subgroups in a group.'),\\n-    variant: 'success',\\n     anchor: 'group-runners',\\n   },\\n   
[PROJECT_TYPE]: {\\n     message: s__('Runners|This runner is associated with one or more projects.'),\\n-    variant: 'info',\\n     anchor: 'specific-runners',\\n   },\\n };\\n@@ -50,7 +47,7 @@ export default {\\n };\\n \\u003c/script\\u003e\\n \\u003ctemplate\\u003e\\n-  \\u003cgl-alert v-if=\\\"alert\\\" :variant=\\\"alert.variant\\\" :dismissible=\\\"false\\\"\\u003e\\n+  \\u003cgl-alert v-if=\\\"alert\\\" variant=\\\"info\\\" :dismissible=\\\"false\\\"\\u003e\\n     {{ alert.message }}\\n     \\u003cgl-link :href=\\\"helpHref\\\"\\u003e{{ __('Learn more.') }}\\u003c/gl-link\\u003e\\n   \\u003c/gl-alert\\u003e\\n\"},{\"old_path\":\"app/assets/javascripts/runner/components/runner_type_badge.vue\",\"new_path\":\"app/assets/javascripts/runner/components/runner_type_badge.vue\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -12,17 +12,14 @@ import {\\n \\n const BADGE_DATA = {\\n   [INSTANCE_TYPE]: {\\n-    variant: 'success',\\n     text: s__('Runners|shared'),\\n     tooltip: I18N_INSTANCE_RUNNER_DESCRIPTION,\\n   },\\n   [GROUP_TYPE]: {\\n-    variant: 'success',\\n     text: s__('Runners|group'),\\n     tooltip: I18N_GROUP_RUNNER_DESCRIPTION,\\n   },\\n   [PROJECT_TYPE]: {\\n-    variant: 'info',\\n     text: s__('Runners|specific'),\\n     tooltip: I18N_PROJECT_RUNNER_DESCRIPTION,\\n   },\\n@@ -53,7 +50,7 @@ export default {\\n };\\n \\u003c/script\\u003e\\n \\u003ctemplate\\u003e\\n-  \\u003cgl-badge v-if=\\\"badge\\\" v-gl-tooltip=\\\"badge.tooltip\\\" :variant=\\\"badge.variant\\\" v-bind=\\\"$attrs\\\"\\u003e\\n+  \\u003cgl-badge v-if=\\\"badge\\\" v-gl-tooltip=\\\"badge.tooltip\\\" variant=\\\"info\\\" v-bind=\\\"$attrs\\\"\\u003e\\n     {{ badge.text }}\\n   \\u003c/gl-badge\\u003e\\n 
\\u003c/template\\u003e\\n\"},{\"old_path\":\"app/assets/javascripts/runner/constants.js\",\"new_path\":\"app/assets/javascripts/runner/constants.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -6,11 +6,24 @@ export const GROUP_RUNNER_COUNT_LIMIT = 1000;\\n export const I18N_FETCH_ERROR = s__('Runners|Something went wrong while fetching runner data.');\\n export const I18N_DETAILS_TITLE = s__('Runners|Runner #%{runner_id}');\\n \\n+// Type\\n export const I18N_INSTANCE_RUNNER_DESCRIPTION = s__('Runners|Available to all projects');\\n export const I18N_GROUP_RUNNER_DESCRIPTION = s__(\\n   'Runners|Available to all projects and subgroups in the group',\\n );\\n export const I18N_PROJECT_RUNNER_DESCRIPTION = s__('Runners|Associated with one or more projects');\\n+\\n+// Status\\n+export const I18N_ONLINE_RUNNER_DESCRIPTION = s__(\\n+  'Runners|Runner is online; last contact was %{timeAgo}',\\n+);\\n+export const I18N_OFFLINE_RUNNER_DESCRIPTION = s__(\\n+  'Runners|No recent contact from this runner; last contact was %{timeAgo}',\\n+);\\n+export const I18N_NOT_CONNECTED_RUNNER_DESCRIPTION = s__(\\n+  'Runners|This runner has never connected to this instance',\\n+);\\n+\\n export const I18N_LOCKED_RUNNER_DESCRIPTION = s__('Runners|You cannot assign to other projects');\\n export const I18N_PAUSED_RUNNER_DESCRIPTION = s__('Runners|Not available to run jobs');\\n \\n\"},{\"old_path\":\"app/assets/javascripts/runner/graphql/runner_actions_update.mutation.graphql\",\"new_path\":\"app/assets/javascripts/runner/graphql/runner_actions_update.mutation.graphql\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,14 @@\\n+#import \\\"~/runner/graphql/runner_node.fragment.graphql\\\"\\n+\\n+# Mutation for updates within the runners list via action\\n+# buttons (play, pause, ...), loads attributes shown in the\\n+# runner 
list.\\n+\\n+mutation runnerActionsUpdate($input: RunnerUpdateInput!) {\\n+  runnerUpdate(input: $input) {\\n+    runner {\\n+      ...RunnerNode\\n+    }\\n+    errors\\n+  }\\n+}\\n\"},{\"old_path\":\"app/assets/javascripts/runner/graphql/runner_node.fragment.graphql\",\"new_path\":\"app/assets/javascripts/runner/graphql/runner_node.fragment.graphql\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -10,4 +10,5 @@ fragment RunnerNode on CiRunner {\\n   locked\\n   tagList\\n   contactedAt\\n+  status\\n }\\n\"},{\"old_path\":\"app/assets/javascripts/runner/graphql/runner_update.mutation.graphql\",\"new_path\":\"app/assets/javascripts/runner/graphql/runner_update.mutation.graphql\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -1,5 +1,8 @@\\n #import \\\"ee_else_ce/runner/graphql/runner_details.fragment.graphql\\\"\\n \\n+# Mutation for updates from the runner form, loads\\n+# attributes shown in the runner details.\\n+\\n mutation runnerUpdate($input: RunnerUpdateInput!) {\\n   runnerUpdate(input: $input) {\\n     runner {\\n\"},{\"old_path\":\"app/assets/javascripts/vue_merge_request_widget/components/added_commit_message.vue\",\"new_path\":\"app/assets/javascripts/vue_merge_request_widget/components/added_commit_message.vue\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -40,9 +40,9 @@ export default {\\n     },\\n     message() {\\n       return this.isFastForwardEnabled\\n-        ? s__('mrWidgetCommitsAdded|%{commitCount} will be added to %{targetBranch}.')\\n+        ? 
s__('mrWidgetCommitsAdded|Adds %{commitCount} to %{targetBranch}.')\\n         : s__(\\n-            'mrWidgetCommitsAdded|%{commitCount} and %{mergeCommitCount} will be added to %{targetBranch}%{squashedCommits}.',\\n+            'mrWidgetCommitsAdded|Adds %{commitCount} and %{mergeCommitCount} to %{targetBranch}%{squashedCommits}.',\\n           );\\n     },\\n     textDecorativeComponent() {\\n@@ -69,7 +69,7 @@ export default {\\n       \\u003c/template\\u003e\\n       \\u003ctemplate #squashedCommits\\u003e\\n         \\u003ctemplate v-if=\\\"glFeatures.restructuredMrWidget \\u0026\\u0026 isSquashEnabled\\\"\\u003e\\n-          {{ __('(commits will be squashed)') }}\\u003c/template\\n+          {{ n__('(squashes %d commit)', '(squashes %d commits)', commitsCount) }}\\u003c/template\\n         \\u003e\\u003c/template\\n       \\u003e\\n     \\u003c/gl-sprintf\\u003e\\n\"},{\"old_path\":\"app/assets/javascripts/vue_merge_request_widget/components/source_branch_removal_status.vue\",\"new_path\":\"app/assets/javascripts/vue_merge_request_widget/components/source_branch_removal_status.vue\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -4,7 +4,7 @@ import { __ } from '../../locale';\\n \\n export default {\\n   i18n: {\\n-    removesBranchText: __('The source branch will be deleted'),\\n+    removesBranchText: __('Deletes the source branch'),\\n     tooltipTitle: __('A user with write access to the source branch selected this option'),\\n   },\\n   components: {\\n\"},{\"old_path\":\"app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_auto_merge_enabled.vue\",\"new_path\":\"app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_auto_merge_enabled.vue\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -177,10 +177,10 @@ export default {\\n         \\u003c/h4\\u003e\\n        
 \\u003csection class=\\\"mr-info-list\\\"\\u003e\\n           \\u003cp v-if=\\\"shouldRemoveSourceBranch\\\"\\u003e\\n-            {{ s__('mrWidget|The source branch will be deleted') }}\\n+            {{ s__('mrWidget|Deletes the source branch') }}\\n           \\u003c/p\\u003e\\n           \\u003cp v-else class=\\\"gl-display-flex\\\"\\u003e\\n-            \\u003cspan class=\\\"gl-mr-3\\\"\\u003e{{ s__('mrWidget|The source branch will not be deleted') }}\\u003c/span\\u003e\\n+            \\u003cspan class=\\\"gl-mr-3\\\"\\u003e{{ s__('mrWidget|Does not delete the source branch') }}\\u003c/span\\u003e\\n             \\u003cgl-button\\n               v-if=\\\"canRemoveSourceBranch\\\"\\n               :loading=\\\"isRemovingSourceBranch\\\"\\n\"},{\"old_path\":\"app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_merging.vue\",\"new_path\":\"app/assets/javascripts/vue_merge_request_widget/components/states/mr_widget_merging.vue\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -32,7 +32,7 @@ export default {\\n       \\u003c/h4\\u003e\\n       \\u003csection class=\\\"mr-info-list\\\"\\u003e\\n         \\u003cp\\u003e\\n-          {{ s__('mrWidget|The changes will be merged into') }}\\n+          {{ s__('mrWidget|Merges changes into') }}\\n           \\u003cspan class=\\\"label-branch\\\"\\u003e\\n             \\u003ca :href=\\\"mr.targetBranchPath\\\"\\u003e{{ mr.targetBranch }}\\u003c/a\\u003e\\n           \\u003c/span\\u003e\\n\"},{\"old_path\":\"app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue\",\"new_path\":\"app/assets/javascripts/vue_merge_request_widget/components/states/ready_to_merge.vue\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -710,10 +710,10 @@ export default {\\n                   \\u003c/li\\u003e\\n                   \\u003cli 
class=\\\"gl-line-height-normal\\\"\\u003e\\n                     \\u003ctemplate v-if=\\\"removeSourceBranch\\\"\\u003e\\n-                      {{ __('Source branch will be deleted.') }}\\n+                      {{ __('Deletes the source branch.') }}\\n                     \\u003c/template\\u003e\\n                     \\u003ctemplate v-else\\u003e\\n-                      {{ __('Source branch will not be deleted.') }}\\n+                      {{ __('Does not delete the source branch.') }}\\n                     \\u003c/template\\u003e\\n                   \\u003c/li\\u003e\\n                   \\u003cli v-if=\\\"mr.relatedLinks\\\" class=\\\"gl-line-height-normal\\\"\\u003e\\n\"},{\"old_path\":\"app/assets/stylesheets/framework/files.scss\",\"new_path\":\"app/assets/stylesheets/framework/files.scss\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -227,7 +227,7 @@\\n       // IMPORTANT PERFORMANCE OPTIMIZATION\\n       //\\n       // When viewinng a blame with many commits a lot of content is rendered on the page.\\n-      // Two selectors below ensure that we only render what is visible to the user, thus reducing TBT in the browser.\\n+      // content-visibility rules below ensure that we only render what is visible to the user, thus reducing TBT in the browser.\\n       .commit {\\n         content-visibility: auto;\\n         contain-intrinsic-size: 1px 3em;\\n@@ -237,6 +237,10 @@\\n         content-visibility: auto;\\n         contain-intrinsic-size: 1px 1.1875rem;\\n       }\\n+\\n+      .line-numbers {\\n+        content-visibility: auto;\\n+      }\\n     }\\n \\n     \\u0026.logs {\\n\"},{\"old_path\":\"app/graphql/mutations/issues/set_crm_contacts.rb\",\"new_path\":\"app/graphql/mutations/issues/set_crm_contacts.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,48 @@\\n+# frozen_string_literal: 
true\\n+\\n+module Mutations\\n+  module Issues\\n+    class SetCrmContacts \\u003c Base\\n+      graphql_name 'IssueSetCrmContacts'\\n+\\n+      argument :crm_contact_ids,\\n+               [::Types::GlobalIDType[::CustomerRelations::Contact]],\\n+               required: true,\\n+               description: 'Customer relations contact IDs to set. Replaces existing contacts by default.'\\n+\\n+      argument :operation_mode,\\n+               Types::MutationOperationModeEnum,\\n+               required: false,\\n+               description: 'Changes the operation mode. Defaults to REPLACE.'\\n+\\n+      def resolve(project_path:, iid:, crm_contact_ids:, operation_mode: Types::MutationOperationModeEnum.enum[:replace])\\n+        issue = authorized_find!(project_path: project_path, iid: iid)\\n+        project = issue.project\\n+        raise Gitlab::Graphql::Errors::ResourceNotAvailable, 'Feature disabled' unless Feature.enabled?(:customer_relations, project.group, default_enabled: :yaml)\\n+\\n+        crm_contact_ids = crm_contact_ids.compact.map do |crm_contact_id|\\n+          raise Gitlab::Graphql::Errors::ArgumentError, \\\"Contact #{crm_contact_id} is invalid.\\\" unless crm_contact_id.respond_to?(:model_id)\\n+\\n+          crm_contact_id.model_id.to_i\\n+        end\\n+\\n+        attribute_name = case operation_mode\\n+                         when Types::MutationOperationModeEnum.enum[:append]\\n+                           :add_crm_contact_ids\\n+                         when Types::MutationOperationModeEnum.enum[:remove]\\n+                           :remove_crm_contact_ids\\n+                         else\\n+                           :crm_contact_ids\\n+                         end\\n+\\n+        response = ::Issues::SetCrmContactsService.new(project: project, current_user: current_user, params: { attribute_name =\\u003e crm_contact_ids })\\n+          .execute(issue)\\n+\\n+        {\\n+          issue: issue,\\n+          errors: response.errors\\n+ 
       }\\n+      end\\n+    end\\n+  end\\n+end\\n\"},{\"old_path\":\"app/graphql/types/mutation_type.rb\",\"new_path\":\"app/graphql/types/mutation_type.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -49,6 +49,7 @@ class MutationType \\u003c BaseObject\\n     mount_mutation Mutations::Environments::CanaryIngress::Update\\n     mount_mutation Mutations::Issues::Create\\n     mount_mutation Mutations::Issues::SetAssignees\\n+    mount_mutation Mutations::Issues::SetCrmContacts\\n     mount_mutation Mutations::Issues::SetConfidential\\n     mount_mutation Mutations::Issues::SetLocked\\n     mount_mutation Mutations::Issues::SetDueDate\\n\"},{\"old_path\":\"app/helpers/application_settings_helper.rb\",\"new_path\":\"app/helpers/application_settings_helper.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -404,6 +404,10 @@ def visible_attributes\\n       :keep_latest_artifact,\\n       :whats_new_variant,\\n       :user_deactivation_emails_enabled,\\n+      :sentry_enabled,\\n+      :sentry_dsn,\\n+      :sentry_clientside_dsn,\\n+      :sentry_environment,\\n       :sidekiq_job_limiter_mode,\\n       :sidekiq_job_limiter_compression_threshold_bytes,\\n       :sidekiq_job_limiter_limit_bytes,\\n\"},{\"old_path\":\"app/helpers/issues_helper.rb\",\"new_path\":\"app/helpers/issues_helper.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -1,6 +1,8 @@\\n # frozen_string_literal: true\\n \\n module IssuesHelper\\n+  include Issues::IssueTypeHelpers\\n+\\n   def issue_css_classes(issue)\\n     classes = [\\\"issue\\\"]\\n     classes \\u003c\\u003c \\\"closed\\\" if 
issue.closed?\\n\"},{\"old_path\":\"app/models/application_setting.rb\",\"new_path\":\"app/models/application_setting.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -536,6 +536,18 @@ def self.kroki_formats_attributes\\n   validates :sidekiq_job_limiter_limit_bytes,\\n             numericality: { only_integer: true, greater_than_or_equal_to: 0 }\\n \\n+  validates :sentry_enabled,\\n+    inclusion: { in: [true, false], message: _('must be a boolean value') }\\n+  validates :sentry_dsn,\\n+    addressable_url: true, presence: true, length: { maximum: 255 },\\n+    if: :sentry_enabled?\\n+  validates :sentry_clientside_dsn,\\n+    addressable_url: true, allow_blank: true, length: { maximum: 255 },\\n+    if: :sentry_enabled?\\n+  validates :sentry_environment,\\n+    presence: true, length: { maximum: 255 },\\n+    if: :sentry_enabled?\\n+\\n   attr_encrypted :asset_proxy_secret_key,\\n                  mode: :per_attribute_iv,\\n                  key: Settings.attr_encrypted_db_key_base_truncated,\\n\"},{\"old_path\":\"app/models/concerns/alert_event_lifecycle.rb\",\"new_path\":\"app/models/concerns/alert_event_lifecycle.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -41,8 +41,6 @@ module AlertEventLifecycle\\n     scope :firing, -\\u003e { where(status: status_value_for(:firing)) }\\n     scope :resolved, -\\u003e { where(status: status_value_for(:resolved)) }\\n \\n-    scope :count_by_project_id, -\\u003e { group(:project_id).count }\\n-\\n     def self.status_value_for(name)\\n       state_machines[:status].states[name].value\\n     end\\n\"},{\"old_path\":\"app/models/concerns/issuable.rb\",\"new_path\":\"app/models/concerns/issuable.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -92,7 +92,6 @@ def award_emojis_loaded?\\n   
  scope :recent, -\\u003e { reorder(id: :desc) }\\n     scope :of_projects, -\\u003e(ids) { where(project_id: ids) }\\n     scope :opened, -\\u003e { with_state(:opened) }\\n-    scope :only_opened, -\\u003e { with_state(:opened) }\\n     scope :closed, -\\u003e { with_state(:closed) }\\n \\n     # rubocop:disable GitlabSecurity/SqlInjection\\n\"},{\"old_path\":\"app/models/concerns/milestoneable.rb\",\"new_path\":\"app/models/concerns/milestoneable.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -14,7 +14,6 @@ module Milestoneable\\n \\n     validate :milestone_is_valid\\n \\n-    scope :of_milestones, -\\u003e(ids) { where(milestone_id: ids) }\\n     scope :any_milestone, -\\u003e { where.not(milestone_id: nil) }\\n     scope :with_milestone, -\\u003e(title) { left_joins_milestones.where(milestones: { title: title }) }\\n     scope :without_particular_milestone, -\\u003e(title) { left_outer_joins(:milestone).where(\\\"milestones.title != ? 
OR milestone_id IS NULL\\\", title) }\\n\"},{\"old_path\":\"app/models/customer_relations/issue_contact.rb\",\"new_path\":\"app/models/customer_relations/issue_contact.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -15,6 +15,6 @@ def contact_belongs_to_issue_group\\n     return unless issue\\u0026.project\\u0026.namespace_id\\n     return if contact.group_id == issue.project.namespace_id\\n \\n-    errors.add(:base, _('The contact does not belong to the same group as the issue.'))\\n+    errors.add(:base, _('The contact does not belong to the same group as the issue'))\\n   end\\n end\\n\"},{\"old_path\":\"app/models/group.rb\",\"new_path\":\"app/models/group.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -194,13 +194,8 @@ def preset_root_ancestor_for(groups)\\n     def ids_with_disabled_email(groups)\\n       inner_groups = Group.where('id = namespaces_with_emails_disabled.id')\\n \\n-      inner_ancestors = if Feature.enabled?(:linear_group_ancestor_scopes, default_enabled: :yaml)\\n-                          inner_groups.self_and_ancestors\\n-                        else\\n-                          Gitlab::ObjectHierarchy.new(inner_groups).base_and_ancestors\\n-                        end\\n-\\n-      inner_query = inner_ancestors\\n+      inner_query = inner_groups\\n+        .self_and_ancestors\\n         .where(emails_disabled: true)\\n         .select('1')\\n         .limit(1)\\n\"},{\"old_path\":\"app/policies/issue_policy.rb\",\"new_path\":\"app/policies/issue_policy.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -12,6 +12,9 @@ class IssuePolicy \\u003c IssuablePolicy\\n     @user \\u0026\\u0026 IssueCollection.new([@subject]).visible_to(@user).any?\\n   end\\n \\n+  desc \\\"User can read contacts belonging to the 
issue group\\\"\\n+  condition(:can_read_crm_contacts, scope: :subject) { @user.can?(:read_crm_contact, @subject.project.group) }\\n+\\n   desc \\\"Issue is confidential\\\"\\n   condition(:confidential, scope: :subject) { @subject.confidential? }\\n \\n@@ -77,6 +80,10 @@ class IssuePolicy \\u003c IssuablePolicy\\n   rule { ~persisted \\u0026 can?(:create_issue) }.policy do\\n     enable :set_confidentiality\\n   end\\n+\\n+  rule { can?(:set_issue_metadata) \\u0026 can_read_crm_contacts }.policy do\\n+    enable :set_issue_crm_contacts\\n+  end\\n end\\n \\n IssuePolicy.prepend_mod_with('IssuePolicy')\\n\"},{\"old_path\":\"app/services/authorized_project_update/project_access_changed_service.rb\",\"new_path\":\"app/services/authorized_project_update/project_access_changed_service.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,19 @@\\n+# frozen_string_literal: true\\n+\\n+module AuthorizedProjectUpdate\\n+  class ProjectAccessChangedService\\n+    def initialize(project_ids)\\n+      @project_ids = Array.wrap(project_ids)\\n+    end\\n+\\n+    def execute(blocking: true)\\n+      bulk_args = @project_ids.map { |id| [id] }\\n+\\n+      if blocking\\n+        AuthorizedProjectUpdate::ProjectRecalculateWorker.bulk_perform_and_wait(bulk_args)\\n+      else\\n+        AuthorizedProjectUpdate::ProjectRecalculateWorker.bulk_perform_async(bulk_args) # rubocop:disable Scalability/BulkPerformWithContext\\n+      end\\n+    end\\n+  end\\n+end\\n\"},{\"old_path\":\"app/services/concerns/issues/issue_type_helpers.rb\",\"new_path\":\"app/services/concerns/issues/issue_type_helpers.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,12 @@\\n+# frozen_string_literal: true\\n+\\n+module Issues\\n+  module IssueTypeHelpers\\n+    # @param object [Issue, Project]\\n+    # @param issue_type [String, Symbol]\\n+    def 
create_issue_type_allowed?(object, issue_type)\\n+      WorkItem::Type.base_types.key?(issue_type.to_s) \\u0026\\u0026\\n+        can?(current_user, :\\\"create_#{issue_type}\\\", object)\\n+    end\\n+  end\\n+end\\n\"},{\"old_path\":\"app/services/groups/transfer_service.rb\",\"new_path\":\"app/services/groups/transfer_service.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -175,21 +175,18 @@ def ensure_ownership\\n     end\\n \\n     def refresh_project_authorizations\\n-      ProjectAuthorization.where(project_id: @group.all_projects.select(:id)).delete_all # rubocop: disable CodeReuse/ActiveRecord\\n+      projects_to_update = Set.new\\n \\n-      # refresh authorized projects for current_user immediately\\n-      current_user.refresh_authorized_projects\\n-\\n-      # schedule refreshing projects for all the members of the group\\n-      @group.refresh_members_authorized_projects\\n+      # All projects in this hierarchy need to have their project authorizations recalculated\\n+      @group.all_projects.each_batch { |prjs| projects_to_update.merge(prjs.ids) } # rubocop: disable CodeReuse/ActiveRecord\\n \\n       # When a group is transferred, it also affects who gets access to the projects shared to\\n       # the subgroups within its hierarchy, so we also schedule jobs that refresh authorizations for all such shared projects.\\n-      project_group_shares_within_the_hierarchy = ProjectGroupLink.in_group(group.self_and_descendants.select(:id))\\n-\\n-      project_group_shares_within_the_hierarchy.find_each do |project_group_link|\\n-        AuthorizedProjectUpdate::ProjectRecalculateWorker.perform_async(project_group_link.project_id)\\n+      ProjectGroupLink.in_group(@group.self_and_descendants.select(:id)).each_batch do |project_group_links|\\n+        projects_to_update.merge(project_group_links.pluck(:project_id)) # rubocop: disable CodeReuse/ActiveRecord\\n       
end\\n+\\n+      AuthorizedProjectUpdate::ProjectAccessChangedService.new(projects_to_update.to_a).execute unless projects_to_update.empty?\\n     end\\n \\n     def raise_transfer_error(message)\\n\"},{\"old_path\":\"app/services/issues/base_service.rb\",\"new_path\":\"app/services/issues/base_service.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -3,6 +3,7 @@\\n module Issues\\n   class BaseService \\u003c ::IssuableBaseService\\n     include IncidentManagement::UsageData\\n+    include IssueTypeHelpers\\n \\n     def hook_data(issue, action, old_associations: {})\\n       hook_data = issue.to_hook_data(current_user, old_associations: old_associations)\\n@@ -44,7 +45,7 @@ def find_work_item_type_id(issue_type)\\n     def filter_params(issue)\\n       super\\n \\n-      params.delete(:issue_type) unless issue_type_allowed?(issue)\\n+      params.delete(:issue_type) unless create_issue_type_allowed?(issue, params[:issue_type])\\n       filter_incident_label(issue) if params[:issue_type]\\n \\n       moved_issue = params.delete(:moved_issue)\\n@@ -89,12 +90,6 @@ def delete_milestone_total_issue_counter_cache(milestone)\\n       Milestones::IssuesCountService.new(milestone).delete_cache\\n     end\\n \\n-    # @param object [Issue, Project]\\n-    def issue_type_allowed?(object)\\n-      WorkItem::Type.base_types.key?(params[:issue_type]) \\u0026\\u0026\\n-        can?(current_user, :\\\"create_#{params[:issue_type]}\\\", object)\\n-    end\\n-\\n     # @param issue [Issue]\\n     def filter_incident_label(issue)\\n       return unless add_incident_label?(issue) || remove_incident_label?(issue)\\n\"},{\"old_path\":\"app/services/issues/build_service.rb\",\"new_path\":\"app/services/issues/build_service.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -80,7 +80,7 @@ def allowed_issue_params\\n       ]\\n \\n    
   allowed_params \\u003c\\u003c :milestone_id if can?(current_user, :admin_issue, project)\\n-      allowed_params \\u003c\\u003c :issue_type if issue_type_allowed?(project)\\n+      allowed_params \\u003c\\u003c :issue_type if create_issue_type_allowed?(project, params[:issue_type])\\n \\n       params.slice(*allowed_params)\\n     end\\n\"},{\"old_path\":\"app/services/issues/set_crm_contacts_service.rb\",\"new_path\":\"app/services/issues/set_crm_contacts_service.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,90 @@\\n+# frozen_string_literal: true\\n+\\n+module Issues\\n+  class SetCrmContactsService \\u003c ::BaseProjectService\\n+    attr_accessor :issue, :errors\\n+\\n+    MAX_ADDITIONAL_CONTACTS = 6\\n+\\n+    def execute(issue)\\n+      @issue = issue\\n+      @errors = []\\n+\\n+      return error_no_permissions unless allowed?\\n+      return error_invalid_params unless valid_params?\\n+\\n+      determine_changes if params[:crm_contact_ids]\\n+\\n+      return error_too_many if too_many?\\n+\\n+      add_contacts if params[:add_crm_contact_ids]\\n+      remove_contacts if params[:remove_crm_contact_ids]\\n+\\n+      if issue.valid?\\n+        ServiceResponse.success(payload: issue)\\n+      else\\n+        # The default error isn't very helpful: \\\"Issue customer relations contacts is invalid\\\"\\n+        issue.errors.delete(:issue_customer_relations_contacts)\\n+        issue.errors.add(:issue_customer_relations_contacts, errors.to_sentence)\\n+        ServiceResponse.error(payload: issue, message: issue.errors.full_messages)\\n+      end\\n+    end\\n+\\n+    private\\n+\\n+    def determine_changes\\n+      existing_contact_ids = issue.issue_customer_relations_contacts.map(\\u0026:contact_id)\\n+      params[:add_crm_contact_ids] = params[:crm_contact_ids] - existing_contact_ids\\n+      params[:remove_crm_contact_ids] = existing_contact_ids - 
params[:crm_contact_ids]\\n+    end\\n+\\n+    def add_contacts\\n+      params[:add_crm_contact_ids].uniq.each do |contact_id|\\n+        issue_contact = issue.issue_customer_relations_contacts.create(contact_id: contact_id)\\n+\\n+        unless issue_contact.persisted?\\n+          # The validation ensures that the id exists and the user has permission\\n+          errors \\u003c\\u003c \\\"#{contact_id}: The resource that you are attempting to access does not exist or you don't have permission to perform this action\\\"\\n+        end\\n+      end\\n+    end\\n+\\n+    def remove_contacts\\n+      issue.issue_customer_relations_contacts\\n+        .where(contact_id: params[:remove_crm_contact_ids]) # rubocop: disable CodeReuse/ActiveRecord\\n+        .delete_all\\n+    end\\n+\\n+    def allowed?\\n+      current_user\\u0026.can?(:set_issue_crm_contacts, issue)\\n+    end\\n+\\n+    def valid_params?\\n+      set_present? ^ add_or_remove_present?\\n+    end\\n+\\n+    def set_present?\\n+      params[:crm_contact_ids].present?\\n+    end\\n+\\n+    def add_or_remove_present?\\n+      params[:add_crm_contact_ids].present? 
|| params[:remove_crm_contact_ids].present?\\n+    end\\n+\\n+    def too_many?\\n+      params[:add_crm_contact_ids] \\u0026\\u0026 params[:add_crm_contact_ids].length \\u003e MAX_ADDITIONAL_CONTACTS\\n+    end\\n+\\n+    def error_no_permissions\\n+      ServiceResponse.error(message: ['You have insufficient permissions to set customer relations contacts for this issue'])\\n+    end\\n+\\n+    def error_invalid_params\\n+      ServiceResponse.error(message: ['You cannot combine crm_contact_ids with add_crm_contact_ids or remove_crm_contact_ids'])\\n+    end\\n+\\n+    def error_too_many\\n+      ServiceResponse.error(payload: issue, message: [\\\"You can only add up to #{MAX_ADDITIONAL_CONTACTS} contacts at one time\\\"])\\n+    end\\n+  end\\n+end\\n\"},{\"old_path\":\"app/services/projects/container_repository/cleanup_tags_service.rb\",\"new_path\":\"app/services/projects/container_repository/cleanup_tags_service.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -146,8 +146,7 @@ def cache\\n \\n       def caching_enabled?\\n         container_expiration_policy \\u0026\\u0026\\n-          older_than.present? 
\\u0026\\u0026\\n-          Feature.enabled?(:container_registry_expiration_policies_caching, @project)\\n+          older_than.present?\\n       end\\n \\n       def throttling_enabled?\\n\"},{\"old_path\":\"app/views/admin/application_settings/_sentry.html.haml\",\"new_path\":\"app/views/admin/application_settings/_sentry.html.haml\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,22 @@\\n+= form_for @application_setting, url: metrics_and_profiling_admin_application_settings_path(anchor: 'js-sentry-settings'), html: { class: 'fieldset-form', id: 'sentry-settings' } do |f|\\n+  = form_errors(@application_setting)\\n+\\n+  %span.text-muted\\n+    = _('Changing any setting here requires an application restart')\\n+\\n+  %fieldset\\n+    .form-group\\n+      .form-check\\n+        = f.check_box :sentry_enabled, class: 'form-check-input'\\n+        = f.label :sentry_enabled, _('Enable Sentry error tracking'), class: 'form-check-label'\\n+    .form-group\\n+      = f.label :sentry_dsn, _('DSN'), class: 'label-light'\\n+      = f.text_field :sentry_dsn, class: 'form-control gl-form-input', placeholder: 'https://public@sentry.example.com/1'\\n+    .form-group\\n+      = f.label :sentry_clientside_dsn, _('Clientside DSN'), class: 'label-light'\\n+      = f.text_field :sentry_clientside_dsn, class: 'form-control gl-form-input', placeholder: 'https://public@sentry.example.com/2'\\n+    .form-group\\n+      = f.label :sentry_environment, _('Environment'), class: 'label-light'\\n+      = f.text_field :sentry_environment, class: 'form-control gl-form-input', placeholder: Rails.env\\n+\\n+  = f.submit _('Save changes'), class: 'gl-button btn 
btn-confirm'\\n\"},{\"old_path\":\"app/views/admin/application_settings/metrics_and_profiling.html.haml\",\"new_path\":\"app/views/admin/application_settings/metrics_and_profiling.html.haml\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -54,3 +54,15 @@\\n     = render 'usage'\\n \\n = render_if_exists 'admin/application_settings/pseudonymizer_settings', expanded: expanded_by_default?\\n+\\n+- if Feature.enabled?(:configure_sentry_in_application_settings, default_enabled: :yaml)\\n+  %section.settings.as-sentry.no-animate#js-sentry-settings{ class: ('expanded' if expanded_by_default?), data: { qa_selector: 'sentry_settings_content' } }\\n+    .settings-header\\n+      %h4\\n+        = _('Sentry')\\n+      %button.btn.gl-button.btn-default.js-settings-toggle{ type: 'button' }\\n+        = expanded_by_default? ? _('Collapse') : _('Expand')\\n+      %p\\n+        = _('Configure Sentry integration for error tracking')\\n+    .settings-content\\n+      = render 'sentry'\\n\"},{\"old_path\":\"app/views/admin/dev_ops_report/_report.html.haml\",\"new_path\":\"app/views/admin/dev_ops_report/_score.html.haml\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"\"},{\"old_path\":\"app/views/admin/dev_ops_report/show.html.haml\",\"new_path\":\"app/views/admin/dev_ops_report/show.html.haml\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -6,5 +6,5 @@\\n     - if show_adoption?\\n       = render_if_exists 'admin/dev_ops_report/devops_tabs'\\n     - else\\n-      = render 'report'\\n+      = render 'score'\\n 
\\n\"},{\"old_path\":\"app/views/projects/product_analytics/_links.html.haml\",\"new_path\":\"app/views/projects/product_analytics/_links.html.haml\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -1,10 +1,5 @@\\n-.mb-3\\n-  %ul.nav-links\\n-    = nav_link(path: 'product_analytics#index') do\\n-      = link_to _('Events'), project_product_analytics_path(@project)\\n-    = nav_link(path: 'product_analytics#graphs') do\\n-      = link_to 'Graphs', graphs_project_product_analytics_path(@project)\\n-    = nav_link(path: 'product_analytics#test') do\\n-      = link_to _('Test'), test_project_product_analytics_path(@project)\\n-    = nav_link(path: 'product_analytics#setup') do\\n-      = link_to _('Setup'), setup_project_product_analytics_path(@project)\\n+= gl_tabs_nav({ class: 'mb-3'}) do\\n+  = gl_tab_link_to _('Events'), project_product_analytics_path(@project)\\n+  = gl_tab_link_to _('Graphs'), graphs_project_product_analytics_path(@project)\\n+  = gl_tab_link_to _('Test'), test_project_product_analytics_path(@project)\\n+  = gl_tab_link_to _('Setup'), setup_project_product_analytics_path(@project)\\n\"},{\"old_path\":\"app/views/shared/issuable/form/_type_selector.html.haml\",\"new_path\":\"app/views/shared/issuable/form/_type_selector.html.haml\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -18,17 +18,19 @@\\n                 = sprite_icon('close', size: 16, css_class: 'dropdown-menu-close-icon')\\n             .dropdown-content{ data: { testid: 'issue-type-select-dropdown' } }\\n               %ul\\n-                %li.js-filter-issuable-type\\n-                  = link_to new_project_issue_path(@project), class: (\\\"is-active\\\" if issuable.issue?) 
do\\n-                    #{sprite_icon(work_item_type_icon(:issue), css_class: 'gl-icon')} #{_(\\\"Issue\\\")}\\n-                %li.js-filter-issuable-type{ data: { track: { action: \\\"select_issue_type_incident\\\", label: \\\"select_issue_type_incident_dropdown_option\\\" } } }\\n-                  = link_to new_project_issue_path(@project, { issuable_template: 'incident', issue: { issue_type: 'incident' } }), class: (\\\"is-active\\\" if issuable.incident?) do\\n-                    #{sprite_icon(work_item_type_icon(:incident), css_class: 'gl-icon')} #{_(\\\"Incident\\\")}\\n+                - if create_issue_type_allowed?(@project, :issue)\\n+                  %li.js-filter-issuable-type\\n+                    = link_to new_project_issue_path(@project), class: (\\\"is-active\\\" if issuable.issue?) do\\n+                      #{sprite_icon(work_item_type_icon(:issue), css_class: 'gl-icon')} #{_('Issue')}\\n+                - if create_issue_type_allowed?(@project, :incident)\\n+                  %li.js-filter-issuable-type{ data: { track: { action: \\\"select_issue_type_incident\\\", label: \\\"select_issue_type_incident_dropdown_option\\\" } } }\\n+                    = link_to new_project_issue_path(@project, { issuable_template: 'incident', issue: { issue_type: 'incident' } }), class: (\\\"is-active\\\" if issuable.incident?) 
do\\n+                      #{sprite_icon(work_item_type_icon(:incident), css_class: 'gl-icon')} #{_('Incident')}\\n \\n       #js-type-popover\\n \\n     - if issuable.incident?\\n       %p.form-text.text-muted\\n         - incident_docs_url = help_page_path('operations/incident_management/incidents.md')\\n-        - incident_docs_start = '\\u003ca href=\\\"%{url}\\\" target=\\\"_blank\\\" rel=\\\"noopener noreferrer\\\"\\u003e'.html_safe % { url: incident_docs_url }\\n-        = _('A %{incident_docs_start}modified issue%{incident_docs_end} to guide the resolution of incidents.').html_safe % { incident_docs_start: incident_docs_start, incident_docs_end: '\\u003c/a\\u003e'.html_safe }\\n+        - incident_docs_start = format('\\u003ca href=\\\"%{url}\\\" target=\\\"_blank\\\" rel=\\\"noopener noreferrer\\\"\\u003e', url: incident_docs_url)\\n+        = format(_('A %{incident_docs_start}modified issue%{incident_docs_end} to guide the resolution of incidents.'), incident_docs_start: incident_docs_start, incident_docs_end: '\\u003c/a\\u003e').html_safe\\n\"},{\"old_path\":\"app/workers/authorized_project_update/project_recalculate_worker.rb\",\"new_path\":\"app/workers/authorized_project_update/project_recalculate_worker.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -7,6 +7,8 @@ class ProjectRecalculateWorker\\n     data_consistency :always\\n     include Gitlab::ExclusiveLeaseHelpers\\n \\n+    prepend WaitableWorker\\n+\\n     feature_category :authentication_and_authorization\\n     urgency :high\\n     queue_namespace :authorized_project_update\\n\"},{\"old_path\":\"app/workers/container_expiration_policies/cleanup_container_repository_worker.rb\",\"new_path\":\"app/workers/container_expiration_policies/cleanup_container_repository_worker.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -159,7 +159,10 @@ 
def log_cache_ratio(result)\\n \\n       return unless tags_count \\u0026\\u0026 cached_tags_count \\u0026\\u0026 tags_count != 0\\n \\n-      log_extra_metadata_on_done(:cleanup_tags_service_cache_hit_ratio, cached_tags_count / tags_count.to_f)\\n+      ratio = cached_tags_count / tags_count.to_f\\n+      ratio_as_percentage = (ratio * 100).round(2)\\n+\\n+      log_extra_metadata_on_done(:cleanup_tags_service_cache_hit_ratio, ratio_as_percentage)\\n     end\\n \\n     def log_truncate(result)\\n\"},{\"old_path\":\"bin/sidekiq-cluster\",\"new_path\":\"bin/sidekiq-cluster\",\"a_mode\":\"100755\",\"b_mode\":\"100755\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -1,13 +1,7 @@\\n #!/usr/bin/env ruby\\n # frozen_string_literal: true\\n \\n-require 'optparse'\\n-require_relative '../lib/gitlab'\\n-require_relative '../lib/gitlab/utils'\\n-require_relative '../lib/gitlab/sidekiq_config/cli_methods'\\n-require_relative '../lib/gitlab/sidekiq_config/worker_matcher'\\n-require_relative '../lib/gitlab/sidekiq_cluster'\\n-require_relative '../lib/gitlab/sidekiq_cluster/cli'\\n+require_relative '../sidekiq_cluster/cli'\\n \\n Thread.abort_on_exception = true\\n \\n\"},{\"old_path\":\"config/application.rb\",\"new_path\":\"config/application.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -16,6 +16,8 @@\\n \\n module Gitlab\\n   class Application \\u003c Rails::Application\\n+    config.load_defaults 6.1\\n+\\n     require_dependency Rails.root.join('lib/gitlab')\\n     require_dependency Rails.root.join('lib/gitlab/utils')\\n     require_dependency Rails.root.join('lib/gitlab/action_cable/config')\\n@@ -37,8 +39,6 @@ class Application \\u003c Rails::Application\\n     require_dependency Rails.root.join('lib/gitlab/runtime')\\n     require_dependency Rails.root.join('lib/gitlab/patch/legacy_database_config')\\n \\n-    config.autoloader = :zeitwerk\\n-\\n     # To 
be removed in 15.0\\n     # This preload is needed to convert legacy `database.yml`\\n     # from `production: adapter: postgresql`\\n@@ -190,11 +190,12 @@ class Application \\u003c Rails::Application\\n     # regardless if schema_search_path is set, or not.\\n     config.active_record.dump_schemas = :all\\n \\n-    # Use new connection handling so that we can use Rails 6.1+ multiple\\n-    # database support.\\n-    config.active_record.legacy_connection_handling = false\\n-\\n-    config.action_mailer.delivery_job = \\\"ActionMailer::MailDeliveryJob\\\"\\n+    # Override default Active Record settings\\n+    # We cannot do this in an initializer because some models are already loaded by then\\n+    config.active_record.cache_versioning = false\\n+    config.active_record.collection_cache_versioning = false\\n+    config.active_record.has_many_inversing = false\\n+    config.active_record.belongs_to_required_by_default = false\\n \\n     # Enable the asset pipeline\\n     config.assets.enabled = true\\n@@ -380,6 +381,7 @@ class Application \\u003c Rails::Application\\n     config.cache_store = :redis_cache_store, Gitlab::Redis::Cache.active_support_config\\n \\n     config.active_job.queue_adapter = :sidekiq\\n+    config.action_mailer.deliver_later_queue_name = :mailers\\n \\n     # This is needed for gitlab-shell\\n     ENV['GITLAB_PATH_OUTSIDE_HOOK'] = ENV['PATH']\\n\"},{\"old_path\":\"config/feature_flags/development/linear_group_ancestor_scopes.yml\",\"new_path\":\"config/feature_flags/development/api_v3_commits_skip_diff_files.yml\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"@@ -1,8 +1,8 @@\\n ---\\n-name: linear_group_ancestor_scopes\\n-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/70495\\n-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/341115\\n-milestone: '14.4'\\n+name: api_v3_commits_skip_diff_files\\n+introduced_by_url: 
https://gitlab.com/gitlab-org/gitlab/-/merge_requests/67647\\n+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/344617\\n+milestone: '14.5'\\n type: development\\n-group: group::access\\n+group: group::integrations\\n default_enabled: false\\n\"},{\"old_path\":\"config/feature_flags/development/ci_new_artifact_file_reader.yml\",\"new_path\":\"config/feature_flags/development/configure_sentry_in_application_settings.yml\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"@@ -1,8 +1,8 @@\\n ---\\n-name: ci_new_artifact_file_reader\\n-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/46552\\n-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/273755\\n-milestone: '13.6'\\n+name: configure_sentry_in_application_settings\\n+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/73381\\n+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/344832\\n+milestone: '14.5'\\n type: development\\n-group: group::pipeline authoring\\n-default_enabled: true\\n+group: group::pipeline execution\\n+default_enabled: false\\n\"},{\"old_path\":\"config/feature_flags/development/container_registry_expiration_policies_caching.yml\",\"new_path\":\"config/feature_flags/development/container_registry_expiration_policies_caching.yml\",\"a_mode\":\"100644\",\"b_mode\":\"0\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":true,\"diff\":\"@@ -1,8 +0,0 @@\\n----\\n-name: container_registry_expiration_policies_caching\\n-introduced_by_url:\\n-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/340606\\n-milestone: '14.3'\\n-type: development\\n-group: group::package\\n-default_enabled: 
false\\n\"},{\"old_path\":\"config/initializers/0_acts_as_taggable.rb\",\"new_path\":\"config/initializers/1_acts_as_taggable.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"@@ -9,3 +9,9 @@\\n # validate that counter cache is disabled\\n raise \\\"Counter cache is not disabled\\\" if\\n     ActsAsTaggableOn::Tagging.reflections[\\\"tag\\\"].options[:counter_cache]\\n+\\n+# Redirects retrieve_connection to use Ci::ApplicationRecord's connection\\n+[::ActsAsTaggableOn::Tag, ::ActsAsTaggableOn::Tagging].each do |model|\\n+  model.connection_specification_name = Ci::ApplicationRecord.connection_specification_name\\n+  model.singleton_class.delegate :connection, :sticking, to: '::Ci::ApplicationRecord'\\n+end\\n\"},{\"old_path\":\"config/initializers/action_view.rb\",\"new_path\":\"config/initializers/action_view.rb\",\"a_mode\":\"100644\",\"b_mode\":\"0\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":true,\"diff\":\"@@ -1,7 +0,0 @@\\n-# frozen_string_literal: true\\n-\\n-# This file was introduced during upgrading Rails from 5.2 to 6.0.\\n-# This file can be removed when `config.load_defaults 6.0` is introduced.\\n-\\n-# Don't force requests from old versions of IE to be UTF-8 encoded.\\n-Rails.application.config.action_view.default_enforce_utf8 = false\\n\"},{\"old_path\":\"config/initializers/cookies_serializer.rb\",\"new_path\":\"config/initializers/cookies_serializer.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -2,6 +2,5 @@\\n \\n # Be sure to restart your server when you modify this file.\\n \\n-Rails.application.config.action_dispatch.use_cookies_with_metadata = true\\n Rails.application.config.action_dispatch.cookies_serializer =\\n   Gitlab::Utils.to_boolean(ENV['USE_UNSAFE_HYBRID_COOKIES']) ? 
:hybrid : :json\\n\"},{\"old_path\":\"config/initializers/database_query_analyzers.rb\",\"new_path\":\"config/initializers/database_query_analyzers.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,4 @@\\n+# frozen_string_literal: true\\n+\\n+# Currently we register validator only for `dev` or `test` environment\\n+Gitlab::Database::QueryAnalyzer.new.hook! if Gitlab.dev_or_test_env?\\n\"},{\"old_path\":\"config/initializers/new_framework_defaults.rb\",\"new_path\":\"config/initializers/new_framework_defaults.rb\",\"a_mode\":\"100644\",\"b_mode\":\"0\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":true,\"diff\":\"@@ -1,24 +0,0 @@\\n-# frozen_string_literal: true\\n-\\n-# Remove this `if` condition when upgraded to rails 5.0.\\n-# The body must be kept.\\n-# Be sure to restart your server when you modify this file.\\n-#\\n-# This file contains migration options to ease your Rails 5.0 upgrade.\\n-#\\n-# Once upgraded flip defaults one by one to migrate to the new default.\\n-#\\n-# Read the Guide for Upgrading Ruby on Rails for more info on each option.\\n-\\n-# Enable per-form CSRF tokens. Previous versions had false.\\n-Rails.application.config.action_controller.per_form_csrf_tokens = false\\n-\\n-# Enable origin-checking CSRF mitigation. Previous versions had false.\\n-Rails.application.config.action_controller.forgery_protection_origin_check = false\\n-\\n-# Make Ruby 2.4 preserve the timezone of the receiver when calling `to_time`.\\n-# Previous versions had false.\\n-ActiveSupport.to_time_preserves_timezone = false\\n-\\n-# Require `belongs_to` associations by default. 
Previous versions had false.\\n-Rails.application.config.active_record.belongs_to_required_by_default = false\\n\"},{\"old_path\":\"config/initializers_before_autoloader/000_override_framework_defaults.rb\",\"new_path\":\"config/initializers_before_autoloader/000_override_framework_defaults.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,35 @@\\n+# frozen_string_literal: true\\n+\\n+# This contains configuration from Rails upgrades to override the new defaults so that we\\n+# keep existing behavior.\\n+#\\n+# For boolean values, the new default is the opposite of the value being set in this file.\\n+# For other types, the new default is noted in the comments. These are also documented in\\n+# https://guides.rubyonrails.org/configuring.html#results-of-config-load-defaults\\n+#\\n+# To switch a setting to the new default value, we just need to delete the specific line here.\\n+\\n+Rails.application.configure do\\n+  # Rails 6.1\\n+  config.action_dispatch.cookies_same_site_protection = nil # New default is :lax\\n+  config.action_dispatch.ssl_default_redirect_status = nil # New default is 308\\n+  ActiveSupport.utc_to_local_returns_utc_offset_times = false\\n+  config.action_controller.urlsafe_csrf_tokens = false\\n+  config.action_view.preload_links_header = false\\n+\\n+  # Rails 5.2\\n+  config.action_dispatch.use_authenticated_cookie_encryption = false\\n+  config.active_support.use_authenticated_message_encryption = false\\n+  config.active_support.hash_digest_class = ::Digest::MD5 # New default is ::Digest::SHA1\\n+  config.action_controller.default_protect_from_forgery = false\\n+  config.action_view.form_with_generates_ids = false\\n+\\n+  # Rails 5.1\\n+  config.assets.unknown_asset_fallback = true\\n+\\n+  # Rails 5.0\\n+  config.action_controller.per_form_csrf_tokens = false\\n+  config.action_controller.forgery_protection_origin_check = false\\n+  
ActiveSupport.to_time_preserves_timezone = false\\n+  config.ssl_options = {} # New default is { hsts: { subdomains: true } }\\n+end\\n\"},{\"old_path\":\"config/plugins/graphql_known_operations_plugin.js\",\"new_path\":\"config/plugins/graphql_known_operations_plugin.js\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,112 @@\\n+/* eslint-disable no-underscore-dangle */\\n+const yaml = require('js-yaml');\\n+\\n+const PLUGIN_NAME = 'GraphqlKnownOperationsPlugin';\\n+const GRAPHQL_PATH_REGEX = /(query|mutation)\\\\.graphql$/;\\n+const OPERATION_NAME_SOURCE_REGEX = /^\\\\s*module\\\\.exports.*oneQuery.*\\\"(\\\\w+)\\\"/gm;\\n+\\n+/**\\n+ * Returns whether a given webpack module is a \\\"graphql\\\" module\\n+ */\\n+const isGraphqlModule = (module) =\\u003e {\\n+  return GRAPHQL_PATH_REGEX.test(module.resource);\\n+};\\n+\\n+/**\\n+ * Returns graphql operation names we can parse from the given module\\n+ *\\n+ * Since webpack gives us the source **after** the graphql-tag/loader runs,\\n+ * we can look for specific lines we're guaranteed to have from the\\n+ * graphql-tag/loader.\\n+ */\\n+const getOperationNames = (module) =\\u003e {\\n+  const originalSource = module.originalSource();\\n+\\n+  if (!originalSource) {\\n+    return [];\\n+  }\\n+\\n+  const matches = originalSource.source().toString().matchAll(OPERATION_NAME_SOURCE_REGEX);\\n+\\n+  return Array.from(matches).map((match) =\\u003e match[1]);\\n+};\\n+\\n+const createFileContents = (knownOperations) =\\u003e {\\n+  const sourceData = Array.from(knownOperations.values()).sort((a, b) =\\u003e a.localeCompare(b));\\n+\\n+  return yaml.dump(sourceData);\\n+};\\n+\\n+/**\\n+ * Creates a webpack4 compatible \\\"RawSource\\\"\\n+ *\\n+ * Inspired from https://sourcegraph.com/github.com/FormidableLabs/webpack-stats-plugin@e050ff8c362d5ddd45c66ade724d4a397ace3e5c/-/blob/lib/stats-writer-plugin.js?L144\\n+ */\\n+const 
createWebpackRawSource = (source) =\\u003e {\\n+  const buff = Buffer.from(source, 'utf-8');\\n+\\n+  return {\\n+    source() {\\n+      return buff;\\n+    },\\n+    size() {\\n+      return buff.length;\\n+    },\\n+  };\\n+};\\n+\\n+const onSucceedModule = ({ module, knownOperations }) =\\u003e {\\n+  if (!isGraphqlModule(module)) {\\n+    return;\\n+  }\\n+\\n+  getOperationNames(module).forEach((x) =\\u003e knownOperations.add(x));\\n+};\\n+\\n+const onCompilerEmit = ({ compilation, knownOperations, filename }) =\\u003e {\\n+  const contents = createFileContents(knownOperations);\\n+  const source = createWebpackRawSource(contents);\\n+\\n+  const asset = compilation.getAsset(filename);\\n+  if (asset) {\\n+    compilation.updateAsset(filename, source);\\n+  } else {\\n+    compilation.emitAsset(filename, source);\\n+  }\\n+};\\n+\\n+/**\\n+ * Webpack plugin that outputs a file containing known graphql operations.\\n+ *\\n+ * A lot of the mechanices was expired from [this example][1].\\n+ *\\n+ * [1]: https://sourcegraph.com/github.com/FormidableLabs/webpack-stats-plugin@e050ff8c362d5ddd45c66ade724d4a397ace3e5c/-/blob/lib/stats-writer-plugin.js?L136\\n+ */\\n+class GraphqlKnownOperationsPlugin {\\n+  constructor({ filename }) {\\n+    this._filename = filename;\\n+  }\\n+\\n+  apply(compiler) {\\n+    const knownOperations = new Set();\\n+\\n+    compiler.hooks.emit.tap(PLUGIN_NAME, (compilation) =\\u003e {\\n+      onCompilerEmit({\\n+        compilation,\\n+        knownOperations,\\n+        filename: this._filename,\\n+      });\\n+    });\\n+\\n+    compiler.hooks.compilation.tap(PLUGIN_NAME, (compilation) =\\u003e {\\n+      compilation.hooks.succeedModule.tap(PLUGIN_NAME, (module) =\\u003e {\\n+        onSucceedModule({\\n+          module,\\n+          knownOperations,\\n+        });\\n+      });\\n+    });\\n+  }\\n+}\\n+\\n+module.exports = 
GraphqlKnownOperationsPlugin;\\n\"},{\"old_path\":\"config/webpack.config.js\",\"new_path\":\"config/webpack.config.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -24,6 +24,7 @@ const IS_JH = require('./helpers/is_jh_env');\\n const vendorDllHash = require('./helpers/vendor_dll_hash');\\n \\n const MonacoWebpackPlugin = require('./plugins/monaco_webpack');\\n+const GraphqlKnownOperationsPlugin = require('./plugins/graphql_known_operations_plugin');\\n \\n const ROOT_PATH = path.resolve(__dirname, '..');\\n const SUPPORTED_BROWSERS = fs.readFileSync(path.join(ROOT_PATH, '.browserslistrc'), 'utf-8');\\n@@ -456,6 +457,8 @@ module.exports = {\\n       globalAPI: true,\\n     }),\\n \\n+    new GraphqlKnownOperationsPlugin({ filename: 'graphql_known_operations.yml' }),\\n+\\n     // fix legacy jQuery plugins which depend on globals\\n     new webpack.ProvidePlugin({\\n       $: 'jquery',\\n\"},{\"old_path\":\"db/migrate/20211021125908_add_sentry_settings_to_application_settings.rb\",\"new_path\":\"db/migrate/20211021125908_add_sentry_settings_to_application_settings.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,12 @@\\n+# frozen_string_literal: true\\n+\\n+class AddSentrySettingsToApplicationSettings \\u003c Gitlab::Database::Migration[1.0]\\n+  # rubocop:disable Migration/AddLimitToTextColumns\\n+  def change\\n+    add_column :application_settings, :sentry_enabled, :boolean, default: false, null: false\\n+    add_column :application_settings, :sentry_dsn,            :text\\n+    add_column :application_settings, :sentry_clientside_dsn, :text\\n+    add_column :application_settings, :sentry_environment,    :text\\n+  end\\n+  # rubocop:enable 
Migration/AddLimitToTextColumns\\n+end\\n\"},{\"old_path\":\"db/migrate/20211021134458_add_limits_to_sentry_settings_on_application_settings.rb\",\"new_path\":\"db/migrate/20211021134458_add_limits_to_sentry_settings_on_application_settings.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,17 @@\\n+# frozen_string_literal: true\\n+\\n+class AddLimitsToSentrySettingsOnApplicationSettings \\u003c Gitlab::Database::Migration[1.0]\\n+  disable_ddl_transaction!\\n+\\n+  def up\\n+    add_text_limit :application_settings, :sentry_dsn,            255\\n+    add_text_limit :application_settings, :sentry_clientside_dsn, 255\\n+    add_text_limit :application_settings, :sentry_environment,    255\\n+  end\\n+\\n+  def down\\n+    remove_text_limit :application_settings, :sentry_dsn\\n+    remove_text_limit :application_settings, :sentry_clientside_dsn\\n+    remove_text_limit :application_settings, :sentry_environment\\n+  end\\n+end\\n\"},{\"old_path\":\"db/post_migrate/20211005194425_schedule_requirements_migration.rb\",\"new_path\":\"db/post_migrate/20211005194425_schedule_requirements_migration.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,35 @@\\n+# frozen_string_literal: true\\n+\\n+class ScheduleRequirementsMigration \\u003c Gitlab::Database::Migration[1.0]\\n+  DOWNTIME = false\\n+\\n+  # 2021-10-05 requirements count: ~12500\\n+  #\\n+  # Using 30 as batch size and 120 seconds default interval will produce:\\n+  # ~420 jobs - taking ~14 hours to perform\\n+  BATCH_SIZE = 30\\n+\\n+  MIGRATION = 'MigrateRequirementsToWorkItems'\\n+\\n+  disable_ddl_transaction!\\n+\\n+  class Requirement \\u003c ActiveRecord::Base\\n+    include EachBatch\\n+\\n+    self.table_name = 'requirements'\\n+  end\\n+\\n+  def up\\n+    queue_background_migration_jobs_by_range_at_intervals(\\n+      Requirement.where(issue_id: 
nil),\\n+      MIGRATION,\\n+      2.minutes,\\n+      batch_size: BATCH_SIZE,\\n+      track_jobs: true\\n+    )\\n+  end\\n+\\n+  def down\\n+    # NO OP\\n+  end\\n+end\\n\"},{\"old_path\":\"db/schema_migrations/20211005194425\",\"new_path\":\"db/schema_migrations/20211005194425\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1 @@\\n+6647e94d315c76629f9726e26bafd124fb2fed361568d65315e7c7557f8d9ecf\\n\\\\ No newline at end of file\\n\"},{\"old_path\":\"db/schema_migrations/20211021125908\",\"new_path\":\"db/schema_migrations/20211021125908\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1 @@\\n+d6fbe3efc3e45b750d82e277e30b7b0048b960d9f9f5b4f7c6a7a1ed869e76b5\\n\\\\ No newline at end of file\\n\"},{\"old_path\":\"db/schema_migrations/20211021134458\",\"new_path\":\"db/schema_migrations/20211021134458\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1 @@\\n+1baa8db0d42a8d99e48b61930f5c42d1af5f86555488419b6551e1dbf417d3ad\\n\\\\ No newline at end of file\\n\"},{\"old_path\":\"db/structure.sql\",\"new_path\":\"db/structure.sql\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -10457,6 +10457,10 @@ CREATE TABLE application_settings (\\n     encrypted_content_validation_api_key bytea,\\n     encrypted_content_validation_api_key_iv bytea,\\n     content_validation_endpoint_enabled boolean DEFAULT false NOT NULL,\\n+    sentry_enabled boolean DEFAULT false NOT NULL,\\n+    sentry_dsn text,\\n+    sentry_clientside_dsn text,\\n+    sentry_environment text,\\n     CONSTRAINT app_settings_container_reg_cleanup_tags_max_list_size_positive CHECK ((container_registry_cleanup_tags_service_max_list_size \\u003e= 0)),\\n     CONSTRAINT 
app_settings_dep_proxy_ttl_policies_worker_capacity_positive CHECK ((dependency_proxy_ttl_group_policy_worker_capacity \\u003e= 0)),\\n     CONSTRAINT app_settings_ext_pipeline_validation_service_url_text_limit CHECK ((char_length(external_pipeline_validation_service_url) \\u003c= 255)),\\n@@ -10465,9 +10469,12 @@ CREATE TABLE application_settings (\\n     CONSTRAINT app_settings_yaml_max_size_positive CHECK ((max_yaml_size_bytes \\u003e 0)),\\n     CONSTRAINT check_17d9558205 CHECK ((char_length((kroki_url)::text) \\u003c= 1024)),\\n     CONSTRAINT check_2dba05b802 CHECK ((char_length(gitpod_url) \\u003c= 255)),\\n+    CONSTRAINT check_3def0f1829 CHECK ((char_length(sentry_clientside_dsn) \\u003c= 255)),\\n+    CONSTRAINT check_4f8b811780 CHECK ((char_length(sentry_dsn) \\u003c= 255)),\\n     CONSTRAINT check_51700b31b5 CHECK ((char_length(default_branch_name) \\u003c= 255)),\\n     CONSTRAINT check_57123c9593 CHECK ((char_length(help_page_documentation_base_url) \\u003c= 255)),\\n     CONSTRAINT check_5a84c3ffdc CHECK ((char_length(content_validation_endpoint_url) \\u003c= 255)),\\n+    CONSTRAINT check_5bcba483c4 CHECK ((char_length(sentry_environment) \\u003c= 255)),\\n     CONSTRAINT check_718b4458ae CHECK ((char_length(personal_access_token_prefix) \\u003c= 20)),\\n     CONSTRAINT check_7227fad848 CHECK ((char_length(rate_limiting_response_text) \\u003c= 255)),\\n     CONSTRAINT check_85a39b68ff CHECK ((char_length(encrypted_ci_jwt_signing_key_iv) \\u003c= 255)),\\n\"},{\"old_path\":\"doc/api/graphql/reference/index.md\",\"new_path\":\"doc/api/graphql/reference/index.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -2820,6 +2820,28 @@ Input type: `IssueSetConfidentialInput`\\n | \\u003ca id=\\\"mutationissuesetconfidentialerrors\\\"\\u003e\\u003c/a\\u003e`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. 
|\\n | \\u003ca id=\\\"mutationissuesetconfidentialissue\\\"\\u003e\\u003c/a\\u003e`issue` | [`Issue`](#issue) | Issue after mutation. |\\n \\n+### `Mutation.issueSetCrmContacts`\\n+\\n+Input type: `IssueSetCrmContactsInput`\\n+\\n+#### Arguments\\n+\\n+| Name | Type | Description |\\n+| ---- | ---- | ----------- |\\n+| \\u003ca id=\\\"mutationissuesetcrmcontactsclientmutationid\\\"\\u003e\\u003c/a\\u003e`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |\\n+| \\u003ca id=\\\"mutationissuesetcrmcontactscrmcontactids\\\"\\u003e\\u003c/a\\u003e`crmContactIds` | [`[CustomerRelationsContactID!]!`](#customerrelationscontactid) | Customer relations contact IDs to set. Replaces existing contacts by default. |\\n+| \\u003ca id=\\\"mutationissuesetcrmcontactsiid\\\"\\u003e\\u003c/a\\u003e`iid` | [`String!`](#string) | IID of the issue to mutate. |\\n+| \\u003ca id=\\\"mutationissuesetcrmcontactsoperationmode\\\"\\u003e\\u003c/a\\u003e`operationMode` | [`MutationOperationMode`](#mutationoperationmode) | Changes the operation mode. Defaults to REPLACE. |\\n+| \\u003ca id=\\\"mutationissuesetcrmcontactsprojectpath\\\"\\u003e\\u003c/a\\u003e`projectPath` | [`ID!`](#id) | Project the issue to mutate is in. |\\n+\\n+#### Fields\\n+\\n+| Name | Type | Description |\\n+| ---- | ---- | ----------- |\\n+| \\u003ca id=\\\"mutationissuesetcrmcontactsclientmutationid\\\"\\u003e\\u003c/a\\u003e`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |\\n+| \\u003ca id=\\\"mutationissuesetcrmcontactserrors\\\"\\u003e\\u003c/a\\u003e`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |\\n+| \\u003ca id=\\\"mutationissuesetcrmcontactsissue\\\"\\u003e\\u003c/a\\u003e`issue` | [`Issue`](#issue) | Issue after mutation. 
|\\n+\\n ### `Mutation.issueSetDueDate`\\n \\n Input type: `IssueSetDueDateInput`\\n\"},{\"old_path\":\"doc/api/packages/maven.md\",\"new_path\":\"doc/api/packages/maven.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -36,13 +36,13 @@ GET packages/maven/*path/:file_name\\n | `file_name`  | string | yes | The name of the Maven package file. |\\n \\n ```shell\\n-curl --header \\\"Private-Token: \\u003cpersonal_access_token\\u003e\\\" \\\"https://gitlab.example.com/api/v4/packages/maven/foo/bar/baz/mypkg-1.0-SNAPSHOT.jar\\\"\\n+curl --header \\\"Private-Token: \\u003cpersonal_access_token\\u003e\\\" \\\"https://gitlab.example.com/api/v4/packages/maven/foo/bar/mypkg/1.0-SNAPSHOT/mypkg-1.0-SNAPSHOT.jar\\\"\\n ```\\n \\n To write the output to file:\\n \\n ```shell\\n-curl --header \\\"Private-Token: \\u003cpersonal_access_token\\u003e\\\" \\\"https://gitlab.example.com/api/v4/packages/maven/foo/bar/baz/mypkg-1.0-SNAPSHOT.jar\\\" \\u003e\\u003e mypkg-1.0-SNAPSHOT.jar\\n+curl --header \\\"Private-Token: \\u003cpersonal_access_token\\u003e\\\" \\\"https://gitlab.example.com/api/v4/packages/maven/foo/bar/mypkg/1.0-SNAPSHOT/mypkg-1.0-SNAPSHOT.jar\\\" \\u003e\\u003e mypkg-1.0-SNAPSHOT.jar\\n ```\\n \\n This writes the downloaded file to `mypkg-1.0-SNAPSHOT.jar` in the current directory.\\n@@ -63,13 +63,13 @@ GET groups/:id/-/packages/maven/*path/:file_name\\n | `file_name`  | string | yes | The name of the Maven package file. 
|\\n \\n ```shell\\n-curl --header \\\"Private-Token: \\u003cpersonal_access_token\\u003e\\\" \\\"https://gitlab.example.com/api/v4/groups/1/-/packages/maven/foo/bar/baz/mypkg-1.0-SNAPSHOT.jar\\\"\\n+curl --header \\\"Private-Token: \\u003cpersonal_access_token\\u003e\\\" \\\"https://gitlab.example.com/api/v4/groups/1/-/packages/maven/foo/bar/mypkg/1.0-SNAPSHOT/mypkg-1.0-SNAPSHOT.jar\\\"\\n ```\\n \\n To write the output to file:\\n \\n ```shell\\n-curl --header \\\"Private-Token: \\u003cpersonal_access_token\\u003e\\\" \\\"https://gitlab.example.com/api/v4/groups/1/-/packages/maven/foo/bar/baz/mypkg-1.0-SNAPSHOT.jar\\\" \\u003e\\u003e mypkg-1.0-SNAPSHOT.jar\\n+curl --header \\\"Private-Token: \\u003cpersonal_access_token\\u003e\\\" \\\"https://gitlab.example.com/api/v4/groups/1/-/packages/maven/foo/bar/mypkg/1.0-SNAPSHOT/mypkg-1.0-SNAPSHOT.jar\\\" \\u003e\\u003e mypkg-1.0-SNAPSHOT.jar\\n ```\\n \\n This writes the downloaded file to `mypkg-1.0-SNAPSHOT.jar` in the current directory.\\n@@ -90,13 +90,13 @@ GET projects/:id/packages/maven/*path/:file_name\\n | `file_name`  | string | yes | The name of the Maven package file. 
|\\n \\n ```shell\\n-curl --header \\\"Private-Token: \\u003cpersonal_access_token\\u003e\\\" \\\"https://gitlab.example.com/api/v4/projects/1/packages/maven/foo/bar/baz/mypkg-1.0-SNAPSHOT.jar\\\"\\n+curl --header \\\"Private-Token: \\u003cpersonal_access_token\\u003e\\\" \\\"https://gitlab.example.com/api/v4/projects/1/packages/maven/foo/bar/mypkg/1.0-SNAPSHOT/mypkg-1.0-SNAPSHOT.jar\\\"\\n ```\\n \\n To write the output to file:\\n \\n ```shell\\n-curl --header \\\"Private-Token: \\u003cpersonal_access_token\\u003e\\\" \\\"https://gitlab.example.com/api/v4/projects/1/packages/maven/foo/bar/baz/mypkg-1.0-SNAPSHOT.jar\\\" \\u003e\\u003e mypkg-1.0-SNAPSHOT.jar\\n+curl --header \\\"Private-Token: \\u003cpersonal_access_token\\u003e\\\" \\\"https://gitlab.example.com/api/v4/projects/1/packages/maven/foo/bar/mypkg/1.0-SNAPSHOT/mypkg-1.0-SNAPSHOT.jar\\\" \\u003e\\u003e mypkg-1.0-SNAPSHOT.jar\\n ```\\n \\n This writes the downloaded file to `mypkg-1.0-SNAPSHOT.jar` in the current directory.\\n@@ -120,5 +120,5 @@ PUT projects/:id/packages/maven/*path/:file_name\\n curl --request PUT \\\\\\n      --upload-file path/to/mypkg-1.0-SNAPSHOT.pom \\\\\\n      --header \\\"Private-Token: \\u003cpersonal_access_token\\u003e\\\" \\\\\\n-     \\\"https://gitlab.example.com/api/v4/projects/1/packages/maven/foo/bar/baz/mypkg-1.0-SNAPSHOT.pom\\\"\\n+     \\\"https://gitlab.example.com/api/v4/projects/1/packages/maven/foo/bar/mypkg/1.0-SNAPSHOT/mypkg-1.0-SNAPSHOT.pom\\\"\\n ```\\n\"},{\"old_path\":\"doc/ci/jobs/index.md\",\"new_path\":\"doc/ci/jobs/index.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -82,6 +82,20 @@ For example:\\n \\n ![Pipeline mini graph sorting](img/pipelines_mini_graph_sorting.png)\\n \\n+## Unavailable names for jobs\\n+\\n+You can't use these keywords as job names:\\n+\\n+- `image`\\n+- `services`\\n+- `stages`\\n+- `types`\\n+- `before_script`\\n+- `after_script`\\n+- 
`variables`\\n+- `cache`\\n+- `include`\\n+\\n ## Group jobs in a pipeline\\n \\n If you have many similar jobs, your [pipeline graph](../pipelines/index.md#visualize-pipelines) becomes long and hard\\n\"},{\"old_path\":\"doc/ci/quick_start/index.md\",\"new_path\":\"doc/ci/quick_start/index.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -141,7 +141,7 @@ The pipeline starts when the commit is committed.\\n - You can also use [CI/CD configuration visualization](../pipeline_editor/index.md#visualize-ci-configuration) to\\n   view a graphical representation of your `.gitlab-ci.yml` file.\\n - Each job contains scripts and stages:\\n-  - The [`default`](../yaml/index.md#custom-default-keyword-values) keyword is for\\n+  - The [`default`](../yaml/index.md#default) keyword is for\\n     custom defaults, for example with [`before_script`](../yaml/index.md#before_script)\\n     and [`after_script`](../yaml/index.md#after_script).\\n   - [`stage`](../yaml/index.md#stage) describes the sequential execution of jobs.\\n\"},{\"old_path\":\"doc/ci/runners/index.md\",\"new_path\":\"doc/ci/runners/index.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -10,7 +10,7 @@ type: reference\\n If you are using self-managed GitLab or you want to use your own runners on GitLab.com, you can\\n [install and configure your own runners](https://docs.gitlab.com/runner/install/).\\n \\n-If you are using GitLab SaaS (GitLab.com), your CI jobs automatically run on runners in the GitLab Build Cloud.\\n+If you are using GitLab SaaS (GitLab.com), your CI jobs automatically run on runners in the GitLab Runner Cloud.\\n No configuration is required. 
Your jobs can run on:\\n \\n - [Linux runners](build_cloud/linux_build_cloud.md).\\n\"},{\"old_path\":\"doc/ci/runners/runner_cloud/linux_runner_cloud.md\",\"new_path\":\"doc/ci/runners/runner_cloud/linux_runner_cloud.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -35,7 +35,7 @@ These runners share a [distributed cache](https://docs.gitlab.com/runner/configu\\n \\n ## Pre-clone script\\n \\n-Build Cloud runners for Linux provide a way to run commands in a CI\\n+Cloud runners for Linux provide a way to run commands in a CI\\n job before the runner attempts to run `git init` and `git fetch` to\\n download a GitLab repository. The\\n [`pre_clone_script`](https://docs.gitlab.com/runner/configuration/advanced-configuration.html#the-runners-section)\\n\"},{\"old_path\":\"doc/ci/runners/runner_cloud/macos/environment.md\",\"new_path\":\"doc/ci/runners/runner_cloud/macos/environment.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -4,9 +4,9 @@ group: Runner\\n info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments\\n ---\\n \\n-# VM instances and images for Build Cloud for macOS **(FREE)**\\n+# VM instances and images for Runner Cloud for macOS **(FREE)**\\n \\n-When you use the Build Cloud for macOS:\\n+When you use the Runner Cloud for macOS:\\n \\n - Each of your jobs runs in a newly provisioned VM, which is dedicated to the specific job.\\n - The VM is active only for the duration of the job and immediately deleted.\\n\"},{\"old_path\":\"doc/ci/runners/runner_cloud/macos_runner_cloud.md\",\"new_path\":\"doc/ci/runners/runner_cloud/macos_runner_cloud.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -11,12 +11,12 
@@ Use these runners to build, test, and deploy apps for the Apple ecosystem (macOS\\n of all the capabilities of the GitLab single DevOps platform and not have to manage or operate a\\n build environment.\\n \\n-Build Cloud runners for macOS are in [Beta](https://about.gitlab.com/handbook/product/gitlab-the-product/#beta)\\n+Cloud runners for macOS are in [Beta](https://about.gitlab.com/handbook/product/gitlab-the-product/#beta)\\n and shouldn't be relied upon for mission-critical production jobs.\\n \\n ## Quickstart\\n \\n-To start using Build Cloud for macOS Beta, you must submit an access request [issue](https://gitlab.com/gitlab-com/macos-buildcloud-runners-beta/-/issues/new?issuable_template=beta_access_request). After your\\n+To start using Runner Cloud for macOS Beta, you must submit an access request [issue](https://gitlab.com/gitlab-com/macos-buildcloud-runners-beta/-/issues/new?issuable_template=beta_access_request). After your\\n access has been granted and your build environment configured, you must configure your\\n `.gitlab-ci.yml` pipeline file:\\n \\n\"},{\"old_path\":\"doc/ci/yaml/includes.md\",\"new_path\":\"doc/ci/yaml/includes.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -69,7 +69,7 @@ You can include an array of configuration files:\\n \\n ## Use `default` configuration from an included configuration file\\n \\n-You can define a [`default`](index.md#custom-default-keyword-values) section in a\\n+You can define a [`default`](index.md#default) section in a\\n configuration file. 
When you use a `default` section with the `include` keyword, the defaults apply to\\n all jobs in the pipeline.\\n \\n\"},{\"old_path\":\"doc/ci/yaml/index.md\",\"new_path\":\"doc/ci/yaml/index.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"\"},{\"old_path\":\"doc/ci/yaml/script.md\",\"new_path\":\"doc/ci/yaml/script.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -62,7 +62,7 @@ job:\\n ## Set a default `before_script` or `after_script` for all jobs\\n \\n You can use [`before_script`](index.md#before_script) and [`after_script`](index.md#after_script)\\n-with [`default`](index.md#custom-default-keyword-values):\\n+with [`default`](index.md#default):\\n \\n - Use `before_script` with `default` to define a default array of commands that\\n   should run before the `script` commands in all jobs.\\n\"},{\"old_path\":\"doc/development/avoiding_downtime_in_migrations.md\",\"new_path\":\"doc/development/avoiding_downtime_in_migrations.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -377,7 +377,181 @@ ensures that no downtime is needed.\\n \\n This operation does not require downtime.\\n \\n-## Data Migrations\\n+## Migrating `integer` primary keys to `bigint`\\n+\\n+To [prevent the overflow risk](https://gitlab.com/groups/gitlab-org/-/epics/4785) for some tables\\n+with `integer` primary key (PK), we have to migrate their PK to `bigint`. The process to do this\\n+without downtime and causing too much load on the database is described below.\\n+\\n+### Initialize the conversion and start migrating existing data (release N)\\n+\\n+To start the process, add a regular migration to create the new `bigint` columns. Use the provided\\n+`initialize_conversion_of_integer_to_bigint` helper. 
The helper also creates a database trigger\\n+to keep in sync both columns for any new records ([see an example](https://gitlab.com/gitlab-org/gitlab/-/blob/41fbe34a4725a4e357a83fda66afb382828767b2/db/migrate/20210608072312_initialize_conversion_of_ci_stages_to_bigint.rb)):\\n+\\n+```ruby\\n+class InitializeConversionOfCiStagesToBigint \\u003c ActiveRecord::Migration[6.1]\\n+  include Gitlab::Database::MigrationHelpers\\n+\\n+  TABLE = :ci_stages\\n+  COLUMNS = %i(id)\\n+\\n+  def up\\n+    initialize_conversion_of_integer_to_bigint(TABLE, COLUMNS)\\n+  end\\n+\\n+  def down\\n+    revert_initialize_conversion_of_integer_to_bigint(TABLE, COLUMNS)\\n+  end\\n+end\\n+```\\n+\\n+Ignore the new `bigint` columns:\\n+\\n+```ruby\\n+module Ci\\n+  class Stage \\u003c Ci::ApplicationRecord\\n+    include IgnorableColumns\\n+    ignore_column :id_convert_to_bigint, remove_with: '14.2', remove_after: '2021-08-22'\\n+  end\\n+```\\n+\\n+To migrate existing data, we introduced new type of _batched background migrations_.\\n+Unlike the classic background migrations, built on top of Sidekiq, batched background migrations\\n+don't have to enqueue and schedule all the background jobs at the beginning.\\n+They also have other advantages, like automatic tuning of the batch size, better progress visibility,\\n+and collecting metrics. 
To start the process, use the provided `backfill_conversion_of_integer_to_bigint`\\n+helper ([example](https://gitlab.com/gitlab-org/gitlab/-/blob/41fbe34a4725a4e357a83fda66afb382828767b2/db/migrate/20210608072346_backfill_ci_stages_for_bigint_conversion.rb)):\\n+\\n+```ruby\\n+class BackfillCiStagesForBigintConversion \\u003c ActiveRecord::Migration[6.1]\\n+  include Gitlab::Database::MigrationHelpers\\n+\\n+  TABLE = :ci_stages\\n+  COLUMNS = %i(id)\\n+\\n+  def up\\n+    backfill_conversion_of_integer_to_bigint(TABLE, COLUMNS)\\n+  end\\n+\\n+  def down\\n+    revert_backfill_conversion_of_integer_to_bigint(TABLE, COLUMNS)\\n+  end\\n+end\\n+```\\n+\\n+### Monitor the background migration\\n+\\n+Check how the migration is performing while it's running. Multiple ways to do this are described below.\\n+\\n+#### High-level status of batched background migrations\\n+\\n+See how to [check the status of batched background migrations](../update/index.md#checking-for-background-migrations-before-upgrading).\\n+\\n+#### Query the database\\n+\\n+We can query the related database tables directly. 
Requires access to read-only replica.\\n+Example queries:\\n+\\n+```sql\\n+-- Get details for batched background migration for given table\\n+SELECT * FROM batched_background_migrations WHERE table_name = 'namespaces'\\\\gx\\n+\\n+-- Get count of batched background migration jobs by status for given table\\n+SELECT\\n+  batched_background_migrations.id, batched_background_migration_jobs.status, COUNT(*)\\n+FROM\\n+  batched_background_migrations\\n+  JOIN batched_background_migration_jobs ON batched_background_migrations.id = batched_background_migration_jobs.batched_background_migration_id\\n+WHERE\\n+  table_name = 'namespaces'\\n+GROUP BY\\n+  batched_background_migrations.id, batched_background_migration_jobs.status;\\n+\\n+-- Batched background migration progress for given table (based on estimated total number of tuples)\\n+SELECT\\n+  m.table_name,\\n+  LEAST(100 * sum(j.batch_size) / pg_class.reltuples, 100) AS percentage_complete\\n+FROM\\n+  batched_background_migrations m\\n+  JOIN batched_background_migration_jobs j ON j.batched_background_migration_id = m.id\\n+  JOIN pg_class ON pg_class.relname = m.table_name\\n+WHERE\\n+  j.status = 3 AND m.table_name = 'namespaces'\\n+GROUP BY m.id, pg_class.reltuples;\\n+```\\n+\\n+#### Sidekiq logs\\n+\\n+We can also use the Sidekiq logs to monitor the worker that executes the batched background\\n+migrations:\\n+\\n+1. Sign in to [Kibana](https://log.gprd.gitlab.net) with a `@gitlab.com` email address.\\n+1. Change the index pattern to `pubsub-sidekiq-inf-gprd*`.\\n+1. Add filter for `json.queue: cronjob:database_batched_background_migration`.\\n+\\n+#### PostgerSQL slow queries log\\n+\\n+Slow queries log keeps track of low queries that took above 1 second to execute. To see them\\n+for batched background migration:\\n+\\n+1. Sign in to [Kibana](https://log.gprd.gitlab.net) with a `@gitlab.com` email address.\\n+1. Change the index pattern to `pubsub-postgres-inf-gprd*`.\\n+1. 
Add filter for `json.endpoint_id.keyword: Database::BatchedBackgroundMigrationWorker`.\\n+1. Optional. To see only updates, add a filter for `json.command_tag.keyword: UPDATE`.\\n+1. Optional. To see only failed statements, add a filter for `json.error_severiry.keyword: ERROR`.\\n+1. Optional. Add a filter by table name.\\n+\\n+#### Grafana dashboards\\n+\\n+To monitor the health of the database, use these additional metrics:\\n+\\n+- [PostgreSQL Tuple Statistics](https://dashboards.gitlab.net/d/000000167/postgresql-tuple-statistics?orgId=1\\u0026refresh=1m): if you see high rate of updates for the tables being actively converted, or increasing percentage of dead tuples for this table, it might mean that autovacuum cannot keep up.\\n+- [PostgreSQL Overview](https://dashboards.gitlab.net/d/000000144/postgresql-overview?orgId=1): if you see high system usage or transactions per second (TPS) on the primary database server, it might mean that the migration is causing problems.\\n+\\n+### Prometheus metrics\\n+\\n+Number of [metrics](https://gitlab.com/gitlab-org/gitlab/-/blob/294a92484ce4611f660439aa48eee4dfec2230b5/lib/gitlab/database/background_migration/batched_migration_wrapper.rb#L90-128)\\n+for each batched background migration are published to Prometheus. These metrics can be searched for and\\n+visualized in Thanos ([see an example](https://thanos-query.ops.gitlab.net/graph?g0.expr=sum%20(rate(batched_migration_job_updated_tuples_total%7Benv%3D%22gprd%22%7D%5B5m%5D))%20by%20(migration_id)%20\\u0026g0.tab=0\\u0026g0.stacked=0\\u0026g0.range_input=3d\\u0026g0.max_source_resolution=0s\\u0026g0.deduplicate=1\\u0026g0.partial_response=0\\u0026g0.store_matches=%5B%5D\\u0026g0.end_input=2021-06-13%2012%3A18%3A24\\u0026g0.moment_input=2021-06-13%2012%3A18%3A24)).\\n+\\n+### Swap the columns (release N + 1)\\n+\\n+After the background is completed and the new `bigint` columns are populated for all records, we can\\n+swap the columns. 
Swapping is done with post-deployment migration. The exact process depends on the\\n+table being converted, but in general it's done in the following steps:\\n+\\n+1. Using the provided `ensure_batched_background_migration_is_finished` helper, make sure the batched\\n+migration has finished ([see an example](https://gitlab.com/gitlab-org/gitlab/-/blob/41fbe34a4725a4e357a83fda66afb382828767b2/db/post_migrate/20210707210916_finalize_ci_stages_bigint_conversion.rb#L13-18)).\\n+If the migration has not completed, the subsequent steps fail anyway. By checking in advance we\\n+aim to have more helpful error message.\\n+1. Create indexes using the `bigint` columns that match the existing indexes using the `integer`\\n+column ([see an example](https://gitlab.com/gitlab-org/gitlab/-/blob/41fbe34a4725a4e357a83fda66afb382828767b2/db/post_migrate/20210707210916_finalize_ci_stages_bigint_conversion.rb#L28-34)).\\n+1. Create foreign keys (FK) using the `bigint` columns that match the existing FKs using the\\n+`integer` column. Do this both for FK referencing other tables, and FKs that reference the table\\n+that is being migrated ([see an example](https://gitlab.com/gitlab-org/gitlab/-/blob/41fbe34a4725a4e357a83fda66afb382828767b2/db/post_migrate/20210707210916_finalize_ci_stages_bigint_conversion.rb#L36-43)).\\n+1. Inside a transaction, swap the columns:\\n+    1. Lock the tables involved. To reduce the chance of hitting a deadlock, we recommended to do this in parent to child order ([see an example](https://gitlab.com/gitlab-org/gitlab/-/blob/41fbe34a4725a4e357a83fda66afb382828767b2/db/post_migrate/20210707210916_finalize_ci_stages_bigint_conversion.rb#L47)).\\n+    1. Rename the columns to swap names ([see an example](https://gitlab.com/gitlab-org/gitlab/-/blob/41fbe34a4725a4e357a83fda66afb382828767b2/db/post_migrate/20210707210916_finalize_ci_stages_bigint_conversion.rb#L49-54))\\n+    1. 
Reset the trigger function ([see an example](https://gitlab.com/gitlab-org/gitlab/-/blob/41fbe34a4725a4e357a83fda66afb382828767b2/db/post_migrate/20210707210916_finalize_ci_stages_bigint_conversion.rb#L56-57)).\\n+    1. Swap the defaults ([see an example](https://gitlab.com/gitlab-org/gitlab/-/blob/41fbe34a4725a4e357a83fda66afb382828767b2/db/post_migrate/20210707210916_finalize_ci_stages_bigint_conversion.rb#L59-62)).\\n+    1. Swap the PK constraint (if any) ([see an example](https://gitlab.com/gitlab-org/gitlab/-/blob/41fbe34a4725a4e357a83fda66afb382828767b2/db/post_migrate/20210707210916_finalize_ci_stages_bigint_conversion.rb#L64-68)).\\n+    1. Remove old indexes and rename new ones ([see an example](https://gitlab.com/gitlab-org/gitlab/-/blob/41fbe34a4725a4e357a83fda66afb382828767b2/db/post_migrate/20210707210916_finalize_ci_stages_bigint_conversion.rb#L70-72)).\\n+    1. Remove old FKs (if still present) and rename new ones ([see an example](https://gitlab.com/gitlab-org/gitlab/-/blob/41fbe34a4725a4e357a83fda66afb382828767b2/db/post_migrate/20210707210916_finalize_ci_stages_bigint_conversion.rb#L74)).\\n+\\n+See example [merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/66088), and [migration](https://gitlab.com/gitlab-org/gitlab/-/blob/41fbe34a4725a4e357a83fda66afb382828767b2/db/post_migrate/20210707210916_finalize_ci_stages_bigint_conversion.rb).\\n+\\n+### Remove the trigger and old `integer` columns (release N + 2)\\n+\\n+Using post-deployment migration and the provided `cleanup_conversion_of_integer_to_bigint` helper,\\n+drop the database trigger and the old `integer` columns ([see an example](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/69714)).\\n+\\n+### Remove ignore rules (release N + 3)\\n+\\n+In the next release after the columns were dropped, remove the ignore rules as we do not need them\\n+anymore ([see an example](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/71161)).\\n+\\n+## Data migrations\\n \\n Data 
migrations can be tricky. The usual approach to migrate data is to take a 3\\n step approach:\\n\"},{\"old_path\":\"doc/development/cicd/templates.md\",\"new_path\":\"doc/development/cicd/templates.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -60,7 +60,7 @@ don't have any other `.gitlab-ci.yml` files.\\n When authoring pipeline templates:\\n \\n - Place any [global keywords](../../ci/yaml/index.md#global-keywords) like `image`\\n-  or `before_script` in a [`default`](../../ci/yaml/index.md#custom-default-keyword-values)\\n+  or `before_script` in a [`default`](../../ci/yaml/index.md#default)\\n   section at the top of the template.\\n - Note clearly in the [code comments](#explain-the-template-with-comments) if the\\n   template is designed to be used with the `includes` keyword in an existing\\n@@ -77,7 +77,7 @@ other pipeline configuration.\\n \\n When authoring job templates:\\n \\n-- Do not use [global](../../ci/yaml/index.md#global-keywords) or [`default`](../../ci/yaml/index.md#custom-default-keyword-values)\\n+- Do not use [global](../../ci/yaml/index.md#global-keywords) or [`default`](../../ci/yaml/index.md#default)\\n   keywords. When a root `.gitlab-ci.yml` includes a template, global or default keywords\\n   might be overridden and cause unexpected behavior. 
If a job template requires a\\n   specific stage, explain in the code comments that users must manually add the stage\\n\"},{\"old_path\":\"doc/development/migration_style_guide.md\",\"new_path\":\"doc/development/migration_style_guide.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -135,6 +135,7 @@ various database operations, such as:\\n - [dropping and renaming columns](avoiding_downtime_in_migrations.md#dropping-columns)\\n - [changing column constraints and types](avoiding_downtime_in_migrations.md#changing-column-constraints)\\n - [adding and dropping indexes, tables, and foreign keys](avoiding_downtime_in_migrations.md#adding-indexes)\\n+- [migrating `integer` primary keys to `bigint`](avoiding_downtime_in_migrations.md#adding-indexes)\\n \\n and explains how to perform them without requiring downtime.\\n \\n\"},{\"old_path\":\"doc/development/pipelines.md\",\"new_path\":\"doc/development/pipelines.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -166,6 +166,13 @@ Our current RSpec tests parallelization setup is as follows:\\n \\n After that, the next pipeline uses the up-to-date `knapsack/report-master.json` file.\\n \\n+### Flaky tests\\n+\\n+Tests that are [known to be flaky](testing_guide/flaky_tests.md#automatic-retries-and-flaky-tests-detection) are:\\n+\\n+- skipped if the `$SKIP_FLAKY_TESTS_AUTOMATICALLY` variable is set to `true` (`false` by default)\\n+- run if `$SKIP_FLAKY_TESTS_AUTOMATICALLY` variable is not set to `true` or if the `~\\\"pipeline:run-flaky-tests\\\"` label is set on the MR\\n+\\n ### Monitoring\\n \\n The GitLab test suite is [monitored](performance.md#rspec-profiling) for the `main` branch, and any 
branch\\n\"},{\"old_path\":\"doc/install/requirements.md\",\"new_path\":\"doc/install/requirements.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -302,7 +302,7 @@ The GitLab Runner server requirements depend on:\\n \\n Since the nature of the jobs varies for each use case, you need to experiment by adjusting the job concurrency to get the optimum setting.\\n \\n-For reference, the GitLab.com Build Cloud [auto-scaling runner for Linux](../ci/runners/build_cloud/linux_build_cloud.md) is configured so that a **single job** runs in a **single instance** with:\\n+For reference, the GitLab.com Runner Cloud [auto-scaling runner for Linux](../ci/runners/build_cloud/linux_build_cloud.md) is configured so that a **single job** runs in a **single instance** with:\\n \\n - 1 vCPU.\\n - 3.75 GB of RAM.\\n\"},{\"old_path\":\"doc/integration/jira/index.md\",\"new_path\":\"doc/integration/jira/index.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -10,7 +10,7 @@ If your organization uses [Jira](https://www.atlassian.com/software/jira) issues\\n you can [migrate your issues from Jira](../../user/project/import/jira.md) and work\\n exclusively in GitLab. However, if you'd like to continue to use Jira, you can\\n integrate it with GitLab. GitLab offers two types of Jira integrations, and you\\n-can use one or both depending on the capabilities you need. It is recommended that you enable both.\\n+can use one or both depending on the capabilities you need. 
We recommend you enable both.\\n \\n ## Compare integrations\\n \\n@@ -41,7 +41,7 @@ or the Jira DVCS (distributed version control system) connector,\\n \\n ### Direct feature comparison\\n \\n-| Capability | Jira integration | Jira Development panel integration |\\n+| Capability | Jira integration | Jira development panel integration |\\n |-|-|-|\\n | Mention a Jira issue ID in a GitLab commit or merge request, and a link to the Jira issue is created. | Yes. | No. |\\n | Mention a Jira issue ID in GitLab and the Jira issue shows the GitLab issue or merge request. | Yes. A Jira comment with the GitLab issue or MR title links to GitLab. The first mention is also added to the Jira issue under **Web links**. | Yes, in the issue's [development panel](https://support.atlassian.com/jira-software-cloud/docs/view-development-information-for-an-issue/). |\\n@@ -55,11 +55,11 @@ or the Jira DVCS (distributed version control system) connector,\\n \\n ## Authentication in Jira\\n \\n-The process for configuring Jira depends on whether you host Jira on your own server or on\\n+The authentication method in Jira depends on whether you host Jira on your own server or on\\n [Atlassian cloud](https://www.atlassian.com/cloud):\\n \\n - **Jira Server** supports basic authentication. When connecting, a **username and password** are\\n-  required. Connecting to Jira Server via CAS is not possible. For more information, read\\n+  required. Connecting to Jira Server using the Central Authentication Service (CAS) is not possible. For more information, read\\n   how to [set up a user in Jira Server](jira_server_configuration.md).\\n - **Jira on Atlassian cloud** supports authentication through an API token. When connecting to Jira on\\n   Atlassian cloud, an email and API token are required. For more information, read\\n@@ -72,11 +72,16 @@ actions in GitLab issues and merge requests linked to a Jira issue leak informat\\n about the private project to non-administrator Jira users. 
If your installation uses Jira Cloud,\\n you can use the [GitLab.com for Jira Cloud app](connect-app.md) to avoid this risk.\\n \\n+## Third-party Jira integrations\\n+\\n+Developers have built several third-party Jira integrations for GitLab that are\\n+listed on the [Atlassian Marketplace](https://marketplace.atlassian.com/search?product=jira\\u0026query=gitlab).\\n+\\n ## Troubleshooting\\n \\n If these features do not work as expected, it is likely due to a problem with the way the integration settings were configured.\\n \\n-### GitLab is unable to comment on a Jira issue\\n+### GitLab cannot comment on a Jira issue\\n \\n If GitLab cannot comment on Jira issues, make sure the Jira user you\\n set up for the integration has permission to:\\n@@ -86,14 +91,16 @@ set up for the integration has permission to:\\n \\n Jira issue references and update comments do not work if the GitLab issue tracker is disabled.\\n \\n-### GitLab is unable to close a Jira issue\\n+### GitLab cannot close a Jira issue\\n+\\n+If GitLab cannot close a Jira issue:\\n \\n-Make sure the `Transition ID` you set in the Jira settings matches the one\\n-your project needs to close an issue.\\n+- Make sure the `Transition ID` you set in the Jira settings matches the one\\n+  your project needs to close an issue.\\n \\n-Make sure that the Jira issue is not already marked as resolved. 
That is,\\n-the Jira issue resolution field is not set, and the issue is not struck through in\\n-Jira lists.\\n+- Make sure the Jira issue is not already marked as resolved:\\n+  - Check the Jira issue resolution field is not set.\\n+  - Check the issue is not struck through in Jira lists.\\n \\n ### CAPTCHA\\n \\n@@ -104,8 +111,3 @@ authenticate with the Jira site.\\n \\n To fix this error, sign in to your Jira instance\\n and complete the CAPTCHA.\\n-\\n-## Third-party Jira integrations\\n-\\n-Developers have built several third-party Jira integrations for GitLab that are\\n-listed on the [Atlassian Marketplace](https://marketplace.atlassian.com/search?product=jira\\u0026query=gitlab).\\n\"},{\"old_path\":\"doc/user/admin_area/index.md\",\"new_path\":\"doc/user/admin_area/index.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -257,6 +257,10 @@ To edit a topic, select **Edit** in that topic's row.\\n To search for topics by name, enter your criteria in the search box. The topic search is case\\n insensitive, and applies partial matching.\\n \\n+NOTE:\\n+Topics are public and visible to everyone, but assignments to projects are not.\\n+Do not include sensitive information in the name or description of a topic.\\n+\\n ### Administering Jobs\\n \\n You can administer all jobs in the GitLab instance from the Admin Area's Jobs page.\\n\"},{\"old_path\":\"doc/user/clusters/agent/install/index.md\",\"new_path\":\"doc/user/clusters/agent/install/index.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -85,6 +85,7 @@ the Agent in subsequent steps.\\n \\n In GitLab:\\n \\n+1. Ensure that [GitLab CI/CD is enabled in your project](../../../../ci/enable_or_disable_ci.md#enable-cicd-in-a-project).\\n 1. From your project's sidebar, select **Infrastructure \\u003e Kubernetes clusters**.\\n 1. 
Select the **GitLab Agent managed clusters** tab.\\n 1. Select **Integrate with the GitLab Agent**.\\n\"},{\"old_path\":\"doc/user/clusters/agent/repository.md\",\"new_path\":\"doc/user/clusters/agent/repository.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -173,7 +173,7 @@ To grant projects access to the Agent through the [CI/CD Tunnel](ci_cd_tunnel.md\\n 1. Go to your Agent's configuration project.\\n 1. Edit the Agent's configuration file (`config.yaml`).\\n 1. Add the `projects` attribute into `ci_access`.\\n-1. Identify the new project through its path:\\n+1. Identify the project through its path:\\n \\n    ```yaml\\n    ci_access:\\n\"},{\"old_path\":\"doc/user/clusters/management_project.md\",\"new_path\":\"doc/user/clusters/management_project.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -12,6 +12,9 @@ info: To determine the technical writer assigned to the Stage/Group associated w\\n WARNING:\\n This feature was [deprecated](https://gitlab.com/groups/gitlab-org/configure/-/epics/8) in GitLab 14.5.\\n \\n+To manage cluster applications, use the [GitLab Kubernetes Agent](agent/index.md)\\n+with the [Cluster Management Project Template](management_project_template.md).\\n+\\n A project can be designated as the management project for a cluster.\\n A management project can be used to run deployment jobs with\\n Kubernetes\\n@@ -41,8 +44,7 @@ Management projects are restricted to the following:\\n To use a cluster management project to manage your cluster:\\n \\n 1. Create a new project to serve as the cluster management project\\n-for your cluster. We recommend that you\\n-[create this project based on the Cluster Management project template](management_project_template.md#create-a-new-project-based-on-the-cluster-management-template).\\n+for your cluster.\\n 1. 
[Associate the cluster with the management project](#associate-the-cluster-management-project-with-the-cluster).\\n 1. [Configure your cluster's pipelines](#configuring-your-pipeline).\\n 1. [Set the environment scope](#setting-the-environment-scope).\\n\"},{\"old_path\":\"doc/user/clusters/management_project_template.md\",\"new_path\":\"doc/user/clusters/management_project_template.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -4,15 +4,17 @@ group: Configure\\n info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments\\n ---\\n \\n-# Cluster Management project template **(FREE)**\\n+# Manage cluster applications **(FREE)**\\n \\n \\u003e - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/25318) in GitLab 12.10 with Helmfile support via Helm v2.\\n \\u003e - Helm v2 support was [dropped](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63577) in GitLab 14.0. 
Use Helm v3 instead.\\n+\\u003e - [Migrated](https://gitlab.com/gitlab-org/project-templates/cluster-management/-/merge_requests/24) to the GitLab Kubernetes Agent in GitLab 14.5.\\n \\n-With a [cluster management project](management_project.md) you can manage\\n-your cluster's deployment and applications through a repository in GitLab.\\n+Use a repository to install, manage, and deploy clusters applications through code.\\n \\n-The Cluster Management project template provides you a baseline to get\\n+## Cluster Management Project Template\\n+\\n+The Cluster Management Project Template provides you a baseline to get\\n started and flexibility to customize your project to your cluster's needs.\\n For instance, you can:\\n \\n@@ -21,49 +23,78 @@ For instance, you can:\\n - Remove the built-in cluster applications you don't need.\\n - Add other cluster applications using the same structure as the ones already available.\\n \\n-The template contains the following [components](#available-components):\\n+The template contains the following [components](#configure-the-available-components):\\n \\n-- A pre-configured GitLab CI/CD file so that you can configure deployment pipelines.\\n+- A pre-configured GitLab CI/CD file so that you can configure CI/CD pipelines using the [CI/CD Tunnel](agent/ci_cd_tunnel.md).\\n - A pre-configured [Helmfile](https://github.com/roboll/helmfile) so that\\n you can manage cluster applications with [Helm v3](https://helm.sh/).\\n - An `applications` directory with a `helmfile.yaml` configured for each\\n application available in the template.\\n \\n-WARNING:\\n-If you used [GitLab Managed Apps](applications.md) to manage your\\n-cluster from GitLab, see how to [migrate from GitLab Managed Apps](migrating_from_gma_to_project_template.md) to the Cluster Management\\n-project.\\n+## Use the Kubernetes Agent with the Cluster Management Project Template\\n+\\n+To use a new project created from the Cluster Management Project Template\\n+with a 
cluster connected to GitLab through the [GitLab Kubernetes Agent](agent/index.md),\\n+you have two options:\\n+\\n+- [Use one single project](#single-project) to configure the Agent and manage cluster applications.\\n+- [Use separate projects](#separate-projects) - one to configure the Agent and another to manage cluster applications.\\n+\\n+### Single project\\n+\\n+This setup is particularly useful when you haven't connected your cluster\\n+to GitLab through the Agent yet and you want to use the Cluster Management\\n+Project Template to manage cluster applications.\\n+\\n+To use one single project to configure the Agent and to manage cluster applications:\\n+\\n+1. [Create a new project from the Cluster Management Project Template](#create-a-new-project-based-on-the-cluster-management-template).\\n+1. Configure the new project as the [Agent's configuration repository](agent/repository.md)\\n+(where the Agent is registered and its `config.yaml` is stored).\\n+1. From your project's settings, add a [new environment variable](../../ci/variables/index.md#add-a-cicd-variable-to-a-project) `$KUBE_CONTEXT` and set it to `path/to/agent-configuration-project:your-agent-name`.\\n+1. 
[Configure the components](#configure-the-available-components) inherited from the template.\\n+\\n+### Separate projects\\n+\\n+This setup is particularly useful **when you already have a cluster** connected\\n+to GitLab through the Agent and want to use the Cluster Management\\n+Project Template to manage cluster applications.\\n \\n-## Set up the management project from the Cluster Management project template\\n+To use one project to configure the Agent (\\\"project A\\\") and another project to\\n+manage cluster applications (\\\"project B\\\"), follow the steps below.\\n \\n-To set up your cluster's management project off of the Cluster Management project template:\\n+We assume that you already have a cluster connected through the Agent and\\n+[configured through the Agent's configuration repository](agent/repository.md)\\n+(\\\"project A\\\").\\n \\n-1. [Create a new project based on the Cluster Management template](#create-a-new-project-based-on-the-cluster-management-template).\\n-1. [Associate the cluster management project with your cluster](management_project.md#associate-the-cluster-management-project-with-the-cluster).\\n-1. Use the [available components](#available-components) to manage your cluster.\\n+1. [Create a new project from the Cluster Management Project Template](#create-a-new-project-based-on-the-cluster-management-template).\\n+This new project is \\\"project B\\\".\\n+1. In your \\\"project A\\\", [grant the Agent access to the new project (B) through the CI/CD Tunnel](agent/repository.md#authorize-projects-to-use-an-agent).\\n+1. From the \\\"project's B\\\" settings, add a [new environment variable](../../ci/variables/index.md#add-a-cicd-variable-to-a-project) `$KUBE_CONTEXT` and set it to `path/to/agent-configuration-project:your-agent-name`.\\n+1. 
In \\\"project B\\\", [configure the components](#configure-the-available-components) inherited from the template.\\n \\n-### Create a new project based on the Cluster Management template\\n+## Create a new project based on the Cluster Management Template\\n \\n To get started, create a new project based on the Cluster Management\\n project template to use as a cluster management project.\\n \\n-You can either create the [new project](../project/working_with_projects.md#create-a-project)\\n-from the template or import the project from the URL. Importing\\n-the project is useful if you are using a GitLab self-managed\\n-instance that may not have the latest version of the template.\\n+You can either create the new project from the template or import the\\n+project from the URL. Importing the project is useful if you are using\\n+a GitLab self-managed instance that may not have the latest version of\\n+the template.\\n \\n-To create the new project:\\n+To [create the new project](../project/working_with_projects.md#create-a-project):\\n \\n - From the template: select the **GitLab Cluster Management** project template.\\n - Importing from the URL: use `https://gitlab.com/gitlab-org/project-templates/cluster-management.git`.\\n \\n-## Available components\\n+## Configure the available components\\n \\n-Use the available components to configure your cluster:\\n+Use the available components to configure your cluster applications:\\n \\n-- [A `.gitlab-ci.yml` file](#the-gitlab-ciyml-file).\\n-- [A main `helmfile.yml` file](#the-main-helmfileyml-file).\\n-- [A directory with built-in applications](#built-in-applications).\\n+- [The `.gitlab-ci.yml` file](#the-gitlab-ciyml-file).\\n+- [The main `helmfile.yml` file](#the-main-helmfileyml-file).\\n+- [The directory with built-in applications](#built-in-applications).\\n \\n ### The `.gitlab-ci.yml` file\\n \\n@@ -107,7 +138,7 @@ The [built-in supported applications](https://gitlab.com/gitlab-org/project-temp\\n - 
[Sentry](../infrastructure/clusters/manage/management_project_applications/sentry.md)\\n - [Vault](../infrastructure/clusters/manage/management_project_applications/vault.md)\\n \\n-#### How to customize your applications\\n+#### Customize your applications\\n \\n Each app has an `applications/{app}/values.yaml` file (`applications/{app}/values.yaml.gotmpl` in case of GitLab Runner). This is the\\n place where you can define default values for your app's Helm chart. Some apps already have defaults\\n\"},{\"old_path\":\"doc/user/gitlab_com/index.md\",\"new_path\":\"doc/user/gitlab_com/index.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -198,11 +198,11 @@ The following limits apply for [Webhooks](../project/integrations/webhooks.md):\\n | [Number of webhooks](../../administration/instance_limits.md#number-of-webhooks) | `100` per project, `50` per group | `100` per project, `50` per group |\\n | Maximum payload size | 25 MB       | 25 MB   |\\n \\n-## Shared Build Cloud runners\\n+## Shared Runner Cloud runners\\n \\n GitLab has shared runners on GitLab.com that you can use to run your CI jobs.\\n \\n-For more information, see [GitLab Build Cloud runners](../../ci/runners/index.md).\\n+For more information, see [GitLab Runner Cloud runners](../../ci/runners/index.md).\\n \\n ## Sidekiq\\n \\n\"},{\"old_path\":\"doc/user/group/saml_sso/scim_setup.md\",\"new_path\":\"doc/user/group/saml_sso/scim_setup.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -35,9 +35,10 @@ The following identity providers are supported:\\n \\n Once [Group Single Sign-On](index.md) has been configured, we can:\\n \\n-1. Navigate to the group and click **Administration \\u003e SAML SSO**.\\n-1. Click on the **Generate a SCIM token** button.\\n-1. Save the token and URL so they can be used in the next step.\\n+1. 
On the top bar, select **Menu \\u003e Groups** and find your group.\\n+1. On the left sidebar, select **Settings \\u003e SAML SSO**.\\n+1. Select **Generate a SCIM token**.\\n+1. Save the token and URL for use in the next step.\\n \\n ![SCIM token configuration](img/scim_token_v13_3.png)\\n \\n@@ -50,14 +51,14 @@ Once [Group Single Sign-On](index.md) has been configured, we can:\\n \\n The SAML application that was created during [Single sign-on](index.md) setup for [Azure](https://docs.microsoft.com/en-us/azure/active-directory/manage-apps/view-applications-portal) now needs to be set up for SCIM.\\n \\n-1. Set up automatic provisioning and administrative credentials by following the\\n+1. Enable automatic provisioning and administrative credentials by following the\\n    [Azure's SCIM setup documentation](https://docs.microsoft.com/en-us/azure/active-directory/app-provisioning/use-scim-to-provision-users-and-groups#provisioning-users-and-groups-to-applications-that-support-scim).\\n \\n During this configuration, note the following:\\n \\n-- The `Tenant URL` and `secret token` are the ones retrieved in the\\n+- The `Tenant URL` and `secret token` are the items retrieved in the\\n   [previous step](#gitlab-configuration).\\n-- It is recommended to set a notification email and check the **Send an email notification when a failure occurs** checkbox.\\n+- We recommend setting a notification email and selecting the **Send an email notification when a failure occurs** checkbox.\\n - For mappings, we only leave `Synchronize Azure Active Directory Users to AppName` enabled.\\n   `Synchronize Azure Active Directory Groups to AppName` is usually disabled. However, this\\n   does not mean Azure AD users cannot be provisioned in groups. Leaving it enabled does not break\\n@@ -113,29 +114,27 @@ Make sure that the Okta setup matches our documentation exactly, especially the\\n configuration. Otherwise, the Okta SCIM app may not work properly.\\n \\n 1. Sign in to Okta.\\n-1. 
If you see an **Admin** button in the top right, click the button. This will\\n-   ensure you are in the Admin area.\\n+1. Ensure you are in the Admin section by selecting the **Admin** button located in the top right. The admin button is not visible from the admin page.\\n \\n    NOTE:\\n-   If you're using the Developer Console, click **Developer Console** in the top\\n-   bar and select **Classic UI**. Otherwise, you may not see the buttons described\\n-   in the following steps:\\n+   If you're using the Developer Console, select **Developer Console** in the top\\n+   bar and then select **Classic UI**. Otherwise, you may not see the buttons described in the following steps:\\n \\n-1. In the **Application** tab, click **Add Application**.\\n-1. Search for **GitLab**, find and click on the 'GitLab' application.\\n-1. On the GitLab application overview page, click **Add**.\\n+1. In the **Application** tab, select **Add Application**.\\n+1. Search for **GitLab**, find and select on the 'GitLab' application.\\n+1. On the GitLab application overview page, select **Add**.\\n 1. Under **Application Visibility** select both checkboxes. Currently the GitLab application does not support SAML authentication so the icon should not be shown to users.\\n-1. Click **Done** to finish adding the application.\\n-1. In the **Provisioning** tab, click **Configure API integration**.\\n+1. Select **Done** to finish adding the application.\\n+1. In the **Provisioning** tab, select **Configure API integration**.\\n 1. Select **Enable API integration**.\\n     - For **Base URL** enter the URL obtained from the GitLab SCIM configuration page\\n     - For **API Token** enter the SCIM token obtained from the GitLab SCIM configuration page\\n-1. Click 'Test API Credentials' to verify configuration.\\n-1. Click **Save** to apply the settings.\\n-1. After saving the API integration details, new settings tabs appear on the left. Choose **To App**.\\n-1. Click **Edit**.\\n-1. 
Check the box to **Enable** for both **Create Users** and **Deactivate Users**.\\n-1. Click **Save**.\\n+1. Select 'Test API Credentials' to verify configuration.\\n+1. Select **Save** to apply the settings.\\n+1. After saving the API integration details, new settings tabs appear on the left. Select **To App**.\\n+1. Select **Edit**.\\n+1. Select the **Enable** checkbox for both **Create Users** and **Deactivate Users**.\\n+1. Select **Save**.\\n 1. Assign users in the **Assignments** tab. Assigned users are created and\\n    managed in your GitLab group.\\n \\n@@ -147,8 +146,8 @@ application described above.\\n \\n ### OneLogin\\n \\n-OneLogin provides a \\\"GitLab (SaaS)\\\" app in their catalog, which includes a SCIM integration.\\n-As the app is developed by OneLogin, please reach out to OneLogin if you encounter issues.\\n+As the developers of this app, OneLogin provides a \\\"GitLab (SaaS)\\\" app in their catalog, which includes a SCIM integration.\\n+Please reach out to OneLogin if you encounter issues.\\n \\n ## User access and linking setup\\n \\n@@ -177,8 +176,8 @@ As long as [Group SAML](index.md) has been configured, existing GitLab.com users\\n - By following these steps:\\n \\n   1. Sign in to GitLab.com if needed.\\n-  1. Click on the GitLab app in the identity provider's dashboard or visit the **GitLab single sign-on URL**.\\n-  1. Click on the **Authorize** button.\\n+  1. In the identity provider's dashboard select the GitLab app or visit the **GitLab single sign-on URL**.\\n+  1. 
Select the **Authorize**.\\n \\n We recommend users do this prior to turning on sync, because while synchronization is active, there may be provisioning errors for existing users.\\n \\n\"},{\"old_path\":\"doc/user/infrastructure/clusters/index.md\",\"new_path\":\"doc/user/infrastructure/clusters/index.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -8,7 +8,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w\\n \\n To connect clusters to GitLab, use the [GitLab Kubernetes Agent](../../clusters/agent/index.md).\\n \\n-## Certificate-based Kubernetes integration (DEPRECATED) **(FREE)**\\n+## Certificate-based Kubernetes integration (DEPRECATED)\\n \\n WARNING:\\n In GitLab 14.5, the certificate-based method to connect Kubernetes clusters\\n\"},{\"old_path\":\"doc/user/infrastructure/clusters/manage/clusters_health.md\",\"new_path\":\"doc/user/infrastructure/clusters/manage/clusters_health.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -6,8 +6,8 @@ info: To determine the technical writer assigned to the Stage/Group associated w\\n \\n # Clusters health **(FREE)**\\n \\n-\\u003e - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/4701) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 10.6.\\n-\\u003e - [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/208224) to GitLab Free in 13.2.\\n+\\u003e - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/4701) in GitLab 10.6.\\n+\\u003e - [Moved](https://gitlab.com/gitlab-org/gitlab/-/issues/208224) from GitLab Ultimate to GitLab Free in 13.2.\\n \\n When [the Prometheus cluster integration is enabled](../../../clusters/integrations.md#prometheus-cluster-integration), GitLab monitors the cluster's health. 
At the top of the cluster settings page, CPU and Memory utilization is displayed, along with the total amount available. Keeping an eye on cluster resources can be important, if the cluster runs out of memory pods may be shutdown or fail to start.\\n \\n\"},{\"old_path\":\"doc/user/infrastructure/clusters/manage/management_project_applications/certmanager.md\",\"new_path\":\"doc/user/infrastructure/clusters/manage/management_project_applications/certmanager.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -4,7 +4,7 @@ group: Configure\\n info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments\\n ---\\n \\n-# Install cert-manager with a cluster management project\\n+# Install cert-manager with a cluster management project **(FREE)**\\n \\n \\u003e - [Introduced](https://gitlab.com/gitlab-org/project-templates/cluster-management/-/merge_requests/5) in GitLab 14.0.\\n \\u003e - Support for cert-manager v1.4 was [introduced](https://gitlab.com/gitlab-org/project-templates/cluster-management/-/merge_requests/69405) in GitLab 14.3.\\n\"},{\"old_path\":\"doc/user/infrastructure/clusters/manage/management_project_applications/ingress.md\",\"new_path\":\"doc/user/infrastructure/clusters/manage/management_project_applications/ingress.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -4,7 +4,7 @@ group: Configure\\n info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments\\n ---\\n \\n-# Install Ingress with a cluster management project\\n+# Install Ingress with a cluster management project **(FREE)**\\n \\n \\u003e 
[Introduced](https://gitlab.com/gitlab-org/project-templates/cluster-management/-/merge_requests/5) in GitLab 14.0.\\n \\n\"},{\"old_path\":\"doc/user/infrastructure/index.md\",\"new_path\":\"doc/user/infrastructure/index.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -29,13 +29,12 @@ Learn more about how GitLab can help you run [Infrastructure as Code](iac/index.\\n \\n ## Integrated Kubernetes management\\n \\n-GitLab has special integrations with Kubernetes to help you deploy, manage and troubleshoot\\n-third-party or custom applications in Kubernetes clusters. Auto DevOps provides a full\\n-DevSecOps pipeline by default targeted at Kubernetes based deployments. To support\\n-all the GitLab features, GitLab offers a cluster management project for easy onboarding.\\n-The deploy boards provide quick insights into your cluster, including pod logs tailing.\\n+The GitLab integration with Kubernetes helps you to install, configure, manage, deploy, and troubleshoot\\n+cluster applications. 
With the GitLab Kubernetes Agent, you can connect clusters behind a firewall,\\n+have real-time access to API endpoints, perform pull-beased or push-based deployments for production\\n+and non-production environments, and much more.\\n \\n-Learn more about the [GitLab integration with Kubernetes](clusters/index.md).\\n+Learn more about the [GitLab Kubernetes Agent](../clusters/agent/index.md).\\n \\n ## Runbooks in GitLab\\n \\n\"},{\"old_path\":\"doc/user/packages/composer_repository/index.md\",\"new_path\":\"doc/user/packages/composer_repository/index.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -149,7 +149,7 @@ Do not save unless you want to overwrite the existing CI/CD file.\\n When you publish:\\n \\n - The same package with different data, it overwrites the existing package.\\n-- The same package with the same data, a `404 Bad request` error occurs.\\n+- The same package with the same data, a `400 Bad request` error occurs.\\n \\n ## Install a Composer package\\n \\n\"},{\"old_path\":\"doc/user/project/settings/img/import_export_download_export.png\",\"new_path\":\"doc/user/project/settings/img/import_export_download_export.png\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"Binary files a/doc/user/project/settings/img/import_export_download_export.png and b/doc/user/project/settings/img/import_export_download_export.png differ\\n\"},{\"old_path\":\"doc/user/project/settings/img/import_export_export_button.png\",\"new_path\":\"doc/user/project/settings/img/import_export_export_button.png\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"Binary files a/doc/user/project/settings/img/import_export_export_button.png and b/doc/user/project/settings/img/import_export_export_button.png 
differ\\n\"},{\"old_path\":\"doc/user/project/settings/import_export.md\",\"new_path\":\"doc/user/project/settings/import_export.md\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -160,6 +160,8 @@ To export a project and its data, follow these steps:\\n \\n 1. Select **Settings** in the sidebar.\\n \\n+1. Scroll down and expand the **Advanced** section.\\n+\\n 1. Scroll down to find the **Export project** button:\\n \\n    ![Export button](img/import_export_export_button.png)\\n\"},{\"old_path\":\"lib/api/github/entities.rb\",\"new_path\":\"lib/api/github/entities.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -59,8 +59,8 @@ class RepoCommit \\u003c Grape::Entity\\n         expose :parents do |commit|\\n           commit.parent_ids.map { |id| { sha: id } }\\n         end\\n-        expose :files do |commit|\\n-          commit.diffs.diff_files.flat_map do |diff|\\n+        expose :files do |_commit, options|\\n+          options[:diff_files].flat_map do |diff|\\n             additions = diff.added_lines\\n             deletions = diff.removed_lines\\n \\n\"},{\"old_path\":\"lib/api/v3/github.rb\",\"new_path\":\"lib/api/v3/github.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -20,6 +20,9 @@ class Github \\u003c ::API::Base\\n       # Jira Server user agent format: Jira DVCS Connector/version\\n       JIRA_DVCS_CLOUD_USER_AGENT = 'Jira DVCS Connector Vertigo'\\n \\n+      GITALY_TIMEOUT_CACHE_KEY = 'api:v3:Gitaly-timeout-cache-key'\\n+      GITALY_TIMEOUT_CACHE_EXPIRY = 1.day\\n+\\n       include PaginationParams\\n \\n       feature_category :integrations\\n@@ -93,6 +96,32 @@ def find_notes(noteable)\\n           notes.select { |n| n.readable_by?(current_user) }\\n         end\\n         # rubocop: enable 
CodeReuse/ActiveRecord\\n+\\n+        # Returns an empty Array instead of the Commit diff files for a period\\n+        # of time after a Gitaly timeout, to mitigate frequent Gitaly timeouts\\n+        # for some Commit diffs.\\n+        def diff_files(commit)\\n+          return commit.diffs.diff_files unless Feature.enabled?(:api_v3_commits_skip_diff_files, commit.project)\\n+\\n+          cache_key = [\\n+            GITALY_TIMEOUT_CACHE_KEY,\\n+            commit.project.id,\\n+            commit.cache_key\\n+          ].join(':')\\n+\\n+          return [] if Rails.cache.read(cache_key).present?\\n+\\n+          begin\\n+            commit.diffs.diff_files\\n+          rescue GRPC::DeadlineExceeded =\\u003e error\\n+            # Gitaly fails to load diffs consistently for some commits. The other information\\n+            # is still valuable for Jira. So we skip the loading and respond with a 200 excluding diffs\\n+            # Remove this when https://gitlab.com/gitlab-org/gitaly/-/issues/3741 is fixed.\\n+            Rails.cache.write(cache_key, 1, expires_in: GITALY_TIMEOUT_CACHE_EXPIRY)\\n+            Gitlab::ErrorTracking.track_exception(error)\\n+            []\\n+          end\\n+        end\\n       end\\n \\n       resource :orgs do\\n@@ -228,10 +257,9 @@ def find_notes(noteable)\\n           user_project = find_project_with_access(params)\\n \\n           commit = user_project.commit(params[:sha])\\n-\\n           not_found! 
'Commit' unless commit\\n \\n-          present commit, with: ::API::Github::Entities::RepoCommit\\n+          present commit, with: ::API::Github::Entities::RepoCommit, diff_files: diff_files(commit)\\n         end\\n       end\\n     end\\n\"},{\"old_path\":\"lib/gitlab/background_migration/migrate_requirements_to_work_items.rb\",\"new_path\":\"lib/gitlab/background_migration/migrate_requirements_to_work_items.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,13 @@\\n+# frozen_string_literal: true\\n+\\n+module Gitlab\\n+  module BackgroundMigration\\n+    # No op on CE\\n+    class MigrateRequirementsToWorkItems\\n+      def perform(start_id, end_id)\\n+      end\\n+    end\\n+  end\\n+end\\n+\\n+Gitlab::BackgroundMigration::MigrateRequirementsToWorkItems.prepend_mod_with('Gitlab::BackgroundMigration::MigrateRequirementsToWorkItems')\\n\"},{\"old_path\":\"lib/gitlab/ci/artifact_file_reader.rb\",\"new_path\":\"lib/gitlab/ci/artifact_file_reader.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -45,14 +45,6 @@ def validate!\\n       end\\n \\n       def read_zip_file!(file_path)\\n-        if ::Feature.enabled?(:ci_new_artifact_file_reader, job.project, default_enabled: :yaml)\\n-          read_with_new_artifact_file_reader(file_path)\\n-        else\\n-          read_with_legacy_artifact_file_reader(file_path)\\n-        end\\n-      end\\n-\\n-      def read_with_new_artifact_file_reader(file_path)\\n         job.artifacts_file.use_open_file do |file|\\n           zip_file = Zip::File.new(file, false, true)\\n           entry = zip_file.find_entry(file_path)\\n@@ -69,25 +61,6 @@ def read_with_new_artifact_file_reader(file_path)\\n         end\\n       end\\n \\n-      def read_with_legacy_artifact_file_reader(file_path)\\n-        job.artifacts_file.use_file do |archive_path|\\n-          
Zip::File.open(archive_path) do |zip_file|\\n-            entry = zip_file.find_entry(file_path)\\n-            unless entry\\n-              raise Error, \\\"Path `#{file_path}` does not exist inside the `#{job.name}` artifacts archive!\\\"\\n-            end\\n-\\n-            if entry.name_is_directory?\\n-              raise Error, \\\"Path `#{file_path}` was expected to be a file but it was a directory!\\\"\\n-            end\\n-\\n-            zip_file.get_input_stream(entry) do |is|\\n-              is.read\\n-            end\\n-          end\\n-        end\\n-      end\\n-\\n       def max_archive_size_in_mb\\n         ActiveSupport::NumberHelper.number_to_human_size(MAX_ARCHIVE_SIZE)\\n       end\\n\"},{\"old_path\":\"lib/gitlab/database/gitlab_schema.rb\",\"new_path\":\"lib/gitlab/database/gitlab_schema.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -8,17 +8,84 @@\\n # - gitlab_shared - defines a set of tables that are found on all databases (data accessed is dependent on connection)\\n # - gitlab_main / gitlab_ci - defines a set of tables that can only exist on a given database\\n #\\n+# Tables for the purpose of tests should be prefixed with `_test_my_table_name`\\n \\n module Gitlab\\n   module Database\\n     module GitlabSchema\\n+      # These tables are deleted/renamed, but still referenced by migrations.\\n+      # This is needed for now, but should be removed in the future\\n+      DELETED_TABLES = {\\n+        # main tables\\n+        'alerts_service_data' =\\u003e :gitlab_main,\\n+        'analytics_devops_adoption_segment_selections' =\\u003e :gitlab_main,\\n+        'analytics_repository_file_commits' =\\u003e :gitlab_main,\\n+        'analytics_repository_file_edits' =\\u003e :gitlab_main,\\n+        'analytics_repository_files' =\\u003e :gitlab_main,\\n+        'audit_events_archived' =\\u003e :gitlab_main,\\n+        'backup_labels' =\\u003e :gitlab_main,\\n+  
      'clusters_applications_fluentd' =\\u003e :gitlab_main,\\n+        'forked_project_links' =\\u003e :gitlab_main,\\n+        'issue_milestones' =\\u003e :gitlab_main,\\n+        'merge_request_milestones' =\\u003e :gitlab_main,\\n+        'namespace_onboarding_actions' =\\u003e :gitlab_main,\\n+        'services' =\\u003e :gitlab_main,\\n+        'terraform_state_registry' =\\u003e :gitlab_main,\\n+        'tmp_fingerprint_sha256_migration' =\\u003e :gitlab_main, # used by lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys.rb\\n+        'web_hook_logs_archived' =\\u003e :gitlab_main,\\n+        'vulnerability_export_registry' =\\u003e :gitlab_main,\\n+        'vulnerability_finding_fingerprints' =\\u003e :gitlab_main,\\n+        'vulnerability_export_verification_status' =\\u003e :gitlab_main,\\n+\\n+        # CI tables\\n+        'ci_build_trace_sections' =\\u003e :gitlab_ci,\\n+        'ci_build_trace_section_names' =\\u003e :gitlab_ci,\\n+        'ci_daily_report_results' =\\u003e :gitlab_ci,\\n+        'ci_test_cases' =\\u003e :gitlab_ci,\\n+        'ci_test_case_failures' =\\u003e :gitlab_ci,\\n+\\n+        # leftovers from early implementation of partitioning\\n+        'audit_events_part_5fc467ac26' =\\u003e :gitlab_main,\\n+        'web_hook_logs_part_0c5294f417' =\\u003e :gitlab_main\\n+      }.freeze\\n+\\n       def self.table_schemas(tables)\\n         tables.map { |table| table_schema(table) }.to_set\\n       end\\n \\n       def self.table_schema(name)\\n+        schema_name, table_name = name.split('.', 2) # Strip schema name like: `public.`\\n+\\n+        # Most of names do not have schemas, ensure that this is table\\n+        unless table_name\\n+          table_name = schema_name\\n+          schema_name = nil\\n+        end\\n+\\n+        # strip partition number of a form `loose_foreign_keys_deleted_records_1`\\n+        table_name.gsub!(/_[0-9]+$/, '')\\n+\\n+        # Tables that are properly mapped\\n+        if 
gitlab_schema = tables_to_schema[table_name]\\n+          return gitlab_schema\\n+        end\\n+\\n+        # Tables that are deleted, but we still need to reference them\\n+        if gitlab_schema = DELETED_TABLES[table_name]\\n+          return gitlab_schema\\n+        end\\n+\\n+        # All tables from `information_schema.` are `:gitlab_shared`\\n+        return :gitlab_shared if schema_name == 'information_schema'\\n+\\n+        # All tables that start with `_test_` are shared and ignored\\n+        return :gitlab_shared if table_name.start_with?('_test_')\\n+\\n+        # All `pg_` tables are marked as `shared`\\n+        return :gitlab_shared if table_name.start_with?('pg_')\\n+\\n         # When undefined it's best to return a unique name so that we don't incorrectly assume that 2 undefined schemas belong on the same database\\n-        tables_to_schema[name] || :\\\"undefined_#{name}\\\"\\n+        :\\\"undefined_#{table_name}\\\"\\n       end\\n \\n       def self.tables_to_schema\\n\"},{\"old_path\":\"lib/gitlab/database/load_balancing/configuration.rb\",\"new_path\":\"lib/gitlab/database/load_balancing/configuration.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -77,6 +77,10 @@ def primary_connection_specification_name\\n           (@primary_model || @model).connection_specification_name\\n         end\\n \\n+        def primary_db_config\\n+          (@primary_model || @model).connection_db_config\\n+        end\\n+\\n         def replica_db_config\\n           @model.connection_db_config\\n         end\\n\"},{\"old_path\":\"lib/gitlab/database/query_analyzer.rb\",\"new_path\":\"lib/gitlab/database/query_analyzer.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,60 @@\\n+# frozen_string_literal: true\\n+\\n+module Gitlab\\n+  module Database\\n+    # The purpose of this class is to implement 
a various query analyzers based on `pg_query`\\n+    # And process them all via `Gitlab::Database::QueryAnalyzers::*`\\n+    class QueryAnalyzer\\n+      ANALYZERS = [].freeze\\n+\\n+      Parsed = Struct.new(\\n+        :sql, :connection, :pg\\n+      )\\n+\\n+      def hook!\\n+        @subscriber = ActiveSupport::Notifications.subscribe('sql.active_record') do |event|\\n+          process_sql(event.payload[:sql], event.payload[:connection])\\n+        end\\n+      end\\n+\\n+      private\\n+\\n+      def process_sql(sql, connection)\\n+        analyzers = enabled_analyzers(connection)\\n+        return unless analyzers.any?\\n+\\n+        parsed = parse(sql, connection)\\n+        return unless parsed\\n+\\n+        analyzers.each do |analyzer|\\n+          analyzer.analyze(parsed)\\n+        rescue =\\u003e e # rubocop:disable Style/RescueStandardError\\n+          # We catch all standard errors to prevent validation errors to introduce fatal errors in production\\n+          Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)\\n+        end\\n+      end\\n+\\n+      def enabled_analyzers(connection)\\n+        ANALYZERS.select do |analyzer|\\n+          analyzer.enabled?(connection)\\n+        rescue StandardError =\\u003e e # rubocop:disable Style/RescueStandardError\\n+          # We catch all standard errors to prevent validation errors to introduce fatal errors in production\\n+          Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)\\n+        end\\n+      end\\n+\\n+      def parse(sql, connection)\\n+        parsed = PgQuery.parse(sql)\\n+        return unless parsed\\n+\\n+        normalized = PgQuery.normalize(sql)\\n+        Parsed.new(normalized, connection, parsed)\\n+      rescue PgQuery::ParseError =\\u003e e\\n+        # Ignore PgQuery parse errors (due to depth limit or other reasons)\\n+        Gitlab::ErrorTracking.track_exception(e)\\n+\\n+        nil\\n+      end\\n+    end\\n+  
end\\n+end\\n\"},{\"old_path\":\"lib/gitlab/database/query_analyzers/base.rb\",\"new_path\":\"lib/gitlab/database/query_analyzers/base.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,17 @@\\n+# frozen_string_literal: true\\n+\\n+module Gitlab\\n+  module Database\\n+    module QueryAnalyzers\\n+      class Base\\n+        def self.enabled?(connection)\\n+          raise NotImplementedError\\n+        end\\n+\\n+        def self.analyze(parsed)\\n+          raise NotImplementedError\\n+        end\\n+      end\\n+    end\\n+  end\\n+end\\n\"},{\"old_path\":\"lib/gitlab/graphql/known_operations.rb\",\"new_path\":\"lib/gitlab/graphql/known_operations.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,40 @@\\n+# frozen_string_literal: true\\n+\\n+module Gitlab\\n+  module Graphql\\n+    class KnownOperations\\n+      Operation = Struct.new(:name) do\\n+        def to_caller_id\\n+          \\\"graphql:#{name}\\\"\\n+        end\\n+      end\\n+\\n+      ANONYMOUS = Operation.new(\\\"anonymous\\\").freeze\\n+      UNKNOWN = Operation.new(\\\"unknown\\\").freeze\\n+\\n+      def self.default\\n+        @default ||= self.new(Gitlab::Webpack::GraphqlKnownOperations.load)\\n+      end\\n+\\n+      def initialize(operation_names)\\n+        @operation_hash = operation_names\\n+          .map { |name| Operation.new(name).freeze }\\n+          .concat([ANONYMOUS, UNKNOWN])\\n+          .index_by(\\u0026:name)\\n+      end\\n+\\n+      # Returns the known operation from the given ::GraphQL::Query object\\n+      def from_query(query)\\n+        operation_name = query.selected_operation_name\\n+\\n+        return ANONYMOUS unless operation_name\\n+\\n+        @operation_hash[operation_name] || UNKNOWN\\n+      end\\n+\\n+      def operations\\n+        @operation_hash.values\\n+      end\\n+    end\\n+  
end\\n+end\\n\"},{\"old_path\":\"lib/gitlab/sidekiq_config.rb\",\"new_path\":\"lib/gitlab/sidekiq_config.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -6,11 +6,13 @@ module Gitlab\\n   module SidekiqConfig\\n     FOSS_QUEUE_CONFIG_PATH = 'app/workers/all_queues.yml'\\n     EE_QUEUE_CONFIG_PATH = 'ee/app/workers/all_queues.yml'\\n+    JH_QUEUE_CONFIG_PATH = 'jh/app/workers/all_queues.yml'\\n     SIDEKIQ_QUEUES_PATH = 'config/sidekiq_queues.yml'\\n \\n     QUEUE_CONFIG_PATHS = [\\n       FOSS_QUEUE_CONFIG_PATH,\\n-      (EE_QUEUE_CONFIG_PATH if Gitlab.ee?)\\n+      (EE_QUEUE_CONFIG_PATH if Gitlab.ee?),\\n+      (JH_QUEUE_CONFIG_PATH if Gitlab.jh?)\\n     ].compact.freeze\\n \\n     # This maps workers not in our application code to queues. We need\\n@@ -33,7 +35,7 @@ module SidekiqConfig\\n         weight: 2,\\n         tags: []\\n       )\\n-    }.transform_values { |worker| Gitlab::SidekiqConfig::Worker.new(worker, ee: false) }.freeze\\n+    }.transform_values { |worker| Gitlab::SidekiqConfig::Worker.new(worker, ee: false, jh: false) }.freeze\\n \\n     class \\u003c\\u003c self\\n       include Gitlab::SidekiqConfig::CliMethods\\n@@ -58,10 +60,14 @@ def workers\\n         @workers ||= begin\\n           result = []\\n           result.concat(DEFAULT_WORKERS.values)\\n-          result.concat(find_workers(Rails.root.join('app', 'workers'), ee: false))\\n+          result.concat(find_workers(Rails.root.join('app', 'workers'), ee: false, jh: false))\\n \\n           if Gitlab.ee?\\n-            result.concat(find_workers(Rails.root.join('ee', 'app', 'workers'), ee: true))\\n+            result.concat(find_workers(Rails.root.join('ee', 'app', 'workers'), ee: true, jh: false))\\n+          end\\n+\\n+          if Gitlab.jh?\\n+            result.concat(find_workers(Rails.root.join('jh', 'app', 'workers'), ee: false, jh: true))\\n           end\\n \\n           result\\n@@ -69,16 +75,26 @@ 
def workers\\n       end\\n \\n       def workers_for_all_queues_yml\\n-        workers.partition(\\u0026:ee?).reverse.map(\\u0026:sort)\\n+        workers.each_with_object([[], [], []]) do |worker, array|\\n+          if worker.jh?\\n+            array[2].push(worker)\\n+          elsif worker.ee?\\n+            array[1].push(worker)\\n+          else\\n+            array[0].push(worker)\\n+          end\\n+        end.map(\\u0026:sort)\\n       end\\n \\n       # YAML.load_file is OK here as we control the file contents\\n       def all_queues_yml_outdated?\\n-        foss_workers, ee_workers = workers_for_all_queues_yml\\n+        foss_workers, ee_workers, jh_workers = workers_for_all_queues_yml\\n \\n         return true if foss_workers != YAML.load_file(FOSS_QUEUE_CONFIG_PATH)\\n \\n-        Gitlab.ee? \\u0026\\u0026 ee_workers != YAML.load_file(EE_QUEUE_CONFIG_PATH)\\n+        return true if Gitlab.ee? \\u0026\\u0026 ee_workers != YAML.load_file(EE_QUEUE_CONFIG_PATH)\\n+\\n+        Gitlab.jh? 
\\u0026\\u0026 File.exist?(JH_QUEUE_CONFIG_PATH) \\u0026\\u0026 jh_workers != YAML.load_file(JH_QUEUE_CONFIG_PATH)\\n       end\\n \\n       def queues_for_sidekiq_queues_yml\\n@@ -120,14 +136,14 @@ def current_worker_queue_mappings\\n \\n       private\\n \\n-      def find_workers(root, ee:)\\n+      def find_workers(root, ee:, jh:)\\n         concerns = root.join('concerns').to_s\\n \\n         Dir[root.join('**', '*.rb')]\\n           .reject { |path| path.start_with?(concerns) }\\n           .map { |path| worker_from_path(path, root) }\\n           .select { |worker| worker \\u003c Sidekiq::Worker }\\n-          .map { |worker| Gitlab::SidekiqConfig::Worker.new(worker, ee: ee) }\\n+          .map { |worker| Gitlab::SidekiqConfig::Worker.new(worker, ee: ee, jh: jh) }\\n       end\\n \\n       def worker_from_path(path, root)\\n\"},{\"old_path\":\"lib/gitlab/sidekiq_config/cli_methods.rb\",\"new_path\":\"lib/gitlab/sidekiq_config/cli_methods.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -18,6 +18,7 @@ module CliMethods\\n       QUEUE_CONFIG_PATHS = begin\\n         result = %w[app/workers/all_queues.yml]\\n         result \\u003c\\u003c 'ee/app/workers/all_queues.yml' if Gitlab.ee?\\n+        result \\u003c\\u003c 'jh/app/workers/all_queues.yml' if Gitlab.jh?\\n         result\\n       end.freeze\\n \\n\"},{\"old_path\":\"lib/gitlab/sidekiq_config/worker.rb\",\"new_path\":\"lib/gitlab/sidekiq_config/worker.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -13,15 +13,20 @@ class Worker\\n                :worker_has_external_dependencies?,\\n                to: :klass\\n \\n-      def initialize(klass, ee:)\\n+      def initialize(klass, ee:, jh: false)\\n         @klass = klass\\n         @ee = ee\\n+        @jh = jh\\n       end\\n \\n       def ee?\\n         @ee\\n       end\\n \\n+      def jh?\\n+     
   @jh\\n+      end\\n+\\n       def ==(other)\\n         to_yaml == case other\\n                    when self.class\\n\"},{\"old_path\":\"lib/gitlab/webpack/file_loader.rb\",\"new_path\":\"lib/gitlab/webpack/file_loader.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,65 @@\\n+# frozen_string_literal: true\\n+\\n+require 'net/http'\\n+require 'uri'\\n+\\n+module Gitlab\\n+  module Webpack\\n+    class FileLoader\\n+      class BaseError \\u003c StandardError\\n+        attr_reader :original_error, :uri\\n+\\n+        def initialize(uri, orig)\\n+          super orig.message\\n+          @uri = uri.to_s\\n+          @original_error = orig\\n+        end\\n+      end\\n+\\n+      StaticLoadError = Class.new(BaseError)\\n+      DevServerLoadError = Class.new(BaseError)\\n+      DevServerSSLError = Class.new(BaseError)\\n+\\n+      def self.load(path)\\n+        if Gitlab.config.webpack.dev_server.enabled\\n+          self.load_from_dev_server(path)\\n+        else\\n+          self.load_from_static(path)\\n+        end\\n+      end\\n+\\n+      def self.load_from_dev_server(path)\\n+        host = Gitlab.config.webpack.dev_server.host\\n+        port = Gitlab.config.webpack.dev_server.port\\n+        scheme = Gitlab.config.webpack.dev_server.https ? 
'https' : 'http'\\n+        uri = Addressable::URI.new(scheme: scheme, host: host, port: port, path: self.dev_server_path(path))\\n+\\n+        # localhost could be blocked via Gitlab::HTTP\\n+        response = HTTParty.get(uri.to_s, verify: false) # rubocop:disable Gitlab/HTTParty\\n+\\n+        return response.body if response.code == 200\\n+\\n+        raise \\\"HTTP error #{response.code}\\\"\\n+      rescue OpenSSL::SSL::SSLError, EOFError =\\u003e e\\n+        raise DevServerSSLError.new(uri, e)\\n+      rescue StandardError =\\u003e e\\n+        raise DevServerLoadError.new(uri, e)\\n+      end\\n+\\n+      def self.load_from_static(path)\\n+        file_uri = ::Rails.root.join(\\n+          Gitlab.config.webpack.output_dir,\\n+          path\\n+        )\\n+\\n+        File.read(file_uri)\\n+      rescue StandardError =\\u003e e\\n+        raise StaticLoadError.new(file_uri, e)\\n+      end\\n+\\n+      def self.dev_server_path(path)\\n+        \\\"/#{Gitlab.config.webpack.public_path}/#{path}\\\"\\n+      end\\n+    end\\n+  end\\n+end\\n\"},{\"old_path\":\"lib/gitlab/webpack/graphql_known_operations.rb\",\"new_path\":\"lib/gitlab/webpack/graphql_known_operations.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,25 @@\\n+# frozen_string_literal: true\\n+\\n+module Gitlab\\n+  module Webpack\\n+    class GraphqlKnownOperations\\n+      class \\u003c\\u003c self\\n+        include Gitlab::Utils::StrongMemoize\\n+\\n+        def clear_memoization!\\n+          clear_memoization(:graphql_known_operations)\\n+        end\\n+\\n+        def load\\n+          strong_memoize(:graphql_known_operations) do\\n+            data = ::Gitlab::Webpack::FileLoader.load(\\\"graphql_known_operations.yml\\\")\\n+\\n+            YAML.safe_load(data)\\n+          rescue StandardError\\n+            []\\n+          end\\n+        end\\n+      end\\n+    end\\n+  
end\\n+end\\n\"},{\"old_path\":\"lib/gitlab/webpack/manifest.rb\",\"new_path\":\"lib/gitlab/webpack/manifest.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -1,8 +1,5 @@\\n # frozen_string_literal: true\\n \\n-require 'net/http'\\n-require 'uri'\\n-\\n module Gitlab\\n   module Webpack\\n     class Manifest\\n@@ -78,49 +75,16 @@ def manifest\\n         end\\n \\n         def load_manifest\\n-          data = if Gitlab.config.webpack.dev_server.enabled\\n-                   load_dev_server_manifest\\n-                 else\\n-                   load_static_manifest\\n-                 end\\n+          data = Gitlab::Webpack::FileLoader.load(Gitlab.config.webpack.manifest_filename)\\n \\n           Gitlab::Json.parse(data)\\n-        end\\n-\\n-        def load_dev_server_manifest\\n-          host = Gitlab.config.webpack.dev_server.host\\n-          port = Gitlab.config.webpack.dev_server.port\\n-          scheme = Gitlab.config.webpack.dev_server.https ? 'https' : 'http'\\n-          uri = Addressable::URI.new(scheme: scheme, host: host, port: port, path: dev_server_path)\\n-\\n-          # localhost could be blocked via Gitlab::HTTP\\n-          response = HTTParty.get(uri.to_s, verify: false) # rubocop:disable Gitlab/HTTParty\\n-\\n-          return response.body if response.code == 200\\n-\\n-          raise \\\"HTTP error #{response.code}\\\"\\n-        rescue OpenSSL::SSL::SSLError, EOFError =\\u003e e\\n+        rescue Gitlab::Webpack::FileLoader::StaticLoadError =\\u003e e\\n+          raise ManifestLoadError.new(\\\"Could not load compiled manifest from #{e.uri}.\\\\n\\\\nHave you run `rake gitlab:assets:compile`?\\\", e.original_error)\\n+        rescue Gitlab::Webpack::FileLoader::DevServerSSLError =\\u003e e\\n           ssl_status = Gitlab.config.webpack.dev_server.https ? 
' over SSL' : ''\\n-          raise ManifestLoadError.new(\\\"Could not connect to webpack-dev-server at #{uri}#{ssl_status}.\\\\n\\\\nIs SSL enabled? Check that settings in `gitlab.yml` and webpack-dev-server match.\\\", e)\\n-        rescue StandardError =\\u003e e\\n-          raise ManifestLoadError.new(\\\"Could not load manifest from webpack-dev-server at #{uri}.\\\\n\\\\nIs webpack-dev-server running? Try running `gdk status webpack` or `gdk tail webpack`.\\\", e)\\n-        end\\n-\\n-        def load_static_manifest\\n-          File.read(static_manifest_path)\\n-        rescue StandardError =\\u003e e\\n-          raise ManifestLoadError.new(\\\"Could not load compiled manifest from #{static_manifest_path}.\\\\n\\\\nHave you run `rake gitlab:assets:compile`?\\\", e)\\n-        end\\n-\\n-        def static_manifest_path\\n-          ::Rails.root.join(\\n-            Gitlab.config.webpack.output_dir,\\n-            Gitlab.config.webpack.manifest_filename\\n-          )\\n-        end\\n-\\n-        def dev_server_path\\n-          \\\"/#{Gitlab.config.webpack.public_path}/#{Gitlab.config.webpack.manifest_filename}\\\"\\n+          raise ManifestLoadError.new(\\\"Could not connect to webpack-dev-server at #{e.uri}#{ssl_status}.\\\\n\\\\nIs SSL enabled? Check that settings in `gitlab.yml` and webpack-dev-server match.\\\", e.original_error)\\n+        rescue Gitlab::Webpack::FileLoader::DevServerLoadError =\\u003e e\\n+          raise ManifestLoadError.new(\\\"Could not load manifest from webpack-dev-server at #{e.uri}.\\\\n\\\\nIs webpack-dev-server running? 
Try running `gdk status webpack` or `gdk tail webpack`.\\\", e.original_error)\\n         end\\n       end\\n     end\\n\"},{\"old_path\":\"lib/sidebars/projects/menus/infrastructure_menu.rb\",\"new_path\":\"lib/sidebars/projects/menus/infrastructure_menu.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -100,7 +100,7 @@ def google_cloud_menu_item\\n           ::Sidebars::MenuItem.new(\\n             title: _('Google Cloud'),\\n             link: project_google_cloud_index_path(context.project),\\n-            active_routes: {},\\n+            active_routes: { controller: :google_cloud },\\n             item_id: :google_cloud\\n           )\\n         end\\n\"},{\"old_path\":\"lib/tasks/gitlab/sidekiq.rake\",\"new_path\":\"lib/tasks/gitlab/sidekiq.rake\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -36,13 +36,17 @@ namespace :gitlab do\\n           # Do not edit it manually!\\n         BANNER\\n \\n-        foss_workers, ee_workers = Gitlab::SidekiqConfig.workers_for_all_queues_yml\\n+        foss_workers, ee_workers, jh_workers = Gitlab::SidekiqConfig.workers_for_all_queues_yml\\n \\n         write_yaml(Gitlab::SidekiqConfig::FOSS_QUEUE_CONFIG_PATH, banner, foss_workers)\\n \\n         if Gitlab.ee?\\n           write_yaml(Gitlab::SidekiqConfig::EE_QUEUE_CONFIG_PATH, banner, ee_workers)\\n         end\\n+\\n+        if Gitlab.jh?\\n+          write_yaml(Gitlab::SidekiqConfig::JH_QUEUE_CONFIG_PATH, banner, jh_workers)\\n+        end\\n       end\\n \\n       desc 'GitLab | Sidekiq | Validate that all_queues.yml matches worker definitions'\\n@@ -57,6 +61,7 @@ namespace :gitlab do\\n \\n             - #{Gitlab::SidekiqConfig::FOSS_QUEUE_CONFIG_PATH}\\n             - #{Gitlab::SidekiqConfig::EE_QUEUE_CONFIG_PATH}\\n+            #{\\\"- \\\" + Gitlab::SidekiqConfig::JH_QUEUE_CONFIG_PATH if Gitlab.jh?}\\n \\n         
  MSG\\n         end\\n\"},{\"old_path\":\"locale/gitlab.pot\",\"new_path\":\"locale/gitlab.pot\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -1107,9 +1107,6 @@ msgstr \\\"\\\"\\n msgid \\\"(check progress)\\\"\\n msgstr \\\"\\\"\\n \\n-msgid \\\"(commits will be squashed)\\\"\\n-msgstr \\\"\\\"\\n-\\n msgid \\\"(deleted)\\\"\\n msgstr \\\"\\\"\\n \\n@@ -1128,6 +1125,11 @@ msgstr \\\"\\\"\\n msgid \\\"(revoked)\\\"\\n msgstr \\\"\\\"\\n \\n+msgid \\\"(squashes %d commit)\\\"\\n+msgid_plural \\\"(squashes %d commits)\\\"\\n+msgstr[0] \\\"\\\"\\n+msgstr[1] \\\"\\\"\\n+\\n msgid \\\"(this user)\\\"\\n msgstr \\\"\\\"\\n \\n@@ -4037,6 +4039,9 @@ msgid_plural \\\"ApplicationSettings|Approve %d users\\\"\\n msgstr[0] \\\"\\\"\\n msgstr[1] \\\"\\\"\\n \\n+msgid \\\"ApplicationSettings|Approve users\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"ApplicationSettings|Approve users in the pending approval status?\\\"\\n msgstr \\\"\\\"\\n \\n@@ -4045,6 +4050,9 @@ msgid_plural \\\"ApplicationSettings|By making this change, you will automatically\\n msgstr[0] \\\"\\\"\\n msgstr[1] \\\"\\\"\\n \\n+msgid \\\"ApplicationSettings|By making this change, you will automatically approve all users in pending approval status.\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"ApplicationSettings|Denied domains for sign-ups\\\"\\n msgstr \\\"\\\"\\n \\n@@ -6547,6 +6555,9 @@ msgstr \\\"\\\"\\n msgid \\\"Changes to the title have not been saved\\\"\\n msgstr \\\"\\\"\\n \\n+msgid \\\"Changing any setting here requires an application restart\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"Changing group URL can have unintended side effects.\\\"\\n msgstr \\\"\\\"\\n \\n@@ -7190,6 +7201,9 @@ msgstr \\\"\\\"\\n msgid \\\"Clients\\\"\\n msgstr \\\"\\\"\\n \\n+msgid \\\"Clientside DSN\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"Clone\\\"\\n msgstr \\\"\\\"\\n \\n@@ -8683,6 +8697,9 @@ msgstr \\\"\\\"\\n msgid \\\"Configure Secret Detection in 
`.gitlab-ci.yml`, creating this file if it does not already exist\\\"\\n msgstr \\\"\\\"\\n \\n+msgid \\\"Configure Sentry integration for error tracking\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"Configure Tracing\\\"\\n msgstr \\\"\\\"\\n \\n@@ -10391,6 +10408,9 @@ msgstr \\\"\\\"\\n msgid \\\"DORA4Metrics|The chart displays the median time between a merge request being merged and deployed to production environment(s) that are based on the %{linkStart}deployment_tier%{linkEnd} value.\\\"\\n msgstr \\\"\\\"\\n \\n+msgid \\\"DSN\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"Dashboard\\\"\\n msgstr \\\"\\\"\\n \\n@@ -11158,6 +11178,12 @@ msgstr \\\"\\\"\\n msgid \\\"Deleted projects cannot be restored!\\\"\\n msgstr \\\"\\\"\\n \\n+msgid \\\"Deletes the source branch\\\"\\n+msgstr \\\"\\\"\\n+\\n+msgid \\\"Deletes the source branch.\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"Deleting\\\"\\n msgstr \\\"\\\"\\n \\n@@ -12208,6 +12234,9 @@ msgstr \\\"\\\"\\n msgid \\\"Does not apply to projects in personal namespaces, which are deleted immediately on request.\\\"\\n msgstr \\\"\\\"\\n \\n+msgid \\\"Does not delete the source branch.\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"Domain\\\"\\n msgstr \\\"\\\"\\n \\n@@ -12703,6 +12732,9 @@ msgstr \\\"\\\"\\n msgid \\\"Enable SSL verification\\\"\\n msgstr \\\"\\\"\\n \\n+msgid \\\"Enable Sentry error tracking\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"Enable Service Ping\\\"\\n msgstr \\\"\\\"\\n \\n@@ -16109,6 +16141,9 @@ msgstr \\\"\\\"\\n msgid \\\"GraphViewType|Stage\\\"\\n msgstr \\\"\\\"\\n \\n+msgid \\\"Graphs\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"Gravatar\\\"\\n msgstr \\\"\\\"\\n \\n@@ -29691,6 +29726,9 @@ msgstr \\\"\\\"\\n msgid \\\"Runners|New runner, has not connected yet\\\"\\n msgstr \\\"\\\"\\n \\n+msgid \\\"Runners|No recent contact from this runner; last contact was %{timeAgo}\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"Runners|Not available to run jobs\\\"\\n msgstr \\\"\\\"\\n \\n@@ -29742,6 +29780,9 @@ msgstr \\\"\\\"\\n 
msgid \\\"Runners|Runner #%{runner_id}\\\"\\n msgstr \\\"\\\"\\n \\n+msgid \\\"Runners|Runner ID\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"Runners|Runner assigned to project.\\\"\\n msgstr \\\"\\\"\\n \\n@@ -29751,6 +29792,9 @@ msgstr \\\"\\\"\\n msgid \\\"Runners|Runner is online, last contact was %{runner_contact} ago\\\"\\n msgstr \\\"\\\"\\n \\n+msgid \\\"Runners|Runner is online; last contact was %{timeAgo}\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"Runners|Runner is paused, last contact was %{runner_contact} ago\\\"\\n msgstr \\\"\\\"\\n \\n@@ -29781,12 +29825,18 @@ msgstr \\\"\\\"\\n msgid \\\"Runners|Something went wrong while fetching the tags suggestions\\\"\\n msgstr \\\"\\\"\\n \\n+msgid \\\"Runners|Status\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"Runners|Stop the runner from accepting new jobs.\\\"\\n msgstr \\\"\\\"\\n \\n msgid \\\"Runners|Tags\\\"\\n msgstr \\\"\\\"\\n \\n+msgid \\\"Runners|This runner has never connected to this instance\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"Runners|This runner is associated with one or more projects.\\\"\\n msgstr \\\"\\\"\\n \\n@@ -29853,6 +29903,15 @@ msgstr \\\"\\\"\\n msgid \\\"Runners|locked\\\"\\n msgstr \\\"\\\"\\n \\n+msgid \\\"Runners|not connected\\\"\\n+msgstr \\\"\\\"\\n+\\n+msgid \\\"Runners|offline\\\"\\n+msgstr \\\"\\\"\\n+\\n+msgid \\\"Runners|online\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"Runners|paused\\\"\\n msgstr \\\"\\\"\\n \\n@@ -32473,12 +32532,6 @@ msgstr \\\"\\\"\\n msgid \\\"Source branch\\\"\\n msgstr \\\"\\\"\\n \\n-msgid \\\"Source branch will be deleted.\\\"\\n-msgstr \\\"\\\"\\n-\\n-msgid \\\"Source branch will not be deleted.\\\"\\n-msgstr \\\"\\\"\\n-\\n msgid \\\"Source branch: %{source_branch_open}%{source_branch}%{source_branch_close}\\\"\\n msgstr \\\"\\\"\\n \\n@@ -34136,7 +34189,7 @@ msgstr \\\"\\\"\\n msgid \\\"The connection will time out after %{timeout}. 
For repositories that take longer, use a clone/push combination.\\\"\\n msgstr \\\"\\\"\\n \\n-msgid \\\"The contact does not belong to the same group as the issue.\\\"\\n+msgid \\\"The contact does not belong to the same group as the issue\\\"\\n msgstr \\\"\\\"\\n \\n msgid \\\"The content of this page is not encoded in UTF-8. Edits can only be made via the Git repository.\\\"\\n@@ -34474,9 +34527,6 @@ msgstr \\\"\\\"\\n msgid \\\"The snippet is visible to any logged in user except external users.\\\"\\n msgstr \\\"\\\"\\n \\n-msgid \\\"The source branch will be deleted\\\"\\n-msgstr \\\"\\\"\\n-\\n msgid \\\"The specified tab is invalid, please select another\\\"\\n msgstr \\\"\\\"\\n \\n@@ -40986,13 +41036,13 @@ msgstr \\\"\\\"\\n msgid \\\"most recent deployment\\\"\\n msgstr \\\"\\\"\\n \\n-msgid \\\"mrWidgetCommitsAdded|%{commitCount} and %{mergeCommitCount} will be added to %{targetBranch}%{squashedCommits}.\\\"\\n+msgid \\\"mrWidgetCommitsAdded|1 merge commit\\\"\\n msgstr \\\"\\\"\\n \\n-msgid \\\"mrWidgetCommitsAdded|%{commitCount} will be added to %{targetBranch}.\\\"\\n+msgid \\\"mrWidgetCommitsAdded|Adds %{commitCount} and %{mergeCommitCount} to %{targetBranch}%{squashedCommits}.\\\"\\n msgstr \\\"\\\"\\n \\n-msgid \\\"mrWidgetCommitsAdded|1 merge commit\\\"\\n+msgid \\\"mrWidgetCommitsAdded|Adds %{commitCount} to %{targetBranch}.\\\"\\n msgstr \\\"\\\"\\n \\n msgid \\\"mrWidgetNothingToMerge|This merge request contains no changes.\\\"\\n@@ -41102,6 +41152,9 @@ msgstr \\\"\\\"\\n msgid \\\"mrWidget|Delete source branch\\\"\\n msgstr \\\"\\\"\\n \\n+msgid \\\"mrWidget|Deletes the source branch\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"mrWidget|Deployment statistics are not available currently\\\"\\n msgstr \\\"\\\"\\n \\n@@ -41111,6 +41164,9 @@ msgstr \\\"\\\"\\n msgid \\\"mrWidget|Dismiss\\\"\\n msgstr \\\"\\\"\\n \\n+msgid \\\"mrWidget|Does not delete the source branch\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"mrWidget|Email patches\\\"\\n msgstr 
\\\"\\\"\\n \\n@@ -41167,6 +41223,9 @@ msgstr \\\"\\\"\\n msgid \\\"mrWidget|Merged by\\\"\\n msgstr \\\"\\\"\\n \\n+msgid \\\"mrWidget|Merges changes into\\\"\\n+msgstr \\\"\\\"\\n+\\n msgid \\\"mrWidget|Merging! Changes are being shipped…\\\"\\n msgstr \\\"\\\"\\n \\n@@ -41251,9 +41310,6 @@ msgstr \\\"\\\"\\n msgid \\\"mrWidget|The changes were not merged into\\\"\\n msgstr \\\"\\\"\\n \\n-msgid \\\"mrWidget|The changes will be merged into\\\"\\n-msgstr \\\"\\\"\\n-\\n msgid \\\"mrWidget|The pipeline for this merge request did not complete. Push a new commit to fix the failure, or check the %{linkStart}troubleshooting documentation%{linkEnd} to see other possible actions.\\\"\\n msgstr \\\"\\\"\\n \\n@@ -41269,12 +41325,6 @@ msgstr \\\"\\\"\\n msgid \\\"mrWidget|The source branch is being deleted\\\"\\n msgstr \\\"\\\"\\n \\n-msgid \\\"mrWidget|The source branch will be deleted\\\"\\n-msgstr \\\"\\\"\\n-\\n-msgid \\\"mrWidget|The source branch will not be deleted\\\"\\n-msgstr \\\"\\\"\\n-\\n msgid \\\"mrWidget|There are merge conflicts\\\"\\n msgstr \\\"\\\"\\n \\n\"},{\"old_path\":\"qa/Gemfile.lock\",\"new_path\":\"qa/Gemfile.lock\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -227,10 +227,10 @@ GEM\\n     watir (6.19.1)\\n       regexp_parser (\\u003e= 1.2, \\u003c 3)\\n       selenium-webdriver (\\u003e= 3.142.7)\\n-    webdrivers (4.7.0)\\n+    webdrivers (5.0.0)\\n       nokogiri (~\\u003e 1.6)\\n       rubyzip (\\u003e= 1.3.0)\\n-      selenium-webdriver (\\u003e 3.141, \\u003c 5.0)\\n+      selenium-webdriver (~\\u003e 4.0)\\n     xpath (3.2.0)\\n       nokogiri (~\\u003e 1.8)\\n     zeitwerk (2.4.2)\\n@@ -263,10 +263,10 @@ DEPENDENCIES\\n   rspec-retry (~\\u003e 0.6.1)\\n   rspec_junit_formatter (~\\u003e 0.4.1)\\n   ruby-debug-ide (~\\u003e 0.7.0)\\n-  selenium-webdriver (~\\u003e 4.0.0.rc1)\\n+  selenium-webdriver (~\\u003e 4.0)\\n   timecop (~\\u003e 0.9.1)\\n-  webdrivers 
(~\\u003e 4.6)\\n+  webdrivers (~\\u003e 5.0)\\n   zeitwerk (~\\u003e 2.4)\\n \\n BUNDLED WITH\\n-   2.2.29\\n+   2.2.30\\n\"},{\"old_path\":\"qa/qa/page/group/settings/package_registries.rb\",\"new_path\":\"qa/qa/page/group/settings/package_registries.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -1,5 +1,4 @@\\n # frozen_string_literal: true\\n-\\n module QA\\n   module Page\\n     module Group\\n@@ -20,22 +19,33 @@ class PackageRegistries \\u003c QA::Page::Base\\n \\n           def set_allow_duplicates_disabled\\n             expand_content :package_registry_settings_content do\\n-              click_element(:allow_duplicates_toggle) if duplicates_enabled?\\n+              click_on_allow_duplicates_button if duplicates_enabled?\\n             end\\n           end\\n \\n           def set_allow_duplicates_enabled\\n             expand_content :package_registry_settings_content do\\n-              click_element(:allow_duplicates_toggle) if duplicates_disabled?\\n+              click_on_allow_duplicates_button unless duplicates_enabled?\\n+            end\\n+          end\\n+\\n+          def click_on_allow_duplicates_button\\n+            with_allow_duplicates_button do |button|\\n+              button.click\\n             end\\n           end\\n \\n           def duplicates_enabled?\\n-            has_element?(:allow_duplicates_label, text: 'Allow duplicates')\\n+            with_allow_duplicates_button do |button|\\n+              button[:class].include?('is-checked')\\n+            end\\n           end\\n \\n-          def duplicates_disabled?\\n-            has_element?(:allow_duplicates_label, text: 'Do not allow duplicates')\\n+          def with_allow_duplicates_button\\n+            within_element :allow_duplicates_toggle do\\n+              toggle = find('button.gl-toggle')\\n+              yield(toggle)\\n+            end\\n           end\\n \\n           def 
has_dependency_proxy_enabled?\\n\"},{\"old_path\":\"qa/qa/specs/features/browser_ui/1_manage/login/maintain_log_in_mixed_env_spec.rb\",\"new_path\":\"qa/qa/specs/features/browser_ui/1_manage/login/maintain_log_in_mixed_env_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -1,7 +1,7 @@\\n # frozen_string_literal: true\\n \\n module QA\\n-  RSpec.describe 'Manage', :mixed_env, :smoke, only: { subdomain: :staging } do\\n+  RSpec.describe 'Manage', only: { subdomain: :staging }, quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/344213', type: :stale } do\\n     describe 'basic user' do\\n       it 'remains logged in when redirected from canary to non-canary node', testcase: 'https://gitlab.com/gitlab-org/quality/testcases/-/quality/test_cases/2251' do\\n         Runtime::Browser.visit(:gitlab, Page::Main::Login)\\n\"},{\"old_path\":\"scripts/rspec_helpers.sh\",\"new_path\":\"scripts/rspec_helpers.sh\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -166,6 +166,7 @@ function rspec_paralellized_job() {\\n     export SUITE_FLAKY_RSPEC_REPORT_PATH=\\\"${FLAKY_RSPEC_SUITE_REPORT_PATH}\\\"\\n     export FLAKY_RSPEC_REPORT_PATH=\\\"rspec_flaky/all_${report_name}_report.json\\\"\\n     export NEW_FLAKY_RSPEC_REPORT_PATH=\\\"rspec_flaky/new_${report_name}_report.json\\\"\\n+    export SKIPPED_FLAKY_TESTS_REPORT_PATH=\\\"rspec_flaky/skipped_flaky_tests_${report_name}_report.txt\\\"\\n \\n     if [[ ! 
-f $FLAKY_RSPEC_REPORT_PATH ]]; then\\n       echo \\\"{}\\\" \\u003e \\\"${FLAKY_RSPEC_REPORT_PATH}\\\"\\n\"},{\"old_path\":\"lib/gitlab/sidekiq_cluster/cli.rb\",\"new_path\":\"sidekiq_cluster/cli.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"@@ -4,27 +4,21 @@\\n require 'logger'\\n require 'time'\\n \\n+# In environments where code is preloaded and cached such as `spring`,\\n+# we may run into \\\"already initialized\\\" warnings, hence the check.\\n+require_relative '../lib/gitlab' unless Object.const_defined?('Gitlab')\\n+require_relative '../lib/gitlab/utils'\\n+require_relative '../lib/gitlab/sidekiq_config/cli_methods'\\n+require_relative '../lib/gitlab/sidekiq_config/worker_matcher'\\n+require_relative '../lib/gitlab/sidekiq_logging/json_formatter'\\n+require_relative 'sidekiq_cluster'\\n+\\n module Gitlab\\n   module SidekiqCluster\\n     class CLI\\n-      CHECK_TERMINATE_INTERVAL_SECONDS = 1\\n-\\n-      # How long to wait when asking for a clean termination.\\n-      # It maps the Sidekiq default timeout:\\n-      # https://github.com/mperham/sidekiq/wiki/Signals#term\\n-      #\\n-      # This value is passed to Sidekiq's `-t` if none\\n-      # is given through arguments.\\n-      DEFAULT_SOFT_TIMEOUT_SECONDS = 25\\n-\\n-      # After surpassing the soft timeout.\\n-      DEFAULT_HARD_TIMEOUT_SECONDS = 5\\n-\\n       CommandError = Class.new(StandardError)\\n \\n       def initialize(log_output = $stderr)\\n-        require_relative '../../../lib/gitlab/sidekiq_logging/json_formatter'\\n-\\n         # As recommended by https://github.com/mperham/sidekiq/wiki/Advanced-Options#concurrency\\n         @max_concurrency = 50\\n         @min_concurrency = 0\\n\"},{\"old_path\":\"sidekiq_cluster/dependencies.rb\",\"new_path\":\"sidekiq_cluster/dependencies.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 
+1,6 @@\\n+# rubocop:disable Naming/FileName\\n+# frozen_string_literal: true\\n+\\n+require 'shellwords'\\n+\\n+# rubocop:enable Naming/FileName\\n\"},{\"old_path\":\"lib/gitlab/sidekiq_cluster.rb\",\"new_path\":\"sidekiq_cluster/sidekiq_cluster.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"@@ -1,9 +1,22 @@\\n # frozen_string_literal: true\\n \\n-require 'shellwords'\\n+require_relative 'dependencies'\\n \\n module Gitlab\\n   module SidekiqCluster\\n+    CHECK_TERMINATE_INTERVAL_SECONDS = 1\\n+\\n+    # How long to wait when asking for a clean termination.\\n+    # It maps the Sidekiq default timeout:\\n+    # https://github.com/mperham/sidekiq/wiki/Signals#term\\n+    #\\n+    # This value is passed to Sidekiq's `-t` if none\\n+    # is given through arguments.\\n+    DEFAULT_SOFT_TIMEOUT_SECONDS = 25\\n+\\n+    # After surpassing the soft timeout.\\n+    DEFAULT_HARD_TIMEOUT_SECONDS = 5\\n+\\n     # The signals that should terminate both the master and workers.\\n     TERMINATE_SIGNALS = %i(INT TERM).freeze\\n \\n@@ -62,7 +75,7 @@ def self.signal_processes(pids, signal)\\n     # directory - The directory of the Rails application.\\n     #\\n     # Returns an Array containing the PIDs of the started processes.\\n-    def self.start(queues, env: :development, directory: Dir.pwd, max_concurrency: 50, min_concurrency: 0, timeout: CLI::DEFAULT_SOFT_TIMEOUT_SECONDS, dryrun: false)\\n+    def self.start(queues, env: :development, directory: Dir.pwd, max_concurrency: 50, min_concurrency: 0, timeout: DEFAULT_SOFT_TIMEOUT_SECONDS, dryrun: false)\\n       queues.map.with_index do |pair, index|\\n         start_sidekiq(pair, env: env,\\n                             directory: 
directory,\\n\"},{\"old_path\":\"spec/lib/gitlab/sidekiq_cluster/cli_spec.rb\",\"new_path\":\"spec/commands/sidekiq_cluster/cli_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"@@ -3,9 +3,11 @@\\n require 'fast_spec_helper'\\n require 'rspec-parameterized'\\n \\n-RSpec.describe Gitlab::SidekiqCluster::CLI do\\n+require_relative '../../../sidekiq_cluster/cli'\\n+\\n+RSpec.describe Gitlab::SidekiqCluster::CLI do # rubocop:disable RSpec/FilePath\\n   let(:cli) { described_class.new('/dev/null') }\\n-  let(:timeout) { described_class::DEFAULT_SOFT_TIMEOUT_SECONDS }\\n+  let(:timeout) { Gitlab::SidekiqCluster::DEFAULT_SOFT_TIMEOUT_SECONDS }\\n   let(:default_options) do\\n     { env: 'test', directory: Dir.pwd, max_concurrency: 50, min_concurrency: 0, dryrun: false, timeout: timeout }\\n   end\\n@@ -103,7 +105,7 @@\\n \\n         it 'when not given', 'starts Sidekiq workers with default timeout' do\\n           expect(Gitlab::SidekiqCluster).to receive(:start)\\n-            .with([['foo']], default_options.merge(timeout: described_class::DEFAULT_SOFT_TIMEOUT_SECONDS))\\n+            .with([['foo']], default_options.merge(timeout: Gitlab::SidekiqCluster::DEFAULT_SOFT_TIMEOUT_SECONDS))\\n \\n           cli.run(%w(foo))\\n         end\\n@@ -271,7 +273,7 @@\\n       expect(Gitlab::SidekiqCluster).to receive(:signal_processes)\\n         .with([], \\\"-KILL\\\")\\n \\n-      stub_const(\\\"Gitlab::SidekiqCluster::CLI::CHECK_TERMINATE_INTERVAL_SECONDS\\\", 0.1)\\n+      stub_const(\\\"Gitlab::SidekiqCluster::CHECK_TERMINATE_INTERVAL_SECONDS\\\", 0.1)\\n       allow(cli).to receive(:terminate_timeout_seconds) { 1 }\\n \\n       cli.wait_for_termination\\n@@ -301,7 +303,7 @@\\n \\n         cli.run(%w(foo))\\n \\n-        stub_const(\\\"Gitlab::SidekiqCluster::CLI::CHECK_TERMINATE_INTERVAL_SECONDS\\\", 0.1)\\n+        stub_const(\\\"Gitlab::SidekiqCluster::CHECK_TERMINATE_INTERVAL_SECONDS\\\", 
0.1)\\n         allow(cli).to receive(:terminate_timeout_seconds) { 1 }\\n \\n         cli.wait_for_termination\\n\"},{\"old_path\":\"spec/controllers/concerns/renders_commits_spec.rb\",\"new_path\":\"spec/controllers/concerns/renders_commits_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -64,6 +64,12 @@ def go\\n         subject.prepare_commits_for_rendering(merge_request.commits.take(1))\\n       end\\n \\n+      # Populate Banzai::Filter::References::ReferenceCache\\n+      subject.prepare_commits_for_rendering(merge_request.commits)\\n+\\n+      # Reset lazy_latest_pipeline cache to simulate a new request\\n+      BatchLoader::Executor.clear_current\\n+\\n       expect do\\n         subject.prepare_commits_for_rendering(merge_request.commits)\\n         merge_request.commits.each(\\u0026:latest_pipeline)\\n\"},{\"old_path\":\"spec/features/graphql_known_operations_spec.rb\",\"new_path\":\"spec/features/graphql_known_operations_spec.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,29 @@\\n+# frozen_string_literal: true\\n+\\n+require 'spec_helper'\\n+\\n+# We need to distinguish between known and unknown GraphQL operations. 
This spec\\n+# tests that we set up Gitlab::Graphql::KnownOperations.default which requires\\n+# integration of FE queries, webpack plugin, and BE.\\n+RSpec.describe 'Graphql known operations', :js do\\n+  around do |example|\\n+    # Let's make sure we aren't receiving or leaving behind any side-effects\\n+    # https://gitlab.com/gitlab-org/gitlab/-/jobs/1743294100\\n+    ::Gitlab::Graphql::KnownOperations.instance_variable_set(:@default, nil)\\n+    ::Gitlab::Webpack::GraphqlKnownOperations.clear_memoization!\\n+\\n+    example.run\\n+\\n+    ::Gitlab::Graphql::KnownOperations.instance_variable_set(:@default, nil)\\n+    ::Gitlab::Webpack::GraphqlKnownOperations.clear_memoization!\\n+  end\\n+\\n+  it 'collects known Graphql operations from the code', :aggregate_failures do\\n+    # Check that we include some arbitrary operation name we expect\\n+    known_operations = Gitlab::Graphql::KnownOperations.default.operations.map(\\u0026:name)\\n+\\n+    expect(known_operations).to include(\\\"searchProjects\\\")\\n+    expect(known_operations.length).to be \\u003e 20\\n+    expect(known_operations).to all( match(%r{^[a-z]+}i) )\\n+  end\\n+end\\n\"},{\"old_path\":\"spec/features/issues/form_spec.rb\",\"new_path\":\"spec/features/issues/form_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -4,25 +4,29 @@\\n \\n RSpec.describe 'New/edit issue', :js do\\n   include ActionView::Helpers::JavaScriptHelper\\n-  include FormHelper\\n \\n   let_it_be(:project)   { create(:project) }\\n-  let_it_be(:user)      { create(:user)}\\n-  let_it_be(:user2)     { create(:user)}\\n+  let_it_be(:user)      { create(:user) }\\n+  let_it_be(:user2)     { create(:user) }\\n   let_it_be(:milestone) { create(:milestone, project: project) }\\n   let_it_be(:label)     { create(:label, project: project) }\\n   let_it_be(:label2)    { create(:label, project: project) }\\n   let_it_be(:issue)     { create(:issue, 
project: project, assignees: [user], milestone: milestone) }\\n \\n-  before do\\n-    stub_licensed_features(multiple_issue_assignees: false, issue_weights: false)\\n+  let(:current_user) { user }\\n \\n+  before_all do\\n     project.add_maintainer(user)\\n     project.add_maintainer(user2)\\n-    sign_in(user)\\n   end\\n \\n-  context 'new issue' do\\n+  before do\\n+    stub_licensed_features(multiple_issue_assignees: false, issue_weights: false)\\n+\\n+    sign_in(current_user)\\n+  end\\n+\\n+  describe 'new issue' do\\n     before do\\n       visit new_project_issue_path(project)\\n     end\\n@@ -235,29 +239,42 @@\\n     end\\n \\n     describe 'displays issue type options in the dropdown' do\\n+      shared_examples 'type option is visible' do |label:, identifier:|\\n+        it \\\"shows #{identifier} option\\\", :aggregate_failures do\\n+          page.within('[data-testid=\\\"issue-type-select-dropdown\\\"]') do\\n+            expect(page).to have_selector(%([data-testid=\\\"issue-type-#{identifier}-icon\\\"]))\\n+            expect(page).to have_content(label)\\n+          end\\n+        end\\n+      end\\n+\\n       before do\\n         page.within('.issue-form') do\\n           click_button 'Issue'\\n         end\\n       end\\n \\n-      it 'correctly displays the Issue type option with an icon', :aggregate_failures do\\n-        page.within('[data-testid=\\\"issue-type-select-dropdown\\\"]') do\\n-          expect(page).to have_selector('[data-testid=\\\"issue-type-issue-icon\\\"]')\\n-          expect(page).to have_content('Issue')\\n-        end\\n-      end\\n+      it_behaves_like 'type option is visible', label: 'Issue', identifier: :issue\\n+      it_behaves_like 'type option is visible', label: 'Incident', identifier: :incident\\n \\n-      it 'correctly displays the Incident type option with an icon', :aggregate_failures do\\n-        page.within('[data-testid=\\\"issue-type-select-dropdown\\\"]') do\\n-          expect(page).to 
have_selector('[data-testid=\\\"issue-type-incident-icon\\\"]')\\n-          expect(page).to have_content('Incident')\\n+      context 'when user is guest' do\\n+        let_it_be(:guest) { create(:user) }\\n+\\n+        let(:current_user) { guest }\\n+\\n+        before_all do\\n+          project.add_guest(guest)\\n         end\\n+\\n+        it_behaves_like 'type option is visible', label: 'Issue', identifier: :issue\\n+        it_behaves_like 'type option is visible', label: 'Incident', identifier: :incident\\n       end\\n     end\\n \\n     describe 'milestone' do\\n-      let!(:milestone) { create(:milestone, title: '\\\"\\u003e\\u0026lt;img src=x onerror=alert(document.domain)\\u0026gt;', project: project) }\\n+      let!(:milestone) do\\n+        create(:milestone, title: '\\\"\\u003e\\u0026lt;img src=x onerror=alert(document.domain)\\u0026gt;', project: project)\\n+      end\\n \\n       it 'escapes milestone' do\\n         click_button 'Milestone'\\n@@ -274,7 +291,7 @@\\n     end\\n   end\\n \\n-  context 'edit issue' do\\n+  describe 'edit issue' do\\n     before do\\n       visit edit_project_issue_path(project, issue)\\n     end\\n@@ -329,7 +346,7 @@\\n     end\\n   end\\n \\n-  context 'inline edit' do\\n+  describe 'inline edit' do\\n     before do\\n       visit project_issue_path(project, issue)\\n     end\\n\"},{\"old_path\":\"spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb\",\"new_path\":\"spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -36,7 +36,7 @@\\n           click_button \\\"Merge when pipeline succeeds\\\"\\n \\n           expect(page).to have_content \\\"Set by #{user.name} to be merged automatically when the pipeline succeeds\\\"\\n-          expect(page).to have_content \\\"The source branch will not be deleted\\\"\\n+          expect(page).to have_content \\\"Does 
not delete the source branch\\\"\\n           expect(page).to have_selector \\\".js-cancel-auto-merge\\\"\\n           visit project_merge_request_path(project, merge_request) # Needed to refresh the page\\n           expect(page).to have_content /enabled an automatic merge when the pipeline for \\\\h{8} succeeds/i\\n@@ -126,7 +126,7 @@\\n     it 'allows to delete source branch' do\\n       click_button \\\"Delete source branch\\\"\\n \\n-      expect(page).to have_content \\\"The source branch will be deleted\\\"\\n+      expect(page).to have_content \\\"Deletes the source branch\\\"\\n     end\\n \\n     context 'when pipeline succeeds' do\\n\"},{\"old_path\":\"spec/features/merge_request/user_sees_merge_widget_spec.rb\",\"new_path\":\"spec/features/merge_request/user_sees_merge_widget_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -426,7 +426,7 @@\\n \\n     it 'user cannot remove source branch', :sidekiq_might_not_need_inline do\\n       expect(page).not_to have_field('remove-source-branch-input')\\n-      expect(page).to have_content('The source branch will be deleted')\\n+      expect(page).to have_content('Deletes the source branch')\\n     end\\n   end\\n \\n\"},{\"old_path\":\"spec/frontend/admin/analytics/devops_score/components/devops_score_callout_spec.js\",\"new_path\":\"spec/frontend/admin/analytics/devops_score/components/devops_score_callout_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -1,7 +1,7 @@\\n import { GlBanner } from '@gitlab/ui';\\n import { shallowMount } from '@vue/test-utils';\\n-import DevopsScoreCallout from '~/analytics/devops_report/components/devops_score_callout.vue';\\n-import { INTRO_COOKIE_KEY } from '~/analytics/devops_report/constants';\\n+import DevopsScoreCallout from '~/analytics/devops_reports/components/devops_score_callout.vue';\\n+import { 
INTRO_COOKIE_KEY } from '~/analytics/devops_reports/constants';\\n import * as utils from '~/lib/utils/common_utils';\\n import { devopsReportDocsPath, devopsScoreIntroImagePath } from '../mock_data';\\n \\n\"},{\"old_path\":\"spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js\",\"new_path\":\"spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -2,8 +2,8 @@ import { GlTable, GlBadge, GlEmptyState } from '@gitlab/ui';\\n import { GlSingleStat } from '@gitlab/ui/dist/charts';\\n import { mount } from '@vue/test-utils';\\n import { extendedWrapper } from 'helpers/vue_test_utils_helper';\\n-import DevopsScore from '~/analytics/devops_report/components/devops_score.vue';\\n-import DevopsScoreCallout from '~/analytics/devops_report/components/devops_score_callout.vue';\\n+import DevopsScore from '~/analytics/devops_reports/components/devops_score.vue';\\n+import DevopsScoreCallout from '~/analytics/devops_reports/components/devops_score_callout.vue';\\n import { devopsScoreMetricsData, noDataImagePath, devopsScoreTableHeaders } from '../mock_data';\\n \\n describe('DevopsScore', () =\\u003e {\\n\"},{\"old_path\":\"spec/frontend/analytics/devops_report/components/service_ping_disabled_spec.js\",\"new_path\":\"spec/frontend/analytics/devops_reports/components/service_ping_disabled_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"@@ -1,9 +1,9 @@\\n import { GlEmptyState, GlSprintf } from '@gitlab/ui';\\n import { TEST_HOST } from 'helpers/test_constants';\\n import { mountExtended } from 'helpers/vue_test_utils_helper';\\n-import ServicePingDisabled from '~/analytics/devops_report/components/service_ping_disabled.vue';\\n+import ServicePingDisabled from '~/analytics/devops_reports/components/service_ping_disabled.vue';\\n 
\\n-describe('~/analytics/devops_report/components/service_ping_disabled.vue', () =\\u003e {\\n+describe('~/analytics/devops_reports/components/service_ping_disabled.vue', () =\\u003e {\\n   let wrapper;\\n \\n   afterEach(() =\\u003e {\\n\"},{\"old_path\":\"spec/frontend/diffs/components/diff_discussions_spec.js\",\"new_path\":\"spec/frontend/diffs/components/diff_discussions_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -1,6 +1,7 @@\\n import { GlIcon } from '@gitlab/ui';\\n import { mount, createLocalVue } from '@vue/test-utils';\\n import DiffDiscussions from '~/diffs/components/diff_discussions.vue';\\n+import { discussionIntersectionObserverHandlerFactory } from '~/diffs/utils/discussions';\\n import { createStore } from '~/mr_notes/stores';\\n import DiscussionNotes from '~/notes/components/discussion_notes.vue';\\n import NoteableDiscussion from '~/notes/components/noteable_discussion.vue';\\n@@ -19,6 +20,9 @@ describe('DiffDiscussions', () =\\u003e {\\n     store = createStore();\\n     wrapper = mount(localVue.extend(DiffDiscussions), {\\n       store,\\n+      provide: {\\n+        discussionObserverHandler: discussionIntersectionObserverHandlerFactory(),\\n+      },\\n       propsData: {\\n         discussions: getDiscussionsMockData(),\\n         ...props,\\n\"},{\"old_path\":\"spec/frontend/diffs/utils/discussions_spec.js\",\"new_path\":\"spec/frontend/diffs/utils/discussions_spec.js\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,133 @@\\n+import { discussionIntersectionObserverHandlerFactory } from '~/diffs/utils/discussions';\\n+\\n+describe('Diff Discussions Utils', () =\\u003e {\\n+  describe('discussionIntersectionObserverHandlerFactory', () =\\u003e {\\n+    it('creates a handler function', () =\\u003e {\\n+      
expect(discussionIntersectionObserverHandlerFactory()).toBeInstanceOf(Function);\\n+    });\\n+\\n+    describe('intersection observer handler', () =\\u003e {\\n+      const functions = {\\n+        setCurrentDiscussionId: jest.fn(),\\n+        getPreviousUnresolvedDiscussionId: jest.fn().mockImplementation((id) =\\u003e {\\n+          return Number(id) - 1;\\n+        }),\\n+      };\\n+      const defaultProcessableWrapper = {\\n+        entry: {\\n+          time: 0,\\n+          isIntersecting: true,\\n+          rootBounds: {\\n+            bottom: 0,\\n+          },\\n+          boundingClientRect: {\\n+            top: 0,\\n+          },\\n+        },\\n+        currentDiscussion: {\\n+          id: 1,\\n+        },\\n+        isFirstUnresolved: false,\\n+        isDiffsPage: true,\\n+      };\\n+      let handler;\\n+      let getMock;\\n+      let setMock;\\n+\\n+      beforeEach(() =\\u003e {\\n+        functions.setCurrentDiscussionId.mockClear();\\n+        functions.getPreviousUnresolvedDiscussionId.mockClear();\\n+\\n+        defaultProcessableWrapper.functions = functions;\\n+\\n+        setMock = functions.setCurrentDiscussionId.mock;\\n+        getMock = functions.getPreviousUnresolvedDiscussionId.mock;\\n+        handler = discussionIntersectionObserverHandlerFactory();\\n+      });\\n+\\n+      it('debounces multiple simultaneous requests into one queue', () =\\u003e {\\n+        handler(defaultProcessableWrapper);\\n+        handler(defaultProcessableWrapper);\\n+        handler(defaultProcessableWrapper);\\n+        handler(defaultProcessableWrapper);\\n+\\n+        expect(setTimeout).toHaveBeenCalledTimes(4);\\n+        expect(clearTimeout).toHaveBeenCalledTimes(3);\\n+\\n+        // By only advancing to one timer, we ensure it's all being batched into one queue\\n+        jest.advanceTimersToNextTimer();\\n+\\n+        expect(functions.setCurrentDiscussionId).toHaveBeenCalledTimes(4);\\n+      });\\n+\\n+      it('properly processes, sorts 
and executes the correct actions for a set of observed intersections', () =\\u003e {\\n+        handler(defaultProcessableWrapper);\\n+        handler({\\n+          // This observation is here to be filtered out because it's a scrollDown\\n+          ...defaultProcessableWrapper,\\n+          entry: {\\n+            ...defaultProcessableWrapper.entry,\\n+            isIntersecting: false,\\n+            boundingClientRect: { top: 10 },\\n+            rootBounds: { bottom: 100 },\\n+          },\\n+        });\\n+        handler({\\n+          ...defaultProcessableWrapper,\\n+          entry: {\\n+            ...defaultProcessableWrapper.entry,\\n+            time: 101,\\n+            isIntersecting: false,\\n+            rootBounds: { bottom: -100 },\\n+          },\\n+          currentDiscussion: { id: 20 },\\n+        });\\n+        handler({\\n+          ...defaultProcessableWrapper,\\n+          entry: {\\n+            ...defaultProcessableWrapper.entry,\\n+            time: 100,\\n+            isIntersecting: false,\\n+            boundingClientRect: { top: 100 },\\n+          },\\n+          currentDiscussion: { id: 30 },\\n+          isDiffsPage: false,\\n+        });\\n+        handler({\\n+          ...defaultProcessableWrapper,\\n+          isFirstUnresolved: true,\\n+          entry: {\\n+            ...defaultProcessableWrapper.entry,\\n+            time: 100,\\n+            isIntersecting: false,\\n+            boundingClientRect: { top: 200 },\\n+          },\\n+        });\\n+\\n+        jest.advanceTimersToNextTimer();\\n+\\n+        expect(setMock.calls.length).toBe(4);\\n+        expect(setMock.calls[0]).toEqual([1]);\\n+        expect(setMock.calls[1]).toEqual([29]);\\n+        expect(setMock.calls[2]).toEqual([null]);\\n+        expect(setMock.calls[3]).toEqual([19]);\\n+\\n+        expect(getMock.calls.length).toBe(2);\\n+        expect(getMock.calls[0]).toEqual([30, false]);\\n+        expect(getMock.calls[1]).toEqual([20, true]);\\n+\\n+     
   [\\n+          setMock.invocationCallOrder[0],\\n+          getMock.invocationCallOrder[0],\\n+          setMock.invocationCallOrder[1],\\n+          setMock.invocationCallOrder[2],\\n+          getMock.invocationCallOrder[1],\\n+          setMock.invocationCallOrder[3],\\n+        ].forEach((order, idx, list) =\\u003e {\\n+          // Compare each invocation sequence to the one before it (except the first one)\\n+          expect(list[idx - 1] || -1).toBeLessThan(order);\\n+        });\\n+      });\\n+    });\\n+  });\\n+});\\n\"},{\"old_path\":\"spec/frontend/notes/components/discussion_notes_spec.js\",\"new_path\":\"spec/frontend/notes/components/discussion_notes_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -1,6 +1,7 @@\\n import { getByRole } from '@testing-library/dom';\\n import { shallowMount, mount } from '@vue/test-utils';\\n import '~/behaviors/markdown/render_gfm';\\n+import { discussionIntersectionObserverHandlerFactory } from '~/diffs/utils/discussions';\\n import DiscussionNotes from '~/notes/components/discussion_notes.vue';\\n import NoteableNote from '~/notes/components/noteable_note.vue';\\n import { SYSTEM_NOTE } from '~/notes/constants';\\n@@ -26,6 +27,9 @@ describe('DiscussionNotes', () =\\u003e {\\n   const createComponent = (props, mountingMethod = shallowMount) =\\u003e {\\n     wrapper = mountingMethod(DiscussionNotes, {\\n       store,\\n+      provide: {\\n+        discussionObserverHandler: discussionIntersectionObserverHandlerFactory(),\\n+      },\\n       propsData: {\\n         discussion: discussionMock,\\n         isExpanded: false,\\n\"},{\"old_path\":\"spec/frontend/notes/components/noteable_discussion_spec.js\",\"new_path\":\"spec/frontend/notes/components/noteable_discussion_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -3,6 +3,7 @@ import { nextTick } 
from 'vue';\\n import discussionWithTwoUnresolvedNotes from 'test_fixtures/merge_requests/resolved_diff_discussion.json';\\n import { trimText } from 'helpers/text_helper';\\n import mockDiffFile from 'jest/diffs/mock_data/diff_file';\\n+import { discussionIntersectionObserverHandlerFactory } from '~/diffs/utils/discussions';\\n import DiscussionNotes from '~/notes/components/discussion_notes.vue';\\n import ReplyPlaceholder from '~/notes/components/discussion_reply_placeholder.vue';\\n import ResolveWithIssueButton from '~/notes/components/discussion_resolve_with_issue_button.vue';\\n@@ -31,6 +32,9 @@ describe('noteable_discussion component', () =\\u003e {\\n \\n     wrapper = mount(NoteableDiscussion, {\\n       store,\\n+      provide: {\\n+        discussionObserverHandler: discussionIntersectionObserverHandlerFactory(),\\n+      },\\n       propsData: { discussion: discussionMock },\\n     });\\n   });\\n@@ -167,6 +171,9 @@ describe('noteable_discussion component', () =\\u003e {\\n \\n         wrapper = mount(NoteableDiscussion, {\\n           store,\\n+          provide: {\\n+            discussionObserverHandler: discussionIntersectionObserverHandlerFactory(),\\n+          },\\n           propsData: { discussion: discussionMock },\\n         });\\n       });\\n@@ -185,6 +192,9 @@ describe('noteable_discussion component', () =\\u003e {\\n \\n         wrapper = mount(NoteableDiscussion, {\\n           store,\\n+          provide: {\\n+            discussionObserverHandler: discussionIntersectionObserverHandlerFactory(),\\n+          },\\n           propsData: { discussion: discussionMock },\\n         });\\n       });\\n\"},{\"old_path\":\"spec/frontend/notes/components/notes_app_spec.js\",\"new_path\":\"spec/frontend/notes/components/notes_app_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -9,6 +9,7 @@ import DraftNote from '~/batch_comments/components/draft_note.vue';\\n 
import batchComments from '~/batch_comments/stores/modules/batch_comments';\\n import axios from '~/lib/utils/axios_utils';\\n import * as urlUtility from '~/lib/utils/url_utility';\\n+import { discussionIntersectionObserverHandlerFactory } from '~/diffs/utils/discussions';\\n import CommentForm from '~/notes/components/comment_form.vue';\\n import NotesApp from '~/notes/components/notes_app.vue';\\n import * as constants from '~/notes/constants';\\n@@ -78,6 +79,9 @@ describe('note_app', () =\\u003e {\\n           \\u003c/div\\u003e`,\\n         },\\n         {\\n+          provide: {\\n+            discussionObserverHandler: discussionIntersectionObserverHandlerFactory(),\\n+          },\\n           propsData,\\n           store,\\n         },\\n\"},{\"old_path\":\"spec/frontend/runner/components/cells/runner_actions_cell_spec.js\",\"new_path\":\"spec/frontend/runner/components/cells/runner_actions_cell_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -8,12 +8,11 @@ import RunnerActionCell from '~/runner/components/cells/runner_actions_cell.vue'\\n import getGroupRunnersQuery from '~/runner/graphql/get_group_runners.query.graphql';\\n import getRunnersQuery from '~/runner/graphql/get_runners.query.graphql';\\n import runnerDeleteMutation from '~/runner/graphql/runner_delete.mutation.graphql';\\n-import runnerUpdateMutation from '~/runner/graphql/runner_update.mutation.graphql';\\n+import runnerActionsUpdateMutation from '~/runner/graphql/runner_actions_update.mutation.graphql';\\n import { captureException } from '~/runner/sentry_utils';\\n-import { runnersData, runnerData } from '../../mock_data';\\n+import { runnersData } from '../../mock_data';\\n \\n const mockRunner = runnersData.data.runners.nodes[0];\\n-const mockRunnerDetails = runnerData.data.runner;\\n \\n const getRunnersQueryName = getRunnersQuery.definitions[0].name.value;\\n const getGroupRunnersQueryName = 
getGroupRunnersQuery.definitions[0].name.value;\\n@@ -27,7 +26,7 @@ jest.mock('~/runner/sentry_utils');\\n describe('RunnerTypeCell', () =\\u003e {\\n   let wrapper;\\n   const runnerDeleteMutationHandler = jest.fn();\\n-  const runnerUpdateMutationHandler = jest.fn();\\n+  const runnerActionsUpdateMutationHandler = jest.fn();\\n \\n   const findEditBtn = () =\\u003e wrapper.findByTestId('edit-runner');\\n   const findToggleActiveBtn = () =\\u003e wrapper.findByTestId('toggle-active-runner');\\n@@ -46,7 +45,7 @@ describe('RunnerTypeCell', () =\\u003e {\\n         localVue,\\n         apolloProvider: createMockApollo([\\n           [runnerDeleteMutation, runnerDeleteMutationHandler],\\n-          [runnerUpdateMutation, runnerUpdateMutationHandler],\\n+          [runnerActionsUpdateMutation, runnerActionsUpdateMutationHandler],\\n         ]),\\n         ...options,\\n       }),\\n@@ -62,10 +61,10 @@ describe('RunnerTypeCell', () =\\u003e {\\n       },\\n     });\\n \\n-    runnerUpdateMutationHandler.mockResolvedValue({\\n+    runnerActionsUpdateMutationHandler.mockResolvedValue({\\n       data: {\\n         runnerUpdate: {\\n-          runner: mockRunnerDetails,\\n+          runner: mockRunner,\\n           errors: [],\\n         },\\n       },\\n@@ -74,7 +73,7 @@ describe('RunnerTypeCell', () =\\u003e {\\n \\n   afterEach(() =\\u003e {\\n     runnerDeleteMutationHandler.mockReset();\\n-    runnerUpdateMutationHandler.mockReset();\\n+    runnerActionsUpdateMutationHandler.mockReset();\\n \\n     wrapper.destroy();\\n   });\\n@@ -116,12 +115,12 @@ describe('RunnerTypeCell', () =\\u003e {\\n \\n     describe(`When clicking on the ${icon} button`, () =\\u003e {\\n       it(`The apollo mutation to set active to ${newActiveValue} is called`, async () =\\u003e {\\n-        expect(runnerUpdateMutationHandler).toHaveBeenCalledTimes(0);\\n+        expect(runnerActionsUpdateMutationHandler).toHaveBeenCalledTimes(0);\\n \\n         await 
findToggleActiveBtn().vm.$emit('click');\\n \\n-        expect(runnerUpdateMutationHandler).toHaveBeenCalledTimes(1);\\n-        expect(runnerUpdateMutationHandler).toHaveBeenCalledWith({\\n+        expect(runnerActionsUpdateMutationHandler).toHaveBeenCalledTimes(1);\\n+        expect(runnerActionsUpdateMutationHandler).toHaveBeenCalledWith({\\n           input: {\\n             id: mockRunner.id,\\n             active: newActiveValue,\\n@@ -145,7 +144,7 @@ describe('RunnerTypeCell', () =\\u003e {\\n         const mockErrorMsg = 'Update error!';\\n \\n         beforeEach(async () =\\u003e {\\n-          runnerUpdateMutationHandler.mockRejectedValueOnce(new Error(mockErrorMsg));\\n+          runnerActionsUpdateMutationHandler.mockRejectedValueOnce(new Error(mockErrorMsg));\\n \\n           await findToggleActiveBtn().vm.$emit('click');\\n         });\\n@@ -167,10 +166,10 @@ describe('RunnerTypeCell', () =\\u003e {\\n         const mockErrorMsg2 = 'User not allowed!';\\n \\n         beforeEach(async () =\\u003e {\\n-          runnerUpdateMutationHandler.mockResolvedValue({\\n+          runnerActionsUpdateMutationHandler.mockResolvedValue({\\n             data: {\\n               runnerUpdate: {\\n-                runner: runnerData.data.runner,\\n+                runner: mockRunner,\\n                 errors: [mockErrorMsg, mockErrorMsg2],\\n               },\\n             },\\n\"},{\"old_path\":\"spec/frontend/runner/components/cells/runner_type_cell_spec.js\",\"new_path\":\"spec/frontend/runner/components/cells/runner_status_cell_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"@@ -1,20 +1,20 @@\\n import { GlBadge } from '@gitlab/ui';\\n import { mount } from '@vue/test-utils';\\n-import RunnerTypeCell from '~/runner/components/cells/runner_type_cell.vue';\\n-import { INSTANCE_TYPE } from '~/runner/constants';\\n+import RunnerStatusCell from 
'~/runner/components/cells/runner_status_cell.vue';\\n+import { INSTANCE_TYPE, STATUS_ONLINE, STATUS_OFFLINE } from '~/runner/constants';\\n \\n describe('RunnerTypeCell', () =\\u003e {\\n   let wrapper;\\n \\n-  const findBadges = () =\\u003e wrapper.findAllComponents(GlBadge);\\n+  const findBadgeAt = (i) =\\u003e wrapper.findAllComponents(GlBadge).at(i);\\n \\n   const createComponent = ({ runner = {} } = {}) =\\u003e {\\n-    wrapper = mount(RunnerTypeCell, {\\n+    wrapper = mount(RunnerStatusCell, {\\n       propsData: {\\n         runner: {\\n           runnerType: INSTANCE_TYPE,\\n           active: true,\\n-          locked: false,\\n+          status: STATUS_ONLINE,\\n           ...runner,\\n         },\\n       },\\n@@ -25,24 +25,45 @@ describe('RunnerTypeCell', () =\\u003e {\\n     wrapper.destroy();\\n   });\\n \\n-  it('Displays the runner type', () =\\u003e {\\n+  it('Displays online status', () =\\u003e {\\n     createComponent();\\n \\n-    expect(findBadges()).toHaveLength(1);\\n-    expect(findBadges().at(0).text()).toBe('shared');\\n+    expect(wrapper.text()).toMatchInterpolatedText('online');\\n+    expect(findBadgeAt(0).text()).toBe('online');\\n   });\\n \\n-  it('Displays locked and paused states', () =\\u003e {\\n+  it('Displays offline status', () =\\u003e {\\n+    createComponent({\\n+      runner: {\\n+        status: STATUS_OFFLINE,\\n+      },\\n+    });\\n+\\n+    expect(wrapper.text()).toMatchInterpolatedText('offline');\\n+    expect(findBadgeAt(0).text()).toBe('offline');\\n+  });\\n+\\n+  it('Displays paused status', () =\\u003e {\\n     createComponent({\\n       runner: {\\n         active: false,\\n-        locked: true,\\n+        status: STATUS_ONLINE,\\n+      },\\n+    });\\n+\\n+    expect(wrapper.text()).toMatchInterpolatedText('online paused');\\n+\\n+    expect(findBadgeAt(0).text()).toBe('online');\\n+    expect(findBadgeAt(1).text()).toBe('paused');\\n+  });\\n+\\n+  it('Is empty when data is missing', () =\\u003e 
{\\n+    createComponent({\\n+      runner: {\\n+        status: null,\\n       },\\n     });\\n \\n-    expect(findBadges()).toHaveLength(3);\\n-    expect(findBadges().at(0).text()).toBe('shared');\\n-    expect(findBadges().at(1).text()).toBe('locked');\\n-    expect(findBadges().at(2).text()).toBe('paused');\\n+    expect(wrapper.text()).toBe('');\\n   });\\n });\\n\"},{\"old_path\":\"spec/frontend/runner/components/cells/runner_summary_cell_spec.js\",\"new_path\":\"spec/frontend/runner/components/cells/runner_summary_cell_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -1,5 +1,6 @@\\n-import { mount } from '@vue/test-utils';\\n+import { mountExtended } from 'helpers/vue_test_utils_helper';\\n import RunnerSummaryCell from '~/runner/components/cells/runner_summary_cell.vue';\\n+import { INSTANCE_TYPE, PROJECT_TYPE } from '~/runner/constants';\\n \\n const mockId = '1';\\n const mockShortSha = '2P6oDVDm';\\n@@ -8,13 +9,17 @@ const mockDescription = 'runner-1';\\n describe('RunnerTypeCell', () =\\u003e {\\n   let wrapper;\\n \\n-  const createComponent = (options) =\\u003e {\\n-    wrapper = mount(RunnerSummaryCell, {\\n+  const findLockIcon = () =\\u003e wrapper.findByTestId('lock-icon');\\n+\\n+  const createComponent = (runner, options) =\\u003e {\\n+    wrapper = mountExtended(RunnerSummaryCell, {\\n       propsData: {\\n         runner: {\\n           id: `gid://gitlab/Ci::Runner/${mockId}`,\\n           shortSha: mockShortSha,\\n           description: mockDescription,\\n+          runnerType: INSTANCE_TYPE,\\n+          ...runner,\\n         },\\n       },\\n       ...options,\\n@@ -33,6 +38,23 @@ describe('RunnerTypeCell', () =\\u003e {\\n     expect(wrapper.text()).toContain(`#${mockId} (${mockShortSha})`);\\n   });\\n \\n+  it('Displays the runner type', () =\\u003e {\\n+    expect(wrapper.text()).toContain('shared');\\n+  });\\n+\\n+  it('Does not display the locked 
icon', () =\\u003e {\\n+    expect(findLockIcon().exists()).toBe(false);\\n+  });\\n+\\n+  it('Displays the locked icon for locked runners', () =\\u003e {\\n+    createComponent({\\n+      runnerType: PROJECT_TYPE,\\n+      locked: true,\\n+    });\\n+\\n+    expect(findLockIcon().exists()).toBe(true);\\n+  });\\n+\\n   it('Displays the runner description', () =\\u003e {\\n     expect(wrapper.text()).toContain(mockDescription);\\n   });\\n@@ -40,11 +62,14 @@ describe('RunnerTypeCell', () =\\u003e {\\n   it('Displays a custom slot', () =\\u003e {\\n     const slotContent = 'My custom runner summary';\\n \\n-    createComponent({\\n-      slots: {\\n-        'runner-name': slotContent,\\n+    createComponent(\\n+      {},\\n+      {\\n+        slots: {\\n+          'runner-name': slotContent,\\n+        },\\n       },\\n-    });\\n+    );\\n \\n     expect(wrapper.text()).toContain(slotContent);\\n   });\\n\"},{\"old_path\":\"spec/frontend/runner/components/runner_contacted_state_badge_spec.js\",\"new_path\":\"spec/frontend/runner/components/runner_contacted_state_badge_spec.js\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,86 @@\\n+import { GlBadge } from '@gitlab/ui';\\n+import { shallowMount } from '@vue/test-utils';\\n+import RunnerContactedStateBadge from '~/runner/components/runner_contacted_state_badge.vue';\\n+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';\\n+import { STATUS_ONLINE, STATUS_OFFLINE, STATUS_NOT_CONNECTED } from '~/runner/constants';\\n+\\n+describe('RunnerTypeBadge', () =\\u003e {\\n+  let wrapper;\\n+\\n+  const findBadge = () =\\u003e wrapper.findComponent(GlBadge);\\n+  const getTooltip = () =\\u003e getBinding(findBadge().element, 'gl-tooltip');\\n+\\n+  const createComponent = ({ runner = {} } = {}) =\\u003e {\\n+    wrapper = shallowMount(RunnerContactedStateBadge, {\\n+      propsData: {\\n+        runner: {\\n+          
contactedAt: '2021-01-01T00:00:00Z',\\n+          status: STATUS_ONLINE,\\n+          ...runner,\\n+        },\\n+      },\\n+      directives: {\\n+        GlTooltip: createMockDirective(),\\n+      },\\n+    });\\n+  };\\n+\\n+  beforeEach(() =\\u003e {\\n+    jest.useFakeTimers('modern');\\n+  });\\n+\\n+  afterEach(() =\\u003e {\\n+    jest.useFakeTimers('legacy');\\n+\\n+    wrapper.destroy();\\n+  });\\n+\\n+  it('renders online state', () =\\u003e {\\n+    jest.setSystemTime(new Date('2021-01-01T00:01:00Z'));\\n+\\n+    createComponent();\\n+\\n+    expect(wrapper.text()).toBe('online');\\n+    expect(findBadge().props('variant')).toBe('success');\\n+    expect(getTooltip().value).toBe('Runner is online; last contact was 1 minute ago');\\n+  });\\n+\\n+  it('renders offline state', () =\\u003e {\\n+    jest.setSystemTime(new Date('2021-01-02T00:00:00Z'));\\n+\\n+    createComponent({\\n+      runner: {\\n+        status: STATUS_OFFLINE,\\n+      },\\n+    });\\n+\\n+    expect(wrapper.text()).toBe('offline');\\n+    expect(findBadge().props('variant')).toBe('muted');\\n+    expect(getTooltip().value).toBe(\\n+      'No recent contact from this runner; last contact was 1 day ago',\\n+    );\\n+  });\\n+\\n+  it('renders not connected state', () =\\u003e {\\n+    createComponent({\\n+      runner: {\\n+        contactedAt: null,\\n+        status: STATUS_NOT_CONNECTED,\\n+      },\\n+    });\\n+\\n+    expect(wrapper.text()).toBe('not connected');\\n+    expect(findBadge().props('variant')).toBe('muted');\\n+    expect(getTooltip().value).toMatch('This runner has never connected');\\n+  });\\n+\\n+  it('does not fail when data is missing', () =\\u003e {\\n+    createComponent({\\n+      runner: {\\n+        status: null,\\n+      },\\n+    });\\n+\\n+    expect(wrapper.text()).toBe('');\\n+  
});\\n+});\\n\"},{\"old_path\":\"spec/frontend/runner/components/runner_list_spec.js\",\"new_path\":\"spec/frontend/runner/components/runner_list_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -42,8 +42,8 @@ describe('RunnerList', () =\\u003e {\\n     const headerLabels = findHeaders().wrappers.map((w) =\\u003e w.text());\\n \\n     expect(headerLabels).toEqual([\\n-      'Type/State',\\n-      'Runner',\\n+      'Status',\\n+      'Runner ID',\\n       'Version',\\n       'IP Address',\\n       'Tags',\\n@@ -62,7 +62,7 @@ describe('RunnerList', () =\\u003e {\\n     const { id, description, version, ipAddress, shortSha } = mockRunners[0];\\n \\n     // Badges\\n-    expect(findCell({ fieldKey: 'type' }).text()).toMatchInterpolatedText('specific paused');\\n+    expect(findCell({ fieldKey: 'status' }).text()).toMatchInterpolatedText('not connected paused');\\n \\n     // Runner summary\\n     expect(findCell({ fieldKey: 'summary' }).text()).toContain(\\n\"},{\"old_path\":\"spec/frontend/runner/components/runner_state_paused_badge_spec.js\",\"new_path\":\"spec/frontend/runner/components/runner_paused_badge_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"@@ -1,6 +1,6 @@\\n import { GlBadge } from '@gitlab/ui';\\n import { shallowMount } from '@vue/test-utils';\\n-import RunnerStatePausedBadge from '~/runner/components/runner_state_paused_badge.vue';\\n+import RunnerStatePausedBadge from '~/runner/components/runner_paused_badge.vue';\\n import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';\\n \\n describe('RunnerTypeBadge', () =\\u003e 
{\\n\"},{\"old_path\":\"spec/frontend/runner/components/runner_state_locked_badge_spec.js\",\"new_path\":\"spec/frontend/runner/components/runner_state_locked_badge_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"0\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":true,\"diff\":\"@@ -1,45 +0,0 @@\\n-import { GlBadge } from '@gitlab/ui';\\n-import { shallowMount } from '@vue/test-utils';\\n-import RunnerStateLockedBadge from '~/runner/components/runner_state_locked_badge.vue';\\n-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';\\n-\\n-describe('RunnerTypeBadge', () =\\u003e {\\n-  let wrapper;\\n-\\n-  const findBadge = () =\\u003e wrapper.findComponent(GlBadge);\\n-  const getTooltip = () =\\u003e getBinding(findBadge().element, 'gl-tooltip');\\n-\\n-  const createComponent = ({ props = {} } = {}) =\\u003e {\\n-    wrapper = shallowMount(RunnerStateLockedBadge, {\\n-      propsData: {\\n-        ...props,\\n-      },\\n-      directives: {\\n-        GlTooltip: createMockDirective(),\\n-      },\\n-    });\\n-  };\\n-\\n-  beforeEach(() =\\u003e {\\n-    createComponent();\\n-  });\\n-\\n-  afterEach(() =\\u003e {\\n-    wrapper.destroy();\\n-  });\\n-\\n-  it('renders locked state', () =\\u003e {\\n-    expect(wrapper.text()).toBe('locked');\\n-    expect(findBadge().props('variant')).toBe('warning');\\n-  });\\n-\\n-  it('renders tooltip', () =\\u003e {\\n-    expect(getTooltip().value).toBeDefined();\\n-  });\\n-\\n-  it('passes arbitrary attributes to the badge', () =\\u003e {\\n-    createComponent({ props: { size: 'sm' } });\\n-\\n-    expect(findBadge().props('size')).toBe('sm');\\n-  });\\n-});\\n\"},{\"old_path\":\"spec/frontend/runner/components/runner_type_alert_spec.js\",\"new_path\":\"spec/frontend/runner/components/runner_type_alert_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -23,11 +23,11 @@ describe('RunnerTypeAlert', () 
=\\u003e {\\n   });\\n \\n   describe.each`\\n-    type             | exampleText                                                            | anchor                 | variant\\n-    ${INSTANCE_TYPE} | ${'This runner is available to all groups and projects'}               | ${'#shared-runners'}   | ${'success'}\\n-    ${GROUP_TYPE}    | ${'This runner is available to all projects and subgroups in a group'} | ${'#group-runners'}    | ${'success'}\\n-    ${PROJECT_TYPE}  | ${'This runner is associated with one or more projects'}               | ${'#specific-runners'} | ${'info'}\\n-  `('When it is an $type level runner', ({ type, exampleText, anchor, variant }) =\\u003e {\\n+    type             | exampleText                                                            | anchor\\n+    ${INSTANCE_TYPE} | ${'This runner is available to all groups and projects'}               | ${'#shared-runners'}\\n+    ${GROUP_TYPE}    | ${'This runner is available to all projects and subgroups in a group'} | ${'#group-runners'}\\n+    ${PROJECT_TYPE}  | ${'This runner is associated with one or more projects'}               | ${'#specific-runners'}\\n+  `('When it is an $type level runner', ({ type, exampleText, anchor }) =\\u003e {\\n     beforeEach(() =\\u003e {\\n       createComponent({ props: { type } });\\n     });\\n@@ -36,8 +36,8 @@ describe('RunnerTypeAlert', () =\\u003e {\\n       expect(wrapper.text()).toMatch(exampleText);\\n     });\\n \\n-    it(`Shows a ${variant} variant`, () =\\u003e {\\n-      expect(findAlert().props('variant')).toBe(variant);\\n+    it(`Shows an \\\"info\\\" variant`, () =\\u003e {\\n+      expect(findAlert().props('variant')).toBe('info');\\n     });\\n \\n     it(`Links to anchor \\\"${anchor}\\\"`, () =\\u003e 
{\\n\"},{\"old_path\":\"spec/frontend/runner/components/runner_type_badge_spec.js\",\"new_path\":\"spec/frontend/runner/components/runner_type_badge_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -26,18 +26,18 @@ describe('RunnerTypeBadge', () =\\u003e {\\n   });\\n \\n   describe.each`\\n-    type             | text          | variant\\n-    ${INSTANCE_TYPE} | ${'shared'}   | ${'success'}\\n-    ${GROUP_TYPE}    | ${'group'}    | ${'success'}\\n-    ${PROJECT_TYPE}  | ${'specific'} | ${'info'}\\n-  `('displays $type runner', ({ type, text, variant }) =\\u003e {\\n+    type             | text\\n+    ${INSTANCE_TYPE} | ${'shared'}\\n+    ${GROUP_TYPE}    | ${'group'}\\n+    ${PROJECT_TYPE}  | ${'specific'}\\n+  `('displays $type runner', ({ type, text }) =\\u003e {\\n     beforeEach(() =\\u003e {\\n       createComponent({ props: { type } });\\n     });\\n \\n-    it(`as \\\"${text}\\\" with a ${variant} variant`, () =\\u003e {\\n+    it(`as \\\"${text}\\\" with an \\\"info\\\" variant`, () =\\u003e {\\n       expect(findBadge().text()).toBe(text);\\n-      expect(findBadge().props('variant')).toBe(variant);\\n+      expect(findBadge().props('variant')).toBe('info');\\n     });\\n \\n     it('with a tooltip', () =\\u003e {\\n\"},{\"old_path\":\"spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap\",\"new_path\":\"spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -50,7 +50,7 @@ exports[`MRWidgetAutoMergeEnabled when graphql is disabled template should have\\n         \\u003cspan\\n           class=\\\"gl-mr-3\\\"\\n         \\u003e\\n-          The source branch will not be deleted\\n+          Does not delete the source branch\\n         \\u003c/span\\u003e\\n 
         \\n         \\u003cgl-button-stub\\n@@ -122,7 +122,7 @@ exports[`MRWidgetAutoMergeEnabled when graphql is enabled template should have c\\n         \\u003cspan\\n           class=\\\"gl-mr-3\\\"\\n         \\u003e\\n-          The source branch will not be deleted\\n+          Does not delete the source branch\\n         \\u003c/span\\u003e\\n          \\n         \\u003cgl-button-stub\\n\"},{\"old_path\":\"spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js\",\"new_path\":\"spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -270,8 +270,8 @@ describe('MRWidgetAutoMergeEnabled', () =\\u003e {\\n \\n           const normalizedText = wrapper.text().replace(/\\\\s+/g, ' ');\\n \\n-          expect(normalizedText).toContain('The source branch will be deleted');\\n-          expect(normalizedText).not.toContain('The source branch will not be deleted');\\n+          expect(normalizedText).toContain('Deletes the source branch');\\n+          expect(normalizedText).not.toContain('Does not delete the source branch');\\n         });\\n \\n         it('should not show delete source branch button when user not able to delete source branch', () =\\u003e {\\n\"},{\"old_path\":\"spec/frontend/vue_mr_widget/components/states/mr_widget_merging_spec.js\",\"new_path\":\"spec/frontend/vue_mr_widget/components/states/mr_widget_merging_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -42,7 +42,7 @@ describe('MRWidgetMerging', () =\\u003e {\\n         .trim()\\n         .replace(/\\\\s\\\\s+/g, ' ')\\n         .replace(/[\\\\r\\\\n]+/g, ' '),\\n-    ).toEqual('The changes will be merged into branch');\\n+    ).toEqual('Merges changes into branch');\\n \\n     
expect(wrapper.find('a').attributes('href')).toBe('/branch-path');\\n   });\\n\"},{\"old_path\":\"spec/frontend/vue_mr_widget/mr_widget_options_spec.js\",\"new_path\":\"spec/frontend/vue_mr_widget/mr_widget_options_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -543,7 +543,7 @@ describe('MrWidgetOptions', () =\\u003e {\\n         nextTick(() =\\u003e {\\n           const tooltip = wrapper.find('[data-testid=\\\"question-o-icon\\\"]');\\n \\n-          expect(wrapper.text()).toContain('The source branch will be deleted');\\n+          expect(wrapper.text()).toContain('Deletes the source branch');\\n           expect(tooltip.attributes('title')).toBe(\\n             'A user with write access to the source branch selected this option',\\n           );\\n@@ -559,7 +559,7 @@ describe('MrWidgetOptions', () =\\u003e {\\n \\n         nextTick(() =\\u003e {\\n           expect(wrapper.text()).toContain('The source branch has been deleted');\\n-          expect(wrapper.text()).not.toContain('The source branch will be deleted');\\n+          expect(wrapper.text()).not.toContain('Deletes the source branch');\\n \\n           done();\\n         });\\n\"},{\"old_path\":\"spec/lib/gitlab/ci/artifact_file_reader_spec.rb\",\"new_path\":\"spec/lib/gitlab/ci/artifact_file_reader_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -18,17 +18,6 @@\\n         expect(YAML.safe_load(subject).keys).to contain_exactly('rspec', 'time', 'custom')\\n       end\\n \\n-      context 'when FF ci_new_artifact_file_reader is disabled' do\\n-        before do\\n-          stub_feature_flags(ci_new_artifact_file_reader: false)\\n-        end\\n-\\n-        it 'returns the content at the path' do\\n-          is_expected.to be_present\\n-          expect(YAML.safe_load(subject).keys).to contain_exactly('rspec', 'time', 'custom')\\n-      
  end\\n-      end\\n-\\n       context 'when path does not exist' do\\n         let(:path) { 'file/does/not/exist.txt' }\\n         let(:expected_error) do\\n\"},{\"old_path\":\"spec/lib/gitlab/database/gitlab_schema_spec.rb\",\"new_path\":\"spec/lib/gitlab/database/gitlab_schema_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -35,4 +35,24 @@\\n       end\\n     end\\n   end\\n+\\n+  describe '.table_schema' do\\n+    using RSpec::Parameterized::TableSyntax\\n+\\n+    where(:name, :classification) do\\n+      'ci_builds'                       | :gitlab_ci\\n+      'my_schema.ci_builds'             | :gitlab_ci\\n+      'information_schema.columns'      | :gitlab_shared\\n+      'audit_events_part_5fc467ac26'    | :gitlab_main\\n+      '_test_my_table'                  | :gitlab_shared\\n+      'pg_attribute'                    | :gitlab_shared\\n+      'my_other_table'                  | :undefined_my_other_table\\n+    end\\n+\\n+    with_them do\\n+      subject { described_class.table_schema(name) }\\n+\\n+      it { is_expected.to eq(classification) }\\n+    end\\n+  end\\n end\\n\"},{\"old_path\":\"spec/lib/gitlab/database/query_analyzer_spec.rb\",\"new_path\":\"spec/lib/gitlab/database/query_analyzer_spec.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,72 @@\\n+# frozen_string_literal: true\\n+\\n+require 'spec_helper'\\n+\\n+RSpec.describe Gitlab::Database::QueryAnalyzer do\\n+  let(:analyzer) { double(:query_analyzer) }\\n+\\n+  before do\\n+    stub_const('Gitlab::Database::QueryAnalyzer::ANALYZERS', [analyzer])\\n+  end\\n+\\n+  context 'the hook is enabled by default in specs' do\\n+    it 'does process queries and gets normalized SQL' do\\n+      expect(analyzer).to receive(:enabled?).and_return(true)\\n+      expect(analyzer).to receive(:analyze) do |parsed|\\n+        
expect(parsed.sql).to include(\\\"SELECT $1 FROM projects\\\")\\n+        expect(parsed.pg.tables).to eq(%w[projects])\\n+      end\\n+\\n+      Project.connection.execute(\\\"SELECT 1 FROM projects\\\")\\n+    end\\n+  end\\n+\\n+  describe '#process_sql' do\\n+    it 'does not analyze query if not enabled' do\\n+      expect(analyzer).to receive(:enabled?).and_return(false)\\n+      expect(analyzer).not_to receive(:analyze)\\n+\\n+      process_sql(\\\"SELECT 1 FROM projects\\\")\\n+    end\\n+\\n+    it 'does analyze query if enabled' do\\n+      expect(analyzer).to receive(:enabled?).and_return(true)\\n+      expect(analyzer).to receive(:analyze) do |parsed|\\n+        expect(parsed.sql).to eq(\\\"SELECT $1 FROM projects\\\")\\n+        expect(parsed.pg.tables).to eq(%w[projects])\\n+      end\\n+\\n+      process_sql(\\\"SELECT 1 FROM projects\\\")\\n+    end\\n+\\n+    it 'does track exception if query cannot be parsed' do\\n+      expect(analyzer).to receive(:enabled?).and_return(true)\\n+      expect(analyzer).not_to receive(:analyze)\\n+      expect(Gitlab::ErrorTracking).to receive(:track_exception)\\n+\\n+      expect { process_sql(\\\"invalid query\\\") }.not_to raise_error\\n+    end\\n+\\n+    it 'does track exception if analyzer raises exception on enabled?' 
do\\n+      expect(analyzer).to receive(:enabled?).and_raise('exception')\\n+      expect(analyzer).not_to receive(:analyze)\\n+      expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)\\n+\\n+      expect { process_sql(\\\"SELECT 1 FROM projects\\\") }.not_to raise_error\\n+    end\\n+\\n+    it 'does track exception if analyzer raises exception on analyze' do\\n+      expect(analyzer).to receive(:enabled?).and_return(true)\\n+      expect(analyzer).to receive(:analyze).and_raise('exception')\\n+      expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)\\n+\\n+      expect { process_sql(\\\"SELECT 1 FROM projects\\\") }.not_to raise_error\\n+    end\\n+\\n+    def process_sql(sql)\\n+      ApplicationRecord.connection.load_balancer.read_write do |connection|\\n+        described_class.new.send(:process_sql, sql, connection)\\n+      end\\n+    end\\n+  end\\n+end\\n\"},{\"old_path\":\"spec/lib/gitlab/graphql/known_operations_spec.rb\",\"new_path\":\"spec/lib/gitlab/graphql/known_operations_spec.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,72 @@\\n+# frozen_string_literal: true\\n+\\n+require 'fast_spec_helper'\\n+require 'rspec-parameterized'\\n+require \\\"support/graphql/fake_query_type\\\"\\n+\\n+RSpec.describe Gitlab::Graphql::KnownOperations do\\n+  using RSpec::Parameterized::TableSyntax\\n+\\n+  # Include duplicated operation names to test that we are unique-ifying them\\n+  let(:fake_operations) { %w(foo foo bar bar) }\\n+  let(:fake_schema) do\\n+    Class.new(GraphQL::Schema) do\\n+      query Graphql::FakeQueryType\\n+    end\\n+  end\\n+\\n+  subject { described_class.new(fake_operations) }\\n+\\n+  describe \\\"#from_query\\\" do\\n+    where(:query_string, :expected) do\\n+      \\\"query { helloWorld }\\\"         | described_class::ANONYMOUS\\n+      \\\"query fuzzyyy { helloWorld }\\\" | described_class::UNKNOWN\\n+ 
     \\\"query foo { helloWorld }\\\"     | described_class::Operation.new(\\\"foo\\\")\\n+    end\\n+\\n+    with_them do\\n+      it \\\"returns known operation name from GraphQL Query\\\" do\\n+        query = ::GraphQL::Query.new(fake_schema, query_string)\\n+\\n+        expect(subject.from_query(query)).to eq(expected)\\n+      end\\n+    end\\n+  end\\n+\\n+  describe \\\"#operations\\\" do\\n+    it \\\"returns array of known operations\\\" do\\n+      expect(subject.operations.map(\\u0026:name)).to match_array(%w(anonymous unknown foo bar))\\n+    end\\n+  end\\n+\\n+  describe \\\"Operation#to_caller_id\\\" do\\n+    where(:query_string, :expected) do\\n+      \\\"query { helloWorld }\\\"         | \\\"graphql:#{described_class::ANONYMOUS.name}\\\"\\n+      \\\"query foo { helloWorld }\\\"     | \\\"graphql:foo\\\"\\n+    end\\n+\\n+    with_them do\\n+      it \\\"formats operation name for caller_id metric property\\\" do\\n+        query = ::GraphQL::Query.new(fake_schema, query_string)\\n+\\n+        expect(subject.from_query(query).to_caller_id).to eq(expected)\\n+      end\\n+    end\\n+  end\\n+\\n+  describe \\\".default\\\" do\\n+    it \\\"returns a memoization of values from webpack\\\", :aggregate_failures do\\n+      # .default could have been referenced in another spec, so we need to clean it up here\\n+      described_class.instance_variable_set(:@default, nil)\\n+\\n+      expect(Gitlab::Webpack::GraphqlKnownOperations).to receive(:load).once.and_return(fake_operations)\\n+\\n+      2.times { described_class.default }\\n+\\n+      # Uses reference equality to verify memoization\\n+      expect(described_class.default).to equal(described_class.default)\\n+      expect(described_class.default).to be_a(described_class)\\n+      expect(described_class.default.operations.map(\\u0026:name)).to include(*fake_operations)\\n+    end\\n+  
end\\n+end\\n\"},{\"old_path\":\"spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb\",\"new_path\":\"spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -11,12 +11,12 @@ def expand_path(path)\\n     end\\n \\n     def stub_exists(exists: true)\\n-      ['app/workers/all_queues.yml', 'ee/app/workers/all_queues.yml'].each do |path|\\n+      ['app/workers/all_queues.yml', 'ee/app/workers/all_queues.yml', 'jh/app/workers/all_queues.yml'].each do |path|\\n         allow(File).to receive(:exist?).with(expand_path(path)).and_return(exists)\\n       end\\n     end\\n \\n-    def stub_contents(foss_queues, ee_queues)\\n+    def stub_contents(foss_queues, ee_queues, jh_queues)\\n       allow(YAML).to receive(:load_file)\\n                        .with(expand_path('app/workers/all_queues.yml'))\\n                        .and_return(foss_queues)\\n@@ -24,6 +24,10 @@ def stub_contents(foss_queues, ee_queues)\\n       allow(YAML).to receive(:load_file)\\n                        .with(expand_path('ee/app/workers/all_queues.yml'))\\n                        .and_return(ee_queues)\\n+\\n+      allow(YAML).to receive(:load_file)\\n+                       .with(expand_path('jh/app/workers/all_queues.yml'))\\n+                       .and_return(jh_queues)\\n     end\\n \\n     before do\\n@@ -45,8 +49,9 @@ def stub_contents(foss_queues, ee_queues)\\n         end\\n \\n         it 'flattens and joins the contents' do\\n-          expected_queues = %w[queue_a queue_b]\\n-          expected_queues = expected_queues.first(1) unless Gitlab.ee?\\n+          expected_queues = %w[queue_a]\\n+          expected_queues \\u003c\\u003c 'queue_b' if Gitlab.ee?\\n+          expected_queues \\u003c\\u003c 'queue_c' if Gitlab.jh?\\n \\n           expect(described_class.worker_queues(dummy_root))\\n             .to match_array(expected_queues)\\n@@ -55,7 +60,7 @@ def 
stub_contents(foss_queues, ee_queues)\\n \\n       context 'when the file contains an array of hashes' do\\n         before do\\n-          stub_contents([{ name: 'queue_a' }], [{ name: 'queue_b' }])\\n+          stub_contents([{ name: 'queue_a' }], [{ name: 'queue_b' }], [{ name: 'queue_c' }])\\n         end\\n \\n         include_examples 'valid file contents'\\n\"},{\"old_path\":\"spec/lib/gitlab/sidekiq_config/worker_spec.rb\",\"new_path\":\"spec/lib/gitlab/sidekiq_config/worker_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -18,19 +18,26 @@ def create_worker(queue:, **attributes)\\n       get_tags: attributes[:tags]\\n     )\\n \\n-    described_class.new(inner_worker, ee: false)\\n+    described_class.new(inner_worker, ee: false, jh: false)\\n   end\\n \\n   describe '#ee?' do\\n     it 'returns the EE status set on creation' do\\n-      expect(described_class.new(double, ee: true)).to be_ee\\n-      expect(described_class.new(double, ee: false)).not_to be_ee\\n+      expect(described_class.new(double, ee: true, jh: false)).to be_ee\\n+      expect(described_class.new(double, ee: false, jh: false)).not_to be_ee\\n+    end\\n+  end\\n+\\n+  describe '#jh?' 
do\\n+    it 'returns the JH status set on creation' do\\n+      expect(described_class.new(double, ee: false, jh: true)).to be_jh\\n+      expect(described_class.new(double, ee: false, jh: false)).not_to be_jh\\n     end\\n   end\\n \\n   describe '#==' do\\n     def worker_with_yaml(yaml)\\n-      described_class.new(double, ee: false).tap do |worker|\\n+      described_class.new(double, ee: false, jh: false).tap do |worker|\\n         allow(worker).to receive(:to_yaml).and_return(yaml)\\n       end\\n     end\\n@@ -57,7 +64,7 @@ def worker_with_yaml(yaml)\\n \\n         expect(worker).to receive(meth)\\n \\n-        described_class.new(worker, ee: false).send(meth)\\n+        described_class.new(worker, ee: false, jh: false).send(meth)\\n       end\\n     end\\n   end\\n\"},{\"old_path\":\"spec/lib/gitlab/webpack/file_loader_spec.rb\",\"new_path\":\"spec/lib/gitlab/webpack/file_loader_spec.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,79 @@\\n+# frozen_string_literal: true\\n+\\n+require 'fast_spec_helper'\\n+require 'support/helpers/file_read_helpers'\\n+require 'support/webmock'\\n+\\n+RSpec.describe Gitlab::Webpack::FileLoader do\\n+  include FileReadHelpers\\n+  include WebMock::API\\n+\\n+  let(:error_file_path) { \\\"error.yml\\\" }\\n+  let(:file_path) { \\\"my_test_file.yml\\\" }\\n+  let(:file_contents) do\\n+    \\u003c\\u003c-EOF\\n+    - hello\\n+    - world\\n+    - test\\n+    EOF\\n+  end\\n+\\n+  before do\\n+    allow(Gitlab.config.webpack.dev_server).to receive_messages(host: 'hostname', port: 2000, https: false)\\n+    allow(Gitlab.config.webpack).to receive(:public_path).and_return('public_path')\\n+    allow(Gitlab.config.webpack).to receive(:output_dir).and_return('webpack_output')\\n+  end\\n+\\n+  context \\\"with dev server enabled\\\" do\\n+    before do\\n+      allow(Gitlab.config.webpack.dev_server).to receive(:enabled).and_return(true)\\n+\\n+      
stub_request(:get, \\\"http://hostname:2000/public_path/not_found\\\").to_return(status: 404)\\n+      stub_request(:get, \\\"http://hostname:2000/public_path/#{file_path}\\\").to_return(body: file_contents, status: 200)\\n+      stub_request(:get, \\\"http://hostname:2000/public_path/#{error_file_path}\\\").to_raise(StandardError)\\n+    end\\n+\\n+    it \\\"returns content when respondes succesfully\\\" do\\n+      expect(Gitlab::Webpack::FileLoader.load(file_path)).to be(file_contents)\\n+    end\\n+\\n+    it \\\"raises error when 404\\\" do\\n+      expect { Gitlab::Webpack::FileLoader.load(\\\"not_found\\\") }.to raise_error(\\\"HTTP error 404\\\")\\n+    end\\n+\\n+    it \\\"raises error when errors out\\\" do\\n+      expect { Gitlab::Webpack::FileLoader.load(error_file_path) }.to raise_error(Gitlab::Webpack::FileLoader::DevServerLoadError)\\n+    end\\n+  end\\n+\\n+  context \\\"with dev server enabled and https\\\" do\\n+    before do\\n+      allow(Gitlab.config.webpack.dev_server).to receive(:enabled).and_return(true)\\n+      allow(Gitlab.config.webpack.dev_server).to receive(:https).and_return(true)\\n+\\n+      stub_request(:get, \\\"https://hostname:2000/public_path/#{error_file_path}\\\").to_raise(EOFError)\\n+    end\\n+\\n+    it \\\"raises error if catches SSLError\\\" do\\n+      expect { Gitlab::Webpack::FileLoader.load(error_file_path) }.to raise_error(Gitlab::Webpack::FileLoader::DevServerSSLError)\\n+    end\\n+  end\\n+\\n+  context \\\"with dev server disabled\\\" do\\n+    before do\\n+      allow(Gitlab.config.webpack.dev_server).to receive(:enabled).and_return(false)\\n+      stub_file_read(::Rails.root.join(\\\"webpack_output/#{file_path}\\\"), content: file_contents)\\n+      stub_file_read(::Rails.root.join(\\\"webpack_output/#{error_file_path}\\\"), error: Errno::ENOENT)\\n+    end\\n+\\n+    describe \\\".load\\\" do\\n+      it \\\"returns file content from file path\\\" do\\n+        
expect(Gitlab::Webpack::FileLoader.load(file_path)).to be(file_contents)\\n+      end\\n+\\n+      it \\\"throws error if file cannot be read\\\" do\\n+        expect { Gitlab::Webpack::FileLoader.load(error_file_path) }.to raise_error(Gitlab::Webpack::FileLoader::StaticLoadError)\\n+      end\\n+    end\\n+  end\\n+end\\n\"},{\"old_path\":\"spec/lib/gitlab/webpack/graphql_known_operations_spec.rb\",\"new_path\":\"spec/lib/gitlab/webpack/graphql_known_operations_spec.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,47 @@\\n+# frozen_string_literal: true\\n+\\n+require 'fast_spec_helper'\\n+\\n+RSpec.describe Gitlab::Webpack::GraphqlKnownOperations do\\n+  let(:content) do\\n+    \\u003c\\u003c-EOF\\n+    - hello\\n+    - world\\n+    - test\\n+    EOF\\n+  end\\n+\\n+  around do |example|\\n+    described_class.clear_memoization!\\n+\\n+    example.run\\n+\\n+    described_class.clear_memoization!\\n+  end\\n+\\n+  describe \\\".load\\\" do\\n+    context \\\"when file loader returns\\\" do\\n+      before do\\n+        allow(::Gitlab::Webpack::FileLoader).to receive(:load).with(\\\"graphql_known_operations.yml\\\").and_return(content)\\n+      end\\n+\\n+      it \\\"returns memoized value\\\" do\\n+        expect(::Gitlab::Webpack::FileLoader).to receive(:load).once\\n+\\n+        2.times { ::Gitlab::Webpack::GraphqlKnownOperations.load }\\n+\\n+        expect(::Gitlab::Webpack::GraphqlKnownOperations.load).to eq(%w(hello world test))\\n+      end\\n+    end\\n+\\n+    context \\\"when file loader errors\\\" do\\n+      before do\\n+        allow(::Gitlab::Webpack::FileLoader).to receive(:load).and_raise(StandardError.new(\\\"test\\\"))\\n+      end\\n+\\n+      it \\\"returns empty array\\\" do\\n+        expect(::Gitlab::Webpack::GraphqlKnownOperations.load).to eq([])\\n+      end\\n+    end\\n+  
end\\n+end\\n\"},{\"old_path\":\"spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb\",\"new_path\":\"spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -51,6 +51,16 @@\\n         it 'menu link points to Terraform page' do\\n           expect(subject.link).to eq find_menu_item(:terraform).link\\n         end\\n+\\n+        context 'when Terraform menu is not visible' do\\n+          before do\\n+            subject.renderable_items.delete(find_menu_item(:terraform))\\n+          end\\n+\\n+          it 'menu link points to Google Cloud page' do\\n+            expect(subject.link).to eq find_menu_item(:google_cloud).link\\n+          end\\n+        end\\n       end\\n     end\\n \\n@@ -89,5 +99,11 @@ def find_menu_item(menu_item)\\n \\n       it_behaves_like 'access rights checks'\\n     end\\n+\\n+    describe 'Google Cloud' do\\n+      let(:item_id) { :google_cloud }\\n+\\n+      it_behaves_like 'access rights checks'\\n+    end\\n   end\\n end\\n\"},{\"old_path\":\"spec/models/acts_as_taggable_on/tag_spec.rb\",\"new_path\":\"spec/models/acts_as_taggable_on/tag_spec.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,16 @@\\n+# frozen_string_literal: true\\n+\\n+require 'spec_helper'\\n+\\n+RSpec.describe ActsAsTaggableOn::Tag do\\n+  it 'has the same connection as Ci::ApplicationRecord' do\\n+    query = 'select current_database()'\\n+\\n+    expect(described_class.connection.execute(query).first).to eq(Ci::ApplicationRecord.connection.execute(query).first)\\n+    expect(described_class.retrieve_connection.execute(query).first).to eq(Ci::ApplicationRecord.retrieve_connection.execute(query).first)\\n+  end\\n+\\n+  it 'has the same sticking as Ci::ApplicationRecord' do\\n+    expect(described_class.sticking).to 
eq(Ci::ApplicationRecord.sticking)\\n+  end\\n+end\\n\"},{\"old_path\":\"spec/models/acts_as_taggable_on/tagging_spec.rb\",\"new_path\":\"spec/models/acts_as_taggable_on/tagging_spec.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,16 @@\\n+# frozen_string_literal: true\\n+\\n+require 'spec_helper'\\n+\\n+RSpec.describe ActsAsTaggableOn::Tagging do\\n+  it 'has the same connection as Ci::ApplicationRecord' do\\n+    query = 'select current_database()'\\n+\\n+    expect(described_class.connection.execute(query).first).to eq(Ci::ApplicationRecord.connection.execute(query).first)\\n+    expect(described_class.retrieve_connection.execute(query).first).to eq(Ci::ApplicationRecord.retrieve_connection.execute(query).first)\\n+  end\\n+\\n+  it 'has the same sticking as Ci::ApplicationRecord' do\\n+    expect(described_class.sticking).to eq(Ci::ApplicationRecord.sticking)\\n+  end\\n+end\\n\"},{\"old_path\":\"spec/models/group_spec.rb\",\"new_path\":\"spec/models/group_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -2648,14 +2648,6 @@ def setup_group_members(group)\\n     end\\n \\n     it_behaves_like 'returns namespaces with disabled email'\\n-\\n-    context 'when feature flag :linear_group_ancestor_scopes is disabled' do\\n-      before do\\n-        stub_feature_flags(linear_group_ancestor_scopes: false)\\n-      end\\n-\\n-      it_behaves_like 'returns namespaces with disabled email'\\n-    end\\n   end\\n \\n   describe '.timelogs' do\\n\"},{\"old_path\":\"spec/models/namespace_spec.rb\",\"new_path\":\"spec/models/namespace_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -284,7 +284,7 @@\\n       end\\n     end\\n \\n-    context 'creating a default Namespace' do\\n+    context 'creating a Namespace with nil type' 
do\\n       let(:namespace_type) { nil }\\n \\n       it 'is the correct type of namespace' do\\n@@ -295,7 +295,7 @@\\n     end\\n \\n     context 'creating an unknown Namespace type' do\\n-      let(:namespace_type) { 'One' }\\n+      let(:namespace_type) { 'nonsense' }\\n \\n       it 'creates a default Namespace' do\\n         expect(Namespace.find(namespace.id)).to be_a(Namespace)\\n\"},{\"old_path\":\"spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb\",\"new_path\":\"spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,161 @@\\n+# frozen_string_literal: true\\n+\\n+require 'spec_helper'\\n+\\n+RSpec.describe 'Setting issues crm contacts' do\\n+  include GraphqlHelpers\\n+\\n+  let_it_be(:user) { create(:user) }\\n+  let_it_be(:group) { create(:group) }\\n+  let_it_be(:project) { create(:project, group: group) }\\n+  let_it_be(:contacts) { create_list(:contact, 4, group: group) }\\n+\\n+  let(:issue) { create(:issue, project: project) }\\n+  let(:operation_mode) { Types::MutationOperationModeEnum.default_mode }\\n+  let(:crm_contact_ids) { [global_id_of(contacts[1]), global_id_of(contacts[2])] }\\n+  let(:does_not_exist_or_no_permission) { \\\"The resource that you are attempting to access does not exist or you don't have permission to perform this action\\\" }\\n+\\n+  let(:mutation) do\\n+    variables = {\\n+      project_path: issue.project.full_path,\\n+      iid: issue.iid.to_s,\\n+      operation_mode: operation_mode,\\n+      crm_contact_ids: crm_contact_ids\\n+    }\\n+\\n+    graphql_mutation(:issue_set_crm_contacts, variables,\\n+                     \\u003c\\u003c-QL.strip_heredoc\\n+                       clientMutationId\\n+                       errors\\n+                       issue {\\n+                         customerRelationsContacts {\\n+                           nodes {\\n+         
                    id\\n+                           }\\n+                         }\\n+                       }\\n+                     QL\\n+    )\\n+  end\\n+\\n+  def mutation_response\\n+    graphql_mutation_response(:issue_set_crm_contacts)\\n+  end\\n+\\n+  before do\\n+    create(:issue_customer_relations_contact, issue: issue, contact: contacts[0])\\n+    create(:issue_customer_relations_contact, issue: issue, contact: contacts[1])\\n+  end\\n+\\n+  context 'when the user has no permission' do\\n+    it 'returns expected error' do\\n+      error = Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR\\n+      post_graphql_mutation(mutation, current_user: user)\\n+\\n+      expect(graphql_errors).to include(a_hash_including('message' =\\u003e error))\\n+    end\\n+  end\\n+\\n+  context 'when the user has permission' do\\n+    before do\\n+      group.add_reporter(user)\\n+    end\\n+\\n+    context 'when the feature is disabled' do\\n+      before do\\n+        stub_feature_flags(customer_relations: false)\\n+      end\\n+\\n+      it 'raises expected error' do\\n+        post_graphql_mutation(mutation, current_user: user)\\n+\\n+        expect(graphql_errors).to include(a_hash_including('message' =\\u003e 'Feature disabled'))\\n+      end\\n+    end\\n+\\n+    context 'replace' do\\n+      it 'updates the issue with correct contacts' do\\n+        post_graphql_mutation(mutation, current_user: user)\\n+\\n+        expect(graphql_data_at(:issue_set_crm_contacts, :issue, :customer_relations_contacts, :nodes, :id))\\n+          .to match_array([global_id_of(contacts[1]), global_id_of(contacts[2])])\\n+      end\\n+    end\\n+\\n+    context 'append' do\\n+      let(:crm_contact_ids) { [global_id_of(contacts[3])] }\\n+      let(:operation_mode) { Types::MutationOperationModeEnum.enum[:append] }\\n+\\n+      it 'updates the issue with correct contacts' do\\n+        post_graphql_mutation(mutation, current_user: user)\\n+\\n+        
expect(graphql_data_at(:issue_set_crm_contacts, :issue, :customer_relations_contacts, :nodes, :id))\\n+          .to match_array([global_id_of(contacts[0]), global_id_of(contacts[1]), global_id_of(contacts[3])])\\n+      end\\n+    end\\n+\\n+    context 'remove' do\\n+      let(:crm_contact_ids) { [global_id_of(contacts[0])] }\\n+      let(:operation_mode) { Types::MutationOperationModeEnum.enum[:remove] }\\n+\\n+      it 'updates the issue with correct contacts' do\\n+        post_graphql_mutation(mutation, current_user: user)\\n+\\n+        expect(graphql_data_at(:issue_set_crm_contacts, :issue, :customer_relations_contacts, :nodes, :id))\\n+          .to match_array([global_id_of(contacts[1])])\\n+      end\\n+    end\\n+\\n+    context 'when the contact does not exist' do\\n+      let(:crm_contact_ids) { [\\\"gid://gitlab/CustomerRelations::Contact/#{non_existing_record_id}\\\"] }\\n+\\n+      it 'returns expected error' do\\n+        post_graphql_mutation(mutation, current_user: user)\\n+\\n+        expect(graphql_data_at(:issue_set_crm_contacts, :errors))\\n+          .to match_array([\\\"Issue customer relations contacts #{non_existing_record_id}: #{does_not_exist_or_no_permission}\\\"])\\n+      end\\n+    end\\n+\\n+    context 'when the contact belongs to a different group' do\\n+      let(:group2) { create(:group) }\\n+      let(:contact) { create(:contact, group: group2) }\\n+      let(:crm_contact_ids) { [global_id_of(contact)] }\\n+\\n+      before do\\n+        group2.add_reporter(user)\\n+      end\\n+\\n+      it 'returns expected error' do\\n+        post_graphql_mutation(mutation, current_user: user)\\n+\\n+        expect(graphql_data_at(:issue_set_crm_contacts, :errors))\\n+        .to match_array([\\\"Issue customer relations contacts #{contact.id}: #{does_not_exist_or_no_permission}\\\"])\\n+      end\\n+    end\\n+\\n+    context 'when attempting to add more than 6' do\\n+      let(:operation_mode) { 
Types::MutationOperationModeEnum.enum[:append] }\\n+      let(:gid) { global_id_of(contacts[0]) }\\n+      let(:crm_contact_ids) { [gid, gid, gid, gid, gid, gid, gid] }\\n+\\n+      it 'returns expected error' do\\n+        post_graphql_mutation(mutation, current_user: user)\\n+\\n+        expect(graphql_data_at(:issue_set_crm_contacts, :errors))\\n+          .to match_array([\\\"You can only add up to 6 contacts at one time\\\"])\\n+      end\\n+    end\\n+\\n+    context 'when trying to remove non-existent contact' do\\n+      let(:operation_mode) { Types::MutationOperationModeEnum.enum[:remove] }\\n+      let(:crm_contact_ids) { [\\\"gid://gitlab/CustomerRelations::Contact/#{non_existing_record_id}\\\"] }\\n+\\n+      it 'raises expected error' do\\n+        post_graphql_mutation(mutation, current_user: user)\\n+\\n+        expect(graphql_data_at(:issue_set_crm_contacts, :errors)).to be_empty\\n+      end\\n+    end\\n+  end\\n+end\\n\"},{\"old_path\":\"spec/requests/api/settings_spec.rb\",\"new_path\":\"spec/requests/api/settings_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -612,5 +612,46 @@\\n         expect(json_response.slice(*settings.keys)).to eq(settings)\\n       end\\n     end\\n+\\n+    context 'Sentry settings' do\\n+      let(:settings) do\\n+        {\\n+          sentry_enabled: true,\\n+          sentry_dsn: 'http://sentry.example.com',\\n+          sentry_clientside_dsn: 'http://sentry.example.com',\\n+          sentry_environment: 'production'\\n+        }\\n+      end\\n+\\n+      let(:attribute_names) { settings.keys.map(\\u0026:to_s) }\\n+\\n+      it 'includes the attributes in the API' do\\n+        get api('/application/settings', admin)\\n+\\n+        expect(response).to have_gitlab_http_status(:ok)\\n+        attribute_names.each do |attribute|\\n+          expect(json_response.keys).to include(attribute)\\n+        end\\n+      end\\n+\\n+      it 
'allows updating the settings' do\\n+        put api('/application/settings', admin), params: settings\\n+\\n+        expect(response).to have_gitlab_http_status(:ok)\\n+        settings.each do |attribute, value|\\n+          expect(ApplicationSetting.current.public_send(attribute)).to eq(value)\\n+        end\\n+      end\\n+\\n+      context 'missing sentry_dsn value when sentry_enabled is true' do\\n+        it 'returns a blank parameter error message' do\\n+          put api('/application/settings', admin), params: { sentry_enabled: true }\\n+\\n+          expect(response).to have_gitlab_http_status(:bad_request)\\n+          message = json_response['message']\\n+          expect(message[\\\"sentry_dsn\\\"]).to include(a_string_matching(\\\"can't be blank\\\"))\\n+        end\\n+      end\\n+    end\\n   end\\n end\\n\"},{\"old_path\":\"spec/requests/api/v3/github_spec.rb\",\"new_path\":\"spec/requests/api/v3/github_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -6,7 +6,7 @@\\n   let_it_be(:user) { create(:user) }\\n   let_it_be(:unauthorized_user) { create(:user) }\\n   let_it_be(:admin) { create(:user, :admin) }\\n-  let_it_be(:project) { create(:project, :repository, creator: user) }\\n+  let_it_be_with_reload(:project) { create(:project, :repository, creator: user) }\\n \\n   before do\\n     project.add_maintainer(user)\\n@@ -506,11 +506,18 @@ def expect_project_under_namespace(projects, namespace, user)\\n \\n   describe 'GET /repos/:namespace/:project/commits/:sha' do\\n     let(:commit) { project.repository.commit }\\n-    let(:commit_id) { commit.id }\\n+\\n+    def call_api(commit_id: commit.id)\\n+      jira_get v3_api(\\\"/repos/#{project.namespace.path}/#{project.path}/commits/#{commit_id}\\\", user)\\n+    end\\n+\\n+    def response_diff_files(response)\\n+      Gitlab::Json.parse(response.body)['files']\\n+    end\\n \\n     context 'authenticated' do\\n-      
it 'returns commit with github format' do\\n-        jira_get v3_api(\\\"/repos/#{project.namespace.path}/#{project.path}/commits/#{commit_id}\\\", user)\\n+      it 'returns commit with github format', :aggregate_failures do\\n+        call_api\\n \\n         expect(response).to have_gitlab_http_status(:ok)\\n         expect(response).to match_response_schema('entities/github/commit')\\n@@ -519,36 +526,130 @@ def expect_project_under_namespace(projects, namespace, user)\\n       it 'returns 200 when project path include a dot' do\\n         project.update!(path: 'foo.bar')\\n \\n-        jira_get v3_api(\\\"/repos/#{project.namespace.path}/#{project.path}/commits/#{commit_id}\\\", user)\\n+        call_api\\n \\n         expect(response).to have_gitlab_http_status(:ok)\\n       end\\n \\n-      it 'returns 200 when namespace path include a dot' do\\n-        group = create(:group, path: 'foo.bar')\\n-        project = create(:project, :repository, group: group)\\n-        project.add_reporter(user)\\n+      context 'when namespace path includes a dot' do\\n+        let(:group) { create(:group, path: 'foo.bar') }\\n+        let(:project) { create(:project, :repository, group: group) }\\n \\n-        jira_get v3_api(\\\"/repos/#{group.path}/#{project.path}/commits/#{commit_id}\\\", user)\\n+        it 'returns 200 when namespace path include a dot' do\\n+          project.add_reporter(user)\\n \\n-        expect(response).to have_gitlab_http_status(:ok)\\n+          call_api\\n+\\n+          expect(response).to have_gitlab_http_status(:ok)\\n+        end\\n+      end\\n+\\n+      context 'when the Gitaly `CommitDiff` RPC times out', :use_clean_rails_memory_store_caching do\\n+        let(:commit_diff_args) { [project.repository_storage, :diff_service, :commit_diff, any_args] }\\n+\\n+        before do\\n+          allow(Gitlab::GitalyClient).to receive(:call)\\n+            .and_call_original\\n+        end\\n+\\n+        it 'handles the error, logs it, and returns 
empty diff files', :aggregate_failures do\\n+          allow(Gitlab::GitalyClient).to receive(:call)\\n+            .with(*commit_diff_args)\\n+            .and_raise(GRPC::DeadlineExceeded)\\n+\\n+          expect(Gitlab::ErrorTracking)\\n+            .to receive(:track_exception)\\n+            .with an_instance_of(GRPC::DeadlineExceeded)\\n+\\n+          call_api\\n+\\n+          expect(response).to have_gitlab_http_status(:ok)\\n+          expect(response_diff_files(response)).to be_blank\\n+        end\\n+\\n+        it 'does not handle the error when feature flag is disabled', :aggregate_failures do\\n+          stub_feature_flags(api_v3_commits_skip_diff_files: false)\\n+\\n+          allow(Gitlab::GitalyClient).to receive(:call)\\n+            .with(*commit_diff_args)\\n+            .and_raise(GRPC::DeadlineExceeded)\\n+\\n+          call_api\\n+\\n+          expect(response).to have_gitlab_http_status(:error)\\n+        end\\n+\\n+        it 'only calls Gitaly once for all attempts within a period of time', :aggregate_failures do\\n+          expect(Gitlab::GitalyClient).to receive(:call)\\n+            .with(*commit_diff_args)\\n+            .once # \\u003c- once\\n+            .and_raise(GRPC::DeadlineExceeded)\\n+\\n+          3.times do\\n+            call_api\\n+\\n+            expect(response).to have_gitlab_http_status(:ok)\\n+            expect(response_diff_files(response)).to be_blank\\n+          end\\n+        end\\n+\\n+        it 'calls Gitaly again after a period of time', :aggregate_failures do\\n+          expect(Gitlab::GitalyClient).to receive(:call)\\n+            .with(*commit_diff_args)\\n+            .twice # \\u003c- twice\\n+            .and_raise(GRPC::DeadlineExceeded)\\n+\\n+          call_api\\n+\\n+          expect(response).to have_gitlab_http_status(:ok)\\n+          expect(response_diff_files(response)).to be_blank\\n+\\n+          travel_to((described_class::GITALY_TIMEOUT_CACHE_EXPIRY + 1.second).from_now) do\\n+          
  call_api\\n+\\n+            expect(response).to have_gitlab_http_status(:ok)\\n+            expect(response_diff_files(response)).to be_blank\\n+          end\\n+        end\\n+\\n+        it 'uses a unique cache key, allowing other calls to succeed' do\\n+          cache_key = [described_class::GITALY_TIMEOUT_CACHE_KEY, project.id, commit.cache_key].join(':')\\n+          Rails.cache.write(cache_key, 1)\\n+\\n+          expect(Gitlab::GitalyClient).to receive(:call)\\n+            .with(*commit_diff_args)\\n+            .once # \\u003c- once\\n+\\n+          call_api\\n+\\n+          expect(response).to have_gitlab_http_status(:ok)\\n+          expect(response_diff_files(response)).to be_blank\\n+\\n+          call_api(commit_id: commit.parent.id)\\n+\\n+          expect(response).to have_gitlab_http_status(:ok)\\n+          expect(response_diff_files(response).length).to eq(1)\\n+        end\\n       end\\n     end\\n \\n     context 'unauthenticated' do\\n+      let(:user) { nil }\\n+\\n       it 'returns 401' do\\n-        jira_get v3_api(\\\"/repos/#{project.namespace.path}/#{project.path}/commits/#{commit_id}\\\", nil)\\n+        call_api\\n \\n         expect(response).to have_gitlab_http_status(:unauthorized)\\n       end\\n     end\\n \\n     context 'unauthorized' do\\n+      let(:user) { unauthorized_user }\\n+\\n       it 'returns 404 when lower access level' do\\n-        project.add_guest(unauthorized_user)\\n+        project.add_guest(user)\\n \\n-        jira_get v3_api(\\\"/repos/#{project.namespace.path}/#{project.path}/commits/#{commit_id}\\\",\\n-                   unauthorized_user)\\n+        call_api\\n \\n         expect(response).to have_gitlab_http_status(:not_found)\\n       
end\\n\"},{\"old_path\":\"spec/services/authorized_project_update/project_access_changed_service_spec.rb\",\"new_path\":\"spec/services/authorized_project_update/project_access_changed_service_spec.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,21 @@\\n+# frozen_string_literal: true\\n+\\n+require 'spec_helper'\\n+\\n+RSpec.describe AuthorizedProjectUpdate::ProjectAccessChangedService do\\n+  describe '#execute' do\\n+    it 'schedules the project IDs' do\\n+      expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to receive(:bulk_perform_and_wait)\\n+        .with([[1], [2]])\\n+\\n+      described_class.new([1, 2]).execute\\n+    end\\n+\\n+    it 'permits non-blocking operation' do\\n+      expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to receive(:bulk_perform_async)\\n+        .with([[1], [2]])\\n+\\n+      described_class.new([1, 2]).execute(blocking: false)\\n+    end\\n+  end\\n+end\\n\"},{\"old_path\":\"spec/services/groups/transfer_service_spec.rb\",\"new_path\":\"spec/services/groups/transfer_service_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -593,11 +593,16 @@\\n           let_it_be_with_reload(:group) { create(:group, :private, parent: old_parent_group) }\\n           let_it_be(:new_group_member) { create(:user) }\\n           let_it_be(:old_group_member) { create(:user) }\\n+          let_it_be(:unique_subgroup_member) { create(:user) }\\n+          let_it_be(:direct_project_member) { create(:user) }\\n \\n           before do\\n             new_parent_group.add_maintainer(new_group_member)\\n             old_parent_group.add_maintainer(old_group_member)\\n+            subgroup1.add_developer(unique_subgroup_member)\\n+            nested_project.add_developer(direct_project_member)\\n             group.refresh_members_authorized_projects\\n+            
subgroup1.refresh_members_authorized_projects\\n           end\\n \\n           it 'removes old project authorizations' do\\n@@ -613,7 +618,7 @@\\n           end\\n \\n           it 'performs authorizations job immediately' do\\n-            expect(AuthorizedProjectsWorker).to receive(:bulk_perform_inline)\\n+            expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to receive(:bulk_perform_inline)\\n \\n             transfer_service.execute(new_parent_group)\\n           end\\n@@ -630,14 +635,24 @@\\n                 ProjectAuthorization.where(project_id: nested_project.id, user_id: new_group_member.id).size\\n               }.from(0).to(1)\\n             end\\n+\\n+            it 'preserves existing project authorizations for direct project members' do\\n+              expect { transfer_service.execute(new_parent_group) }.not_to change {\\n+                ProjectAuthorization.where(project_id: nested_project.id, user_id: direct_project_member.id).count\\n+              }\\n+            end\\n           end\\n \\n-          context 'for groups with many members' do\\n-            before do\\n-              11.times do\\n-                new_parent_group.add_maintainer(create(:user))\\n-              end\\n+          context 'for nested groups with unique members' do\\n+            it 'preserves existing project authorizations' do\\n+              expect { transfer_service.execute(new_parent_group) }.not_to change {\\n+                ProjectAuthorization.where(project_id: nested_project.id, user_id: unique_subgroup_member.id).count\\n+              }\\n             end\\n+          end\\n+\\n+          context 'for groups with many projects' do\\n+            let_it_be(:project_list) { create_list(:project, 11, :repository, :private, namespace: group) }\\n \\n             it 'adds new project authorizations for the user which makes a transfer' do\\n               transfer_service.execute(new_parent_group)\\n@@ -646,9 +661,21 @@\\n               
expect(ProjectAuthorization.where(project_id: nested_project.id, user_id: user.id).size).to eq(1)\\n             end\\n \\n+            it 'adds project authorizations for users in the new hierarchy' do\\n+              expect { transfer_service.execute(new_parent_group) }.to change {\\n+                ProjectAuthorization.where(project_id: project_list.map { |project| project.id }, user_id: new_group_member.id).size\\n+              }.from(0).to(project_list.count)\\n+            end\\n+\\n+            it 'removes project authorizations for users in the old hierarchy' do\\n+              expect { transfer_service.execute(new_parent_group) }.to change {\\n+                ProjectAuthorization.where(project_id: project_list.map { |project| project.id }, user_id: old_group_member.id).size\\n+              }.from(project_list.count).to(0)\\n+            end\\n+\\n             it 'schedules authorizations job' do\\n-              expect(AuthorizedProjectsWorker).to receive(:bulk_perform_async)\\n-                .with(array_including(new_parent_group.members_with_parents.pluck(:user_id).map {|id| [id, anything] }))\\n+              expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to receive(:bulk_perform_async)\\n+                .with(array_including(group.all_projects.ids.map { |id| [id, anything] }))\\n \\n               transfer_service.execute(new_parent_group)\\n             end\\n\"},{\"old_path\":\"spec/services/issues/set_crm_contacts_service_spec.rb\",\"new_path\":\"spec/services/issues/set_crm_contacts_service_spec.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,162 @@\\n+# frozen_string_literal: true\\n+\\n+require 'spec_helper'\\n+\\n+RSpec.describe Issues::SetCrmContactsService do\\n+  let_it_be(:user) { create(:user) }\\n+  let_it_be(:group) { create(:group) }\\n+  let_it_be(:project) { create(:project, group: group) }\\n+  let_it_be(:contacts) { 
create_list(:contact, 4, group: group) }\\n+\\n+  let(:issue) { create(:issue, project: project) }\\n+  let(:does_not_exist_or_no_permission) { \\\"The resource that you are attempting to access does not exist or you don't have permission to perform this action\\\" }\\n+\\n+  before do\\n+    create(:issue_customer_relations_contact, issue: issue, contact: contacts[0])\\n+    create(:issue_customer_relations_contact, issue: issue, contact: contacts[1])\\n+  end\\n+\\n+  subject(:set_crm_contacts) do\\n+    described_class.new(project: project, current_user: user, params: params).execute(issue)\\n+  end\\n+\\n+  describe '#execute' do\\n+    context 'when the user has no permission' do\\n+      let(:params) { { crm_contact_ids: [contacts[1].id, contacts[2].id] } }\\n+\\n+      it 'returns expected error response' do\\n+        response = set_crm_contacts\\n+\\n+        expect(response).to be_error\\n+        expect(response.message).to match_array(['You have insufficient permissions to set customer relations contacts for this issue'])\\n+      end\\n+    end\\n+\\n+    context 'when user has permission' do\\n+      before do\\n+        group.add_reporter(user)\\n+      end\\n+\\n+      context 'when the contact does not exist' do\\n+        let(:params) { { crm_contact_ids: [non_existing_record_id] } }\\n+\\n+        it 'returns expected error response' do\\n+          response = set_crm_contacts\\n+\\n+          expect(response).to be_error\\n+          expect(response.message).to match_array([\\\"Issue customer relations contacts #{non_existing_record_id}: #{does_not_exist_or_no_permission}\\\"])\\n+        end\\n+      end\\n+\\n+      context 'when the contact belongs to a different group' do\\n+        let(:group2) { create(:group) }\\n+        let(:contact) { create(:contact, group: group2) }\\n+        let(:params) { { crm_contact_ids: [contact.id] } }\\n+\\n+        before do\\n+          group2.add_reporter(user)\\n+        end\\n+\\n+        it 'returns 
expected error response' do\\n+          response = set_crm_contacts\\n+\\n+          expect(response).to be_error\\n+          expect(response.message).to match_array([\\\"Issue customer relations contacts #{contact.id}: #{does_not_exist_or_no_permission}\\\"])\\n+        end\\n+      end\\n+\\n+      context 'replace' do\\n+        let(:params) { { crm_contact_ids: [contacts[1].id, contacts[2].id] } }\\n+\\n+        it 'updates the issue with correct contacts' do\\n+          response = set_crm_contacts\\n+\\n+          expect(response).to be_success\\n+          expect(issue.customer_relations_contacts).to match_array([contacts[1], contacts[2]])\\n+        end\\n+      end\\n+\\n+      context 'add' do\\n+        let(:params) { { add_crm_contact_ids: [contacts[3].id] } }\\n+\\n+        it 'updates the issue with correct contacts' do\\n+          response = set_crm_contacts\\n+\\n+          expect(response).to be_success\\n+          expect(issue.customer_relations_contacts).to match_array([contacts[0], contacts[1], contacts[3]])\\n+        end\\n+      end\\n+\\n+      context 'remove' do\\n+        let(:params) { { remove_crm_contact_ids: [contacts[0].id] } }\\n+\\n+        it 'updates the issue with correct contacts' do\\n+          response = set_crm_contacts\\n+\\n+          expect(response).to be_success\\n+          expect(issue.customer_relations_contacts).to match_array([contacts[1]])\\n+        end\\n+      end\\n+\\n+      context 'when attempting to add more than 6' do\\n+        let(:id) { contacts[0].id }\\n+        let(:params) { { add_crm_contact_ids: [id, id, id, id, id, id, id] } }\\n+\\n+        it 'returns expected error message' do\\n+          response = set_crm_contacts\\n+\\n+          expect(response).to be_error\\n+          expect(response.message).to match_array(['You can only add up to 6 contacts at one time'])\\n+        end\\n+      end\\n+\\n+      context 'when trying to remove non-existent contact' do\\n+        let(:params) { { 
remove_crm_contact_ids: [non_existing_record_id] } }\\n+\\n+        it 'returns expected error message' do\\n+          response = set_crm_contacts\\n+\\n+          expect(response).to be_success\\n+          expect(response.message).to be_nil\\n+        end\\n+      end\\n+\\n+      context 'when combining params' do\\n+        let(:error_invalid_params) { 'You cannot combine crm_contact_ids with add_crm_contact_ids or remove_crm_contact_ids' }\\n+\\n+        context 'add and remove' do\\n+          let(:params) { { remove_crm_contact_ids: [contacts[1].id], add_crm_contact_ids: [contacts[3].id] } }\\n+\\n+          it 'updates the issue with correct contacts' do\\n+            response = set_crm_contacts\\n+\\n+            expect(response).to be_success\\n+            expect(issue.customer_relations_contacts).to match_array([contacts[0], contacts[3]])\\n+          end\\n+        end\\n+\\n+        context 'replace and remove' do\\n+          let(:params) { { crm_contact_ids: [contacts[3].id], remove_crm_contact_ids: [contacts[0].id] } }\\n+\\n+          it 'returns expected error response' do\\n+            response = set_crm_contacts\\n+\\n+            expect(response).to be_error\\n+            expect(response.message).to match_array([error_invalid_params])\\n+          end\\n+        end\\n+\\n+        context 'replace and add' do\\n+          let(:params) { { crm_contact_ids: [contacts[3].id], add_crm_contact_ids: [contacts[1].id] } }\\n+\\n+          it 'returns expected error response' do\\n+            response = set_crm_contacts\\n+\\n+            expect(response).to be_error\\n+            expect(response.message).to match_array([error_invalid_params])\\n+          end\\n+        end\\n+      end\\n+    end\\n+  
end\\n+end\\n\"},{\"old_path\":\"spec/services/projects/container_repository/cleanup_tags_service_spec.rb\",\"new_path\":\"spec/services/projects/container_repository/cleanup_tags_service_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"\"},{\"old_path\":\"spec/lib/gitlab/sidekiq_cluster_spec.rb\",\"new_path\":\"spec/sidekiq_cluster/sidekiq_cluster_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":true,\"deleted_file\":false,\"diff\":\"@@ -1,9 +1,10 @@\\n # frozen_string_literal: true\\n \\n-require 'fast_spec_helper'\\n require 'rspec-parameterized'\\n \\n-RSpec.describe Gitlab::SidekiqCluster do\\n+require_relative '../../sidekiq_cluster/sidekiq_cluster'\\n+\\n+RSpec.describe Gitlab::SidekiqCluster do # rubocop:disable RSpec/FilePath\\n   describe '.trap_signals' do\\n     it 'traps the given signals' do\\n       expect(described_class).to receive(:trap).ordered.with(:INT)\\n\"},{\"old_path\":\"spec/spec_helper.rb\",\"new_path\":\"spec/spec_helper.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -107,9 +107,7 @@\\n         warn `curl -s -o log/goroutines.log http://localhost:9236/debug/pprof/goroutine?debug=2`\\n       end\\n     end\\n-  end\\n-\\n-  unless ENV['CI']\\n+  else\\n     # Allow running `:focus` examples locally,\\n     # falling back to all tests when there is no `:focus` example.\\n     config.filter_run focus: true\\n\"},{\"old_path\":\"spec/support/flaky_tests.rb\",\"new_path\":\"spec/support/flaky_tests.rb\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,36 @@\\n+# frozen_string_literal: true\\n+\\n+return unless ENV['CI']\\n+return unless ENV['SKIP_FLAKY_TESTS_AUTOMATICALLY'] == \\\"true\\\"\\n+return if 
ENV['CI_MERGE_REQUEST_LABELS'].to_s.include?('pipeline:run-flaky-tests')\\n+\\n+require_relative '../tooling/rspec_flaky/report'\\n+\\n+RSpec.configure do |config|\\n+  $flaky_test_example_ids = begin # rubocop:disable Style/GlobalVars\\n+    raise \\\"$SUITE_FLAKY_RSPEC_REPORT_PATH is empty.\\\" if ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'].to_s.empty?\\n+    raise \\\"#{ENV['SUITE_FLAKY_RSPEC_REPORT_PATH']} doesn't exist\\\" unless File.exist?(ENV['SUITE_FLAKY_RSPEC_REPORT_PATH'])\\n+\\n+    RspecFlaky::Report.load(ENV['SUITE_FLAKY_RSPEC_REPORT_PATH']).map { |_, flaky_test_data| flaky_test_data[\\\"example_id\\\"] }\\n+  rescue =\\u003e e # rubocop:disable Style/RescueStandardError\\n+    puts e\\n+    []\\n+  end\\n+  $skipped_flaky_tests_report = [] # rubocop:disable Style/GlobalVars\\n+\\n+  config.around do |example|\\n+    # Skip flaky tests automatically\\n+    if $flaky_test_example_ids.include?(example.id) # rubocop:disable Style/GlobalVars\\n+      puts \\\"Skipping #{example.id} '#{example.full_description}' because it's flaky.\\\"\\n+      $skipped_flaky_tests_report \\u003c\\u003c example.id # rubocop:disable Style/GlobalVars\\n+    else\\n+      example.run\\n+    end\\n+  end\\n+\\n+  config.after(:suite) do\\n+    next unless ENV['SKIPPED_FLAKY_TESTS_REPORT_PATH']\\n+\\n+    File.write(ENV['SKIPPED_FLAKY_TESTS_REPORT_PATH'], \\\"#{$skipped_flaky_tests_report.join(\\\"\\\\n\\\")}\\\\n\\\") # rubocop:disable Style/GlobalVars\\n+  end\\n+end\\n\"},{\"old_path\":\"spec/tooling/quality/test_level_spec.rb\",\"new_path\":\"spec/tooling/quality/test_level_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -49,7 +49,7 @@\\n     context 'when level is integration' do\\n       it 'returns a pattern' do\\n         expect(subject.pattern(:integration))\\n-          .to eq(\\\"spec/{controllers,mailers,requests}{,/**/}*_spec.rb\\\")\\n+          .to 
eq(\\\"spec/{commands,controllers,mailers,requests}{,/**/}*_spec.rb\\\")\\n       end\\n     end\\n \\n@@ -131,7 +131,7 @@\\n     context 'when level is integration' do\\n       it 'returns a regexp' do\\n         expect(subject.regexp(:integration))\\n-          .to eq(%r{spec/(controllers|mailers|requests)})\\n+          .to eq(%r{spec/(commands|controllers|mailers|requests)})\\n       end\\n     end\\n \\n@@ -204,6 +204,10 @@\\n       expect(subject.level_for('spec/mailers/abuse_report_mailer_spec.rb')).to eq(:integration)\\n     end\\n \\n+    it 'returns the correct level for an integration test in a subfolder' do\\n+      expect(subject.level_for('spec/commands/sidekiq_cluster/cli.rb')).to eq(:integration)\\n+    end\\n+\\n     it 'returns the correct level for a system test' do\\n       expect(subject.level_for('spec/features/abuse_report_spec.rb')).to eq(:system)\\n     end\\n\"},{\"old_path\":\"spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb\",\"new_path\":\"spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -82,8 +82,9 @@\\n             nil | 10  | nil\\n             0   | 5   | nil\\n             10  | 0   | 0\\n-            10  | 5   | 0.5\\n-            3   | 10  | (10 / 3.to_f)\\n+            10  | 5   | 50.0\\n+            17  | 3   | 17.65\\n+            3   | 10  | 333.33\\n           end\\n \\n           with_them do\\n\"},{\"old_path\":\"tooling/bin/find_change_diffs\",\"new_path\":\"tooling/bin/find_change_diffs\",\"a_mode\":\"100755\",\"b_mode\":\"100755\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -5,11 +5,22 @@ require 'gitlab'\\n require 'pathname'\\n \\n # This script saves the diffs of changes in an MR to the directory specified as the first argument\\n+#\\n+# It exits with a success code if diffs are found 
and saved, or if there are no changes, including if the script runs in\\n+# a pipeline that is not for a merge request.\\n \\n gitlab_token = ENV.fetch('PROJECT_TOKEN_FOR_CI_SCRIPTS_API_USAGE')\\n gitlab_endpoint = ENV.fetch('CI_API_V4_URL')\\n-mr_project_path = ENV.fetch('CI_MERGE_REQUEST_PROJECT_PATH')\\n-mr_iid = ENV.fetch('CI_MERGE_REQUEST_IID')\\n+mr_project_path = ENV['CI_MERGE_REQUEST_PROJECT_PATH']\\n+mr_iid = ENV['CI_MERGE_REQUEST_IID']\\n+\\n+puts \\\"CI_MERGE_REQUEST_PROJECT_PATH is missing.\\\" if mr_project_path.to_s.empty?\\n+puts \\\"CI_MERGE_REQUEST_IID is missing.\\\" if mr_iid.to_s.empty?\\n+\\n+unless mr_project_path \\u0026\\u0026 mr_iid\\n+  puts \\\"Exiting as this does not appear to be a merge request pipeline.\\\"\\n+  exit\\n+end\\n \\n abort(\\\"ERROR: Please specify a directory to write MR diffs into.\\\") if ARGV.empty?\\n output_diffs_dir = Pathname.new(ARGV.shift).expand_path\\n\"},{\"old_path\":\"tooling/bin/qa/check_if_only_quarantined_specs\",\"new_path\":\"tooling/bin/qa/check_if_only_quarantined_specs\",\"a_mode\":\"100755\",\"b_mode\":\"0\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":true,\"diff\":\"@@ -1,18 +0,0 @@\\n-#!/usr/bin/env ruby\\n-# frozen_string_literal: true\\n-\\n-require 'pathname'\\n-\\n-# This script assumes the first argument is a directory of files containing diffs of changes from an MR. It exits with a\\n-# success code if all diffs add a line that quarantines a test. 
If any diffs are not specs, or they are specs that don't\\n-# quarantine a test, it exits with code 1 to indicate failure (i.e., there was _not_ only quarantined specs).\\n-\\n-abort(\\\"ERROR: Please specify the directory containing MR diffs.\\\") if ARGV.empty?\\n-diffs_dir = Pathname.new(ARGV.shift).expand_path\\n-\\n-diffs_dir.glob('**/*').each do |path|\\n-  next if path.directory?\\n-\\n-  exit 1 unless path.to_s.end_with?('_spec.rb.diff')\\n-  exit 1 unless path.read.match?(/^\\\\+.*, quarantine:/)\\n-end\\n\"},{\"old_path\":\"tooling/bin/qa/package_and_qa_check\",\"new_path\":\"tooling/bin/qa/package_and_qa_check\",\"a_mode\":\"0\",\"b_mode\":\"100755\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -0,0 +1,45 @@\\n+#!/usr/bin/env ruby\\n+# frozen_string_literal: true\\n+\\n+require 'pathname'\\n+\\n+# This script checks if the package-and-qa job should trigger downstream pipelines to run the QA suite.\\n+#\\n+# It assumes the first argument is a directory of files containing diffs of changes from an MR\\n+# (e.g., created by tooling/bin/find_change_diffs). It exits with a success code if there are no diffs, or if the diffs\\n+# are suitable to run QA tests.\\n+#\\n+# The script will abort (exit code 1) if the argument is missing.\\n+#\\n+# The following condition will result in a failure code (2), indicating that package-and-qa should not run:\\n+#\\n+#   - If the changes only include tests being put in quarantine\\n+\\n+abort(\\\"ERROR: Please specify the directory containing MR diffs.\\\") if ARGV.empty?\\n+diffs_dir = Pathname.new(ARGV.shift).expand_path\\n+\\n+# Run package-and-qa if there are no diffs. 
E.g., in scheduled pipelines\\n+exit 0 if diffs_dir.glob('**/*').empty?\\n+\\n+files_count = 0\\n+specs_count = 0\\n+quarantine_specs_count = 0\\n+\\n+diffs_dir.glob('**/*').each do |path|\\n+  next if path.directory?\\n+\\n+  files_count += 1\\n+  next unless path.to_s.end_with?('_spec.rb.diff')\\n+\\n+  specs_count += 1\\n+  quarantine_specs_count += 1 if path.read.match?(/^\\\\+.*, quarantine:/)\\n+end\\n+\\n+# Run package-and-qa if there are no specs. E.g., when the MR changes QA framework files.\\n+exit 0 if specs_count == 0\\n+\\n+# Skip package-and-qa if there are only specs being put in quarantine.\\n+exit 2 if quarantine_specs_count == specs_count \\u0026\\u0026 quarantine_specs_count == files_count\\n+\\n+# Run package-and-qa under any other circumstances. E.g., if there are specs being put in quarantine but there are also\\n+# other changes that might need to be tested.\\n\"},{\"old_path\":\"tooling/quality/test_level.rb\",\"new_path\":\"tooling/quality/test_level.rb\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"@@ -54,6 +54,7 @@ class TestLevel\\n         tooling\\n       ],\\n       integration: %w[\\n+        commands\\n         controllers\\n         mailers\\n         requests\\n\"}],\"compare_timeout\":false,\"compare_same_ref\":false,\"web_url\":\"https://gitlab.com/gitlab-org/gitlab-foss/-/compare/4901ff1764398bb017487d4a5104b74bc284f33a...2295d352f6073101497f9bf4e4981c7ae72706a3\"}"
  },
  {
    "path": "testutils/testdata/index.json",
    "content": "{\"repo_slug\":\"sachin14/nexe\",\"fork_slug\":\"\",\"repo_link\":\"https://github.com/sachin14/nexe\",\"build_target_commit\":\"\",\"build_base_commit\":\"\",\"task_id\":\"\",\"branch_name\":\"main\",\"build_id\":\"2850df28b4a043959h65eb6c03772d5c\",\"repo_id\":\"7572a0f9a08a4130a2265f6eb2470eb5\",\"org_id\":\"1cd18453e7f440f1a0kd6418c5a708da\",\"git_provider\":\"github\",\"private_repo\":false,\"event_type\":\"push\",\"diff_url\":\"\",\"pull_request_number\":0,\"commits\":[{\"Sha\":\"a0fa2fb0201c62aa541c1a6eba516a8fefd874d8\",\"Link\":\"https://github.com/sachin14/nexe/commit/a0fa2fb0201c62aa541c1a6eba516a8fefd874d8\",\"added\":[],\"removed\":[],\"modified\":[\"Readme.md\"],\"message\":\"first commit\"},{\"Sha\":\"60143149e18581ad15b8a76fd2ed96e695d7826e\",\"Link\":\"https://github.com/sachin14/nexe/commit/60143149e18581ad15b8a76fd2ed96e695d7826e\",\"added\":[],\"removed\":[],\"modified\":[\"Readme.md\"],\"message\":\"second commit\"}],\"tas_file_name\":\".tas.yml\",\"locators\":\"\",\"locator_address\":\"\",\"parent_commit_coverage_exists\":false,\"license_tier\":\"\",\"collect_coverage\":false}\n"
  },
  {
    "path": "testutils/testdata/index.txt",
    "content": "{\"event_id\":\"19fab9d6b1469h1b87ef421800994c57\",\"build_id\":\"2850df28b4a043959h65eb6c03772d5c\",\"repo_id\":\"7572a0f9a08a4130a2265f6eb2470eb5\",\"org_id\":\"1cd18453e7f440f1a0kd6418c5a708da\",\"repo_slug\":\"sachin14/nexe\",\"repo_link\":\"https://github.com/sachin14/nexe\",\"commit_before\":\"4c208cc3ad0caeff76dyiaa94ad46a2006fad7e5\",\"target_commit\":\"60143149e18581ad15bje76fd2ed96e695d7826e\",\"git_provider\":\"github\",\"private_repo\":false,\"event_type\":\"push\",\"commits\":[{\"Sha\":\"a0fa2fb0201c62aa541c1a6eba516a8fefd874d8\",\"Message\":\"first commit\",\"Author\":{\"Name\":\"Sachin Kumar\",\"Email\":\"test@gmail.com\",\"Date\":\"2021-10-25T12:50:20+05:30\",\"Login\":\"sachin\",\"Avatar\":\"\"},\"Committer\":{\"Name\":\"Sachin Kumar\",\"Email\":\"test@gmail.com\",\"Date\":\"2021-10-25T12:50:20+05:30\",\"Login\":\"Sachin\",\"Avatar\":\"\"},\"Link\":\"https://github.com/sachin14/nexe/commit/a0fa2fb0201c62aa541c1a6eba516a8fefd874d8\",\"Added\":[],\"Removed\":[],\"Modified\":[\"Readme.md\"]},{\"Sha\":\"60143149e18581ad15b8a76fd2ed96e695d7826e\",\"Message\":\"second commit\",\"Author\":{\"Name\":\"Sachin Kumar\",\"Email\":\"test@gmail.com\",\"Date\":\"2021-10-25T12:50:43+05:30\",\"Login\":\"Sachin\",\"Avatar\":\"\"},\"Committer\":{\"Name\":\"Sachin Kumar\",\"Email\":\"test@gmail.com\",\"Date\":\"2021-10-25T12:50:43+05:30\",\"Login\":\"sachin\",\"Avatar\":\"\"},\"Link\":\"https://github.com/sachin14/nexe/commit/60143149e18581ad15b8a76fd2ed96e695d7826e\",\"Added\":[],\"Removed\":[],\"Modified\":[\"Readme.md\"]}],\"tas_file_name\":\".tas.yml\",\"parent_commit_coverage_exists\":false,\"branch_name\":\"main\",\"parsing_meta_list\":[{\"task_id\":\"1b8cced664f24d5f94572eaf1981387b\",\"commit_id\":\"a0fa2fb0201c62aa541c1a6eba516a8fefd874d8\"},{\"task_id\":\"4f9826032ca54c18b189d976c0bac91f\",\"commit_id\":\"60143149e18581ad15b8a76fd2ed96e695d7826e\"}]}"
  },
  {
    "path": "testutils/testdata/merge_requests/2/changes",
    "content": "{\"id\":6029114,\"iid\":15335,\"project_id\":13083,\"title\":\"Backport of add-epic-sidebar\",\"description\":\"## What does this MR do?\\nBackport of https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/3253\\n\\n## Are there points in the code the reviewer needs to double check?\\nShouldn't be\\n\\n## Why was this MR needed?\\nBackport of some new shared components and CSS changes\\n\\n## Screenshots (if relevant)\\nNone\\n\\n## Does this MR meet the acceptance criteria?\\n\\n- Review\\n  - [ ] Has been reviewed by Frontend\\n\\n## What are the relevant issue numbers?\\nhttps://gitlab.com/gitlab-org/gitlab-ee/issues/3556\",\"state\":\"merged\",\"created_at\":\"2017-11-10T23:43:47.392Z\",\"updated_at\":\"2021-11-08T17:39:14.329Z\",\"merged_by\":{\"id\":502136,\"username\":\"fatihacet\",\"name\":\"Fatih Acet\",\"state\":\"active\",\"avatar_url\":\"https://gitlab.com/uploads/-/system/user/avatar/502136/avatar.png\",\"web_url\":\"https://gitlab.com/fatihacet\"},\"merge_user\":{\"id\":502136,\"username\":\"fatihacet\",\"name\":\"Fatih Acet\",\"state\":\"active\",\"avatar_url\":\"https://gitlab.com/uploads/-/system/user/avatar/502136/avatar.png\",\"web_url\":\"https://gitlab.com/fatihacet\"},\"merged_at\":\"2017-11-27T20:22:01.043Z\",\"closed_by\":null,\"closed_at\":null,\"target_branch\":\"master\",\"source_branch\":\"backport-add-epic-sidebar\",\"user_notes_count\":20,\"upvotes\":0,\"downvotes\":0,\"author\":{\"id\":408677,\"username\":\"ClemMakesApps\",\"name\":\"Clement Ho\",\"state\":\"active\",\"avatar_url\":\"https://secure.gravatar.com/avatar/013b4af8b474654bce8039ecd262a84a?s=80\\u0026d=identicon\",\"web_url\":\"https://gitlab.com/ClemMakesApps\"},\"assignees\":[{\"id\":502136,\"username\":\"fatihacet\",\"name\":\"Fatih 
Acet\",\"state\":\"active\",\"avatar_url\":\"https://gitlab.com/uploads/-/system/user/avatar/502136/avatar.png\",\"web_url\":\"https://gitlab.com/fatihacet\"}],\"assignee\":{\"id\":502136,\"username\":\"fatihacet\",\"name\":\"Fatih Acet\",\"state\":\"active\",\"avatar_url\":\"https://gitlab.com/uploads/-/system/user/avatar/502136/avatar.png\",\"web_url\":\"https://gitlab.com/fatihacet\"},\"reviewers\":[],\"source_project_id\":13083,\"target_project_id\":13083,\"labels\":[\"Category:Portfolio Management\",\"epics\",\"frontend\"],\"draft\":false,\"work_in_progress\":false,\"milestone\":{\"id\":349702,\"iid\":2,\"group_id\":9970,\"title\":\"10.2\",\"description\":\"\",\"state\":\"closed\",\"created_at\":\"2017-07-24T13:53:41.702Z\",\"updated_at\":\"2018-03-22T16:15:32.616Z\",\"due_date\":\"2017-11-22\",\"start_date\":\"2017-10-08\",\"expired\":true,\"web_url\":\"https://gitlab.com/groups/gitlab-org/-/milestones/2\"},\"merge_when_pipeline_succeeds\":false,\"merge_status\":\"can_be_merged\",\"sha\":\"fe93f9827537e4d761b1874218b009668a914ae4\",\"merge_commit_sha\":\"f8de23e626f7a1d0b2f80f996a5f129323adc970\",\"squash_commit_sha\":null,\"discussion_locked\":null,\"should_remove_source_branch\":null,\"force_remove_source_branch\":true,\"reference\":\"!15335\",\"references\":{\"short\":\"!15335\",\"relative\":\"!15335\",\"full\":\"gitlab-org/gitlab-foss!15335\"},\"web_url\":\"https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/15335\",\"time_stats\":{\"time_estimate\":0,\"total_time_spent\":0,\"human_time_estimate\":null,\"human_total_time_spent\":null},\"squash\":false,\"task_completion_status\":{\"count\":1,\"completed_count\":0},\"has_conflicts\":false,\"blocking_discussions_resolved\":true,\"approvals_before_merge\":null,\"subscribed\":false,\"changes_count\":\"17\",\"latest_build_started_at\":\"2017-11-17T17:09:10.405Z\",\"latest_build_finished_at\":null,\"first_deployed_to_production_at\":null,\"pipeline\":{\"id\":14073346,\"iid\":null,\"project_id\":13083,\"sha
\":\"fe93f9827537e4d761b1874218b009668a914ae4\",\"ref\":\"backport-add-epic-sidebar\",\"status\":\"failed\",\"source\":\"push\",\"created_at\":\"2017-11-17T16:53:22.068Z\",\"updated_at\":\"2017-11-20T16:20:53.290Z\",\"web_url\":\"https://gitlab.com/gitlab-org/gitlab-foss/-/pipelines/14073346\"},\"head_pipeline\":{\"id\":14073346,\"iid\":null,\"project_id\":13083,\"sha\":\"fe93f9827537e4d761b1874218b009668a914ae4\",\"ref\":\"backport-add-epic-sidebar\",\"status\":\"failed\",\"source\":\"push\",\"created_at\":\"2017-11-17T16:53:22.068Z\",\"updated_at\":\"2017-11-20T16:20:53.290Z\",\"web_url\":\"https://gitlab.com/gitlab-org/gitlab-foss/-/pipelines/14073346\",\"before_sha\":\"fe93f9827537e4d761b1874218b009668a914ae4\",\"tag\":false,\"yaml_errors\":null,\"user\":{\"id\":408677,\"username\":\"ClemMakesApps\",\"name\":\"Clement Ho\",\"state\":\"active\",\"avatar_url\":\"https://secure.gravatar.com/avatar/013b4af8b474654bce8039ecd262a84a?s=80\\u0026d=identicon\",\"web_url\":\"https://gitlab.com/ClemMakesApps\"},\"started_at\":\"2017-11-17T17:09:10.405Z\",\"finished_at\":\"2017-11-20T16:20:53.246Z\",\"committed_at\":null,\"duration\":3272,\"queued_duration\":948,\"coverage\":\"53.62\",\"detailed_status\":{\"icon\":\"status_failed\",\"text\":\"failed\",\"label\":\"failed\",\"group\":\"failed\",\"tooltip\":\"failed\",\"has_details\":false,\"details_path\":\"/gitlab-org/gitlab-foss/-/pipelines/14073346\",\"illustration\":null,\"favicon\":\"/assets/ci_favicons/favicon_status_failed-41304d7f7e3828808b0c26771f0309e55296819a9beea3ea9fbf6689d9857c12.png\"}},\"diff_refs\":{\"base_sha\":\"2f74b1d32392427ce9cc3c0aff205c8991ba2dfc\",\"head_sha\":\"fe93f9827537e4d761b1874218b009668a914ae4\",\"start_sha\":\"c406824d319e5b1a073af7cf55c3f24bfa66e2a4\"},\"merge_error\":\"Merge request is not 
mergeable\",\"user\":{\"can_merge\":false},\"changes\":[{\"old_path\":\"app/assets/javascripts/lib/utils/datetime_utility.js\",\"new_path\":\"app/assets/javascripts/lib/utils/datetime_utility.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"--- a/app/assets/javascripts/lib/utils/datetime_utility.js\\n+++ b/app/assets/javascripts/lib/utils/datetime_utility.js\\n@@ -150,3 +150,17 @@ export function timeIntervalInWords(intervalInSeconds) {\\n   }\\n   return text;\\n }\\n+\\n+export function dateInWords(date, abbreviated = false) {\\n+  if (!date) return date;\\n+\\n+  const month = date.getMonth();\\n+  const year = date.getFullYear();\\n+\\n+  const monthNames = [s__('January'), s__('February'), s__('March'), s__('April'), s__('May'), s__('June'), s__('July'), s__('August'), s__('September'), s__('October'), s__('November'), s__('December')];\\n+  const monthNamesAbbr = [s__('Jan'), s__('Feb'), s__('Mar'), s__('Apr'), s__('May'), s__('Jun'), s__('Jul'), s__('Aug'), s__('Sep'), s__('Oct'), s__('Nov'), s__('Dec')];\\n+\\n+  const monthName = abbreviated ? 
monthNamesAbbr[month] : monthNames[month];\\n+\\n+  return `${monthName} ${date.getDate()}, ${year}`;\\n+}\\n\"},{\"old_path\":\"app/assets/javascripts/lib/utils/text_utility.js\",\"new_path\":\"app/assets/javascripts/lib/utils/text_utility.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"--- a/app/assets/javascripts/lib/utils/text_utility.js\\n+++ b/app/assets/javascripts/lib/utils/text_utility.js\\n@@ -55,3 +55,12 @@ export const slugify = str =\\u003e str.trim().toLowerCase();\\n  */\\n export const truncate = (string, maxLength) =\\u003e `${string.substr(0, (maxLength - 3))}...`;\\n \\n+/**\\n+ * Capitalizes first character\\n+ *\\n+ * @param {String} text\\n+ * @return {String}\\n+ */\\n+export function capitalizeFirstCharacter(text) {\\n+  return `${text[0].toUpperCase()}${text.slice(1)}`;\\n+}\\n\"},{\"old_path\":\"app/assets/javascripts/vue_shared/components/sidebar/collapsed_calendar_icon.vue\",\"new_path\":\"app/assets/javascripts/vue_shared/components/sidebar/collapsed_calendar_icon.vue\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"--- /dev/null\\n+++ b/app/assets/javascripts/vue_shared/components/sidebar/collapsed_calendar_icon.vue\\n@@ -0,0 +1,46 @@\\n+\\u003cscript\\u003e\\n+  export default {\\n+    name: 'collapsedCalendarIcon',\\n+    props: {\\n+      containerClass: {\\n+        type: String,\\n+        required: false,\\n+        default: '',\\n+      },\\n+      text: {\\n+        type: String,\\n+        required: false,\\n+        default: '',\\n+      },\\n+      showIcon: {\\n+        type: Boolean,\\n+        required: false,\\n+        default: true,\\n+      },\\n+    },\\n+    methods: {\\n+      click() {\\n+        this.$emit('click');\\n+      },\\n+    },\\n+  };\\n+\\u003c/script\\u003e\\n+\\n+\\u003ctemplate\\u003e\\n+  \\u003cdiv\\n+    :class=\\\"containerClass\\\"\\n+    
@click=\\\"click\\\"\\n+  \\u003e\\n+    \\u003ci\\n+      v-if=\\\"showIcon\\\"\\n+      class=\\\"fa fa-calendar\\\"\\n+      aria-hidden=\\\"true\\\"\\n+    \\u003e\\n+    \\u003c/i\\u003e\\n+    \\u003cslot\\u003e\\n+      \\u003cspan\\u003e\\n+        {{ text }}\\n+      \\u003c/span\\u003e\\n+    \\u003c/slot\\u003e\\n+  \\u003c/div\\u003e\\n+\\u003c/template\\u003e\\n\"},{\"old_path\":\"app/assets/javascripts/vue_shared/components/sidebar/collapsed_grouped_date_picker.vue\",\"new_path\":\"app/assets/javascripts/vue_shared/components/sidebar/collapsed_grouped_date_picker.vue\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"--- /dev/null\\n+++ b/app/assets/javascripts/vue_shared/components/sidebar/collapsed_grouped_date_picker.vue\\n@@ -0,0 +1,109 @@\\n+\\u003cscript\\u003e\\n+  import { dateInWords } from '../../../lib/utils/datetime_utility';\\n+  import toggleSidebar from './toggle_sidebar.vue';\\n+  import collapsedCalendarIcon from './collapsed_calendar_icon.vue';\\n+\\n+  export default {\\n+    name: 'sidebarCollapsedGroupedDatePicker',\\n+    props: {\\n+      collapsed: {\\n+        type: Boolean,\\n+        required: false,\\n+        default: true,\\n+      },\\n+      showToggleSidebar: {\\n+        type: Boolean,\\n+        required: false,\\n+        default: false,\\n+      },\\n+      minDate: {\\n+        type: Date,\\n+        required: false,\\n+      },\\n+      maxDate: {\\n+        type: Date,\\n+        required: false,\\n+      },\\n+      disableClickableIcons: {\\n+        type: Boolean,\\n+        required: false,\\n+        default: false,\\n+      },\\n+    },\\n+    components: {\\n+      toggleSidebar,\\n+      collapsedCalendarIcon,\\n+    },\\n+    computed: {\\n+      hasMinAndMaxDates() {\\n+        return this.minDate \\u0026\\u0026 this.maxDate;\\n+      },\\n+      hasNoMinAndMaxDates() {\\n+        return !this.minDate \\u0026\\u0026 !this.maxDate;\\n+     
 },\\n+      showMinDateBlock() {\\n+        return this.minDate || this.hasNoMinAndMaxDates;\\n+      },\\n+      showFromText() {\\n+        return !this.maxDate \\u0026\\u0026 this.minDate;\\n+      },\\n+      iconClass() {\\n+        const disabledClass = this.disableClickableIcons ? 'disabled' : '';\\n+        return `block sidebar-collapsed-icon calendar-icon ${disabledClass}`;\\n+      },\\n+    },\\n+    methods: {\\n+      toggleSidebar() {\\n+        this.$emit('toggleCollapse');\\n+      },\\n+      dateText(dateType = 'min') {\\n+        const date = this[`${dateType}Date`];\\n+        const dateWords = dateInWords(date, true);\\n+        const parsedDateWords = dateWords ? dateWords.replace(',', '') : dateWords;\\n+\\n+        return date ? parsedDateWords : 'None';\\n+      },\\n+    },\\n+  };\\n+\\u003c/script\\u003e\\n+\\n+\\u003ctemplate\\u003e\\n+  \\u003cdiv class=\\\"block sidebar-grouped-item\\\"\\u003e\\n+    \\u003cdiv\\n+      v-if=\\\"showToggleSidebar\\\"\\n+      class=\\\"issuable-sidebar-header\\\"\\n+    \\u003e\\n+      \\u003ctoggle-sidebar\\n+        :collapsed=\\\"collapsed\\\"\\n+        @toggle=\\\"toggleSidebar\\\"\\n+      /\\u003e\\n+    \\u003c/div\\u003e\\n+    \\u003ccollapsed-calendar-icon\\n+      v-if=\\\"showMinDateBlock\\\"\\n+      :container-class=\\\"iconClass\\\"\\n+      @click=\\\"toggleSidebar\\\"\\n+    \\u003e\\n+      \\u003cspan class=\\\"sidebar-collapsed-value\\\"\\u003e\\n+        \\u003cspan v-if=\\\"showFromText\\\"\\u003eFrom\\u003c/span\\u003e\\n+        \\u003cspan\\u003e{{ dateText('min') }}\\u003c/span\\u003e\\n+      \\u003c/span\\u003e\\n+    \\u003c/collapsed-calendar-icon\\u003e\\n+    \\u003cdiv\\n+      v-if=\\\"hasMinAndMaxDates\\\"\\n+      class=\\\"text-center sidebar-collapsed-divider\\\"\\n+    \\u003e\\n+      -\\n+    \\u003c/div\\u003e\\n+    \\u003ccollapsed-calendar-icon\\n+      v-if=\\\"maxDate\\\"\\n+      :container-class=\\\"iconClass\\\"\\n+      
:show-icon=\\\"!minDate\\\"\\n+      @click=\\\"toggleSidebar\\\"\\n+    \\u003e\\n+      \\u003cspan class=\\\"sidebar-collapsed-value\\\"\\u003e\\n+        \\u003cspan v-if=\\\"!minDate\\\"\\u003eUntil\\u003c/span\\u003e\\n+        \\u003cspan\\u003e{{ dateText('max') }}\\u003c/span\\u003e\\n+      \\u003c/span\\u003e\\n+    \\u003c/collapsed-calendar-icon\\u003e\\n+  \\u003c/div\\u003e\\n+\\u003c/template\\u003e\\n\"},{\"old_path\":\"app/assets/javascripts/vue_shared/components/sidebar/date_picker.vue\",\"new_path\":\"app/assets/javascripts/vue_shared/components/sidebar/date_picker.vue\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"--- /dev/null\\n+++ b/app/assets/javascripts/vue_shared/components/sidebar/date_picker.vue\\n@@ -0,0 +1,163 @@\\n+\\u003cscript\\u003e\\n+  import datePicker from '../pikaday.vue';\\n+  import loadingIcon from '../loading_icon.vue';\\n+  import toggleSidebar from './toggle_sidebar.vue';\\n+  import collapsedCalendarIcon from './collapsed_calendar_icon.vue';\\n+  import { dateInWords } from '../../../lib/utils/datetime_utility';\\n+\\n+  export default {\\n+    name: 'sidebarDatePicker',\\n+    props: {\\n+      collapsed: {\\n+        type: Boolean,\\n+        required: false,\\n+        default: true,\\n+      },\\n+      showToggleSidebar: {\\n+        type: Boolean,\\n+        required: false,\\n+        default: false,\\n+      },\\n+      isLoading: {\\n+        type: Boolean,\\n+        required: false,\\n+        default: false,\\n+      },\\n+      editable: {\\n+        type: Boolean,\\n+        required: false,\\n+        default: false,\\n+      },\\n+      label: {\\n+        type: String,\\n+        required: false,\\n+        default: 'Date picker',\\n+      },\\n+      selectedDate: {\\n+        type: Date,\\n+        required: false,\\n+      },\\n+      minDate: {\\n+        type: Date,\\n+        required: false,\\n+      },\\n+      maxDate: {\\n+  
      type: Date,\\n+        required: false,\\n+      },\\n+    },\\n+    data() {\\n+      return {\\n+        editing: false,\\n+      };\\n+    },\\n+    components: {\\n+      datePicker,\\n+      toggleSidebar,\\n+      loadingIcon,\\n+      collapsedCalendarIcon,\\n+    },\\n+    computed: {\\n+      selectedAndEditable() {\\n+        return this.selectedDate \\u0026\\u0026 this.editable;\\n+      },\\n+      selectedDateWords() {\\n+        return dateInWords(this.selectedDate, true);\\n+      },\\n+      collapsedText() {\\n+        return this.selectedDateWords ? this.selectedDateWords : 'None';\\n+      },\\n+    },\\n+    methods: {\\n+      stopEditing() {\\n+        this.editing = false;\\n+      },\\n+      toggleDatePicker() {\\n+        this.editing = !this.editing;\\n+      },\\n+      newDateSelected(date = null) {\\n+        this.date = date;\\n+        this.editing = false;\\n+        this.$emit('saveDate', date);\\n+      },\\n+      toggleSidebar() {\\n+        this.$emit('toggleCollapse');\\n+      },\\n+    },\\n+  };\\n+\\u003c/script\\u003e\\n+\\n+\\u003ctemplate\\u003e\\n+  \\u003cdiv class=\\\"block\\\"\\u003e\\n+    \\u003cdiv class=\\\"issuable-sidebar-header\\\"\\u003e\\n+      \\u003ctoggle-sidebar\\n+        :collapsed=\\\"collapsed\\\"\\n+        @toggle=\\\"toggleSidebar\\\"\\n+      /\\u003e\\n+    \\u003c/div\\u003e\\n+    \\u003ccollapsed-calendar-icon\\n+      class=\\\"sidebar-collapsed-icon\\\"\\n+      :text=\\\"collapsedText\\\"\\n+    /\\u003e\\n+    \\u003cdiv class=\\\"title\\\"\\u003e\\n+      {{ label }}\\n+      \\u003cloading-icon\\n+        v-if=\\\"isLoading\\\"\\n+        :inline=\\\"true\\\"\\n+      /\\u003e\\n+      \\u003cdiv class=\\\"pull-right\\\"\\u003e\\n+        \\u003cbutton\\n+          v-if=\\\"editable \\u0026\\u0026 !editing\\\"\\n+          type=\\\"button\\\"\\n+          class=\\\"btn-blank btn-link btn-primary-hover-link btn-sidebar-action\\\"\\n+          @click=\\\"toggleDatePicker\\\"\\n+   
     \\u003e\\n+          Edit\\n+        \\u003c/button\\u003e\\n+        \\u003ctoggle-sidebar\\n+          v-if=\\\"showToggleSidebar\\\"\\n+          :collapsed=\\\"collapsed\\\"\\n+          @toggle=\\\"toggleSidebar\\\"\\n+        /\\u003e\\n+      \\u003c/div\\u003e\\n+    \\u003c/div\\u003e\\n+    \\u003cdiv class=\\\"value\\\"\\u003e\\n+      \\u003cdate-picker\\n+        v-if=\\\"editing\\\"\\n+        :selected-date=\\\"selectedDate\\\"\\n+        :min-date=\\\"minDate\\\"\\n+        :max-date=\\\"maxDate\\\"\\n+        :label=\\\"label\\\"\\n+        @newDateSelected=\\\"newDateSelected\\\"\\n+        @hidePicker=\\\"stopEditing\\\"\\n+      /\\u003e\\n+      \\u003cspan\\n+        v-else\\n+        class=\\\"value-content\\\"\\n+      \\u003e\\n+        \\u003ctemplate v-if=\\\"selectedDate\\\"\\u003e\\n+          \\u003cstrong\\u003e{{ selectedDateWords }}\\u003c/strong\\u003e\\n+          \\u003cspan\\n+            v-if=\\\"selectedAndEditable\\\"\\n+            class=\\\"no-value\\\"\\n+          \\u003e\\n+            -\\n+            \\u003cbutton\\n+              type=\\\"button\\\"\\n+              class=\\\"btn-blank btn-link btn-secondary-hover-link\\\"\\n+              @click=\\\"newDateSelected(null)\\\"\\n+            \\u003e\\n+              remove\\n+            \\u003c/button\\u003e\\n+          \\u003c/span\\u003e\\n+        \\u003c/template\\u003e\\n+        \\u003cspan\\n+          v-else\\n+          class=\\\"no-value\\\"\\n+        \\u003e\\n+          None\\n+        \\u003c/span\\u003e\\n+      \\u003c/span\\u003e\\n+    \\u003c/div\\u003e\\n+  \\u003c/div\\u003e\\n+\\u003c/template\\u003e\\n\"},{\"old_path\":\"app/assets/javascripts/vue_shared/components/sidebar/toggle_sidebar.vue\",\"new_path\":\"app/assets/javascripts/vue_shared/components/sidebar/toggle_sidebar.vue\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"--- /dev/null\\n+++ 
b/app/assets/javascripts/vue_shared/components/sidebar/toggle_sidebar.vue\\n@@ -0,0 +1,30 @@\\n+\\u003cscript\\u003e\\n+  export default {\\n+    name: 'toggleSidebar',\\n+    props: {\\n+      collapsed: {\\n+        type: Boolean,\\n+        required: true,\\n+      },\\n+    },\\n+    methods: {\\n+      toggle() {\\n+        this.$emit('toggle');\\n+      },\\n+    },\\n+  };\\n+\\u003c/script\\u003e\\n+\\n+\\u003ctemplate\\u003e\\n+  \\u003cbutton\\n+    type=\\\"button\\\"\\n+    class=\\\"btn btn-blank gutter-toggle btn-sidebar-action\\\"\\n+    @click=\\\"toggle\\\"\\n+  \\u003e\\n+    \\u003ci\\n+      aria-label=\\\"toggle collapse\\\"\\n+      class=\\\"fa\\\"\\n+      :class=\\\"{ 'fa-angle-double-right': !collapsed, 'fa-angle-double-left': collapsed }\\\"\\n+    \\u003e\\u003c/i\\u003e\\n+  \\u003c/button\\u003e\\n+\\u003c/template\\u003e\\n\"},{\"old_path\":\"app/assets/javascripts/vue_shared/components/pikaday.vue\",\"new_path\":\"app/assets/javascripts/vue_shared/components/pikaday.vue\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"--- /dev/null\\n+++ b/app/assets/javascripts/vue_shared/components/pikaday.vue\\n@@ -0,0 +1,79 @@\\n+\\u003cscript\\u003e\\n+  import Pikaday from 'pikaday';\\n+  import { parsePikadayDate, pikadayToString } from '../../lib/utils/datefix';\\n+\\n+  export default {\\n+    name: 'datePicker',\\n+    props: {\\n+      label: {\\n+        type: String,\\n+        required: false,\\n+        default: 'Date picker',\\n+      },\\n+      selectedDate: {\\n+        type: Date,\\n+        required: false,\\n+      },\\n+      minDate: {\\n+        type: Date,\\n+        required: false,\\n+      },\\n+      maxDate: {\\n+        type: Date,\\n+        required: false,\\n+      },\\n+    },\\n+    methods: {\\n+      selected(dateText) {\\n+        this.$emit('newDateSelected', this.calendar.toString(dateText));\\n+      },\\n+      toggled() {\\n+        
this.$emit('hidePicker');\\n+      },\\n+    },\\n+    mounted() {\\n+      this.calendar = new Pikaday({\\n+        field: this.$el.querySelector('.dropdown-menu-toggle'),\\n+        theme: 'gitlab-theme animate-picker',\\n+        format: 'yyyy-mm-dd',\\n+        container: this.$el,\\n+        defaultDate: this.selectedDate,\\n+        setDefaultDate: !!this.selectedDate,\\n+        minDate: this.minDate,\\n+        maxDate: this.maxDate,\\n+        parse: dateString =\\u003e parsePikadayDate(dateString),\\n+        toString: date =\\u003e pikadayToString(date),\\n+        onSelect: this.selected.bind(this),\\n+        onClose: this.toggled.bind(this),\\n+      });\\n+\\n+      this.$el.append(this.calendar.el);\\n+      this.calendar.show();\\n+    },\\n+    beforeDestroy() {\\n+      this.calendar.destroy();\\n+    },\\n+  };\\n+\\u003c/script\\u003e\\n+\\n+\\u003ctemplate\\u003e\\n+  \\u003cdiv class=\\\"pikaday-container\\\"\\u003e\\n+    \\u003cdiv class=\\\"dropdown open\\\"\\u003e\\n+      \\u003cbutton\\n+        type=\\\"button\\\"\\n+        class=\\\"dropdown-menu-toggle\\\"\\n+        data-toggle=\\\"dropdown\\\"\\n+        @click=\\\"toggled\\\"\\n+      \\u003e\\n+        \\u003cspan class=\\\"dropdown-toggle-text\\\"\\u003e\\n+          {{label}}\\n+        \\u003c/span\\u003e\\n+        \\u003ci\\n+          class=\\\"fa fa-chevron-down\\\"\\n+          aria-hidden=\\\"true\\\"\\n+        \\u003e\\n+        \\u003c/i\\u003e\\n+      \\u003c/button\\u003e\\n+    \\u003c/div\\u003e\\n+  \\u003c/div\\u003e\\n+\\u003c/template\\u003e\\n\"},{\"old_path\":\"app/assets/stylesheets/framework/buttons.scss\",\"new_path\":\"app/assets/stylesheets/framework/buttons.scss\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"--- a/app/assets/stylesheets/framework/buttons.scss\\n+++ b/app/assets/stylesheets/framework/buttons.scss\\n@@ -408,6 +408,7 @@\\n   padding: 0;\\n   background: 
transparent;\\n   border: 0;\\n+  border-radius: 0;\\n \\n   \\u0026:hover,\\n   \\u0026:active,\\n@@ -417,3 +418,25 @@\\n     box-shadow: none;\\n   }\\n }\\n+\\n+.btn-link.btn-secondary-hover-link {\\n+  color: $gl-text-color-secondary;\\n+\\n+  \\u0026:hover,\\n+  \\u0026:active,\\n+  \\u0026:focus {\\n+    color: $gl-link-color;\\n+    text-decoration: none;\\n+  }\\n+}\\n+\\n+.btn-link.btn-primary-hover-link {\\n+  color: inherit;\\n+\\n+  \\u0026:hover,\\n+  \\u0026:active,\\n+  \\u0026:focus {\\n+    color: $gl-link-color;\\n+    text-decoration: none;\\n+  }\\n+}\\n\"},{\"old_path\":\"app/assets/stylesheets/framework/sidebar.scss\",\"new_path\":\"app/assets/stylesheets/framework/sidebar.scss\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"--- a/app/assets/stylesheets/framework/sidebar.scss\\n+++ b/app/assets/stylesheets/framework/sidebar.scss\\n@@ -43,11 +43,13 @@\\n   }\\n \\n   .sidebar-collapsed-icon {\\n-    cursor: pointer;\\n-\\n     .btn {\\n       background-color: $gray-light;\\n     }\\n+\\n+    \\u0026:not(.disabled) {\\n+      cursor: pointer;\\n+    }\\n   }\\n }\\n \\n@@ -55,6 +57,10 @@\\n   padding-right: 0;\\n   z-index: 300;\\n \\n+  .btn-sidebar-action {\\n+    display: inline-flex;\\n+  }\\n+\\n   @media (min-width: $screen-sm-min) and (max-width: $screen-sm-max) {\\n     \\u0026:not(.wiki-sidebar):not(.build-sidebar):not(.issuable-bulk-update-sidebar) .content-wrapper {\\n       padding-right: $gutter_collapsed_width;\\n@@ -136,3 +142,18 @@\\n .issuable-sidebar {\\n   @include new-style-dropdown;\\n }\\n+\\n+.pikaday-container {\\n+  .pika-single {\\n+    margin-top: 2px;\\n+    width: 250px;\\n+  }\\n+\\n+  .dropdown-menu-toggle {\\n+    line-height: 20px;\\n+  }\\n+}\\n+\\n+.sidebar-collapsed-icon .sidebar-collapsed-value {\\n+  font-size: 
12px;\\n+}\\n\"},{\"old_path\":\"app/assets/stylesheets/pages/issuable.scss\",\"new_path\":\"app/assets/stylesheets/pages/issuable.scss\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"--- a/app/assets/stylesheets/pages/issuable.scss\\n+++ b/app/assets/stylesheets/pages/issuable.scss\\n@@ -284,10 +284,15 @@\\n       font-weight: $gl-font-weight-normal;\\n     }\\n \\n-    .no-value {\\n+    .no-value,\\n+    .btn-secondary-hover-link {\\n       color: $gl-text-color-secondary;\\n     }\\n \\n+    .btn-secondary-hover-link:hover {\\n+      color: $gl-link-color;\\n+    }\\n+\\n     .sidebar-collapsed-icon {\\n       display: none;\\n     }\\n@@ -295,6 +300,8 @@\\n     .gutter-toggle {\\n       margin-top: 7px;\\n       border-left: 1px solid $border-gray-normal;\\n+      padding-left: 0;\\n+      text-align: center;\\n     }\\n \\n     .title .gutter-toggle {\\n@@ -367,7 +374,7 @@\\n         fill: $issuable-sidebar-color;\\n       }\\n \\n-      \\u0026:hover,\\n+      \\u0026:hover:not(.disabled),\\n       \\u0026:hover .todo-undone {\\n         color: $gl-text-color;\\n \\n@@ -908,3 +915,21 @@\\n     margin: 0 3px;\\n   }\\n }\\n+\\n+.right-sidebar-collapsed {\\n+  .sidebar-grouped-item {\\n+    .sidebar-collapsed-icon {\\n+      margin-bottom: 0;\\n+    }\\n+\\n+    .sidebar-collapsed-divider {\\n+      line-height: 5px;\\n+      font-size: 12px;\\n+      color: $theme-gray-700;\\n+\\n+      + .sidebar-collapsed-icon {\\n+        padding-top: 0;\\n+      }\\n+    }\\n+  }\\n+}\\n\"},{\"old_path\":\"spec/javascripts/lib/utils/text_utility_spec.js\",\"new_path\":\"spec/javascripts/lib/utils/text_utility_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"--- a/spec/javascripts/lib/utils/text_utility_spec.js\\n+++ b/spec/javascripts/lib/utils/text_utility_spec.js\\n@@ -23,6 +23,14 @@ describe('text_utility', () 
=\\u003e {\\n     });\\n   });\\n \\n+  describe('capitalizeFirstCharacter', () =\\u003e {\\n+    it('returns string with first letter capitalized', () =\\u003e {\\n+      expect(textUtils.capitalizeFirstCharacter('gitlab')).toEqual('Gitlab');\\n+      expect(textUtils.highCountTrim(105)).toBe('99+');\\n+      expect(textUtils.highCountTrim(100)).toBe('99+');\\n+    });\\n+  });\\n+\\n   describe('humanize', () =\\u003e {\\n     it('should remove underscores and uppercase the first letter', () =\\u003e {\\n       expect(textUtils.humanize('foo_bar')).toEqual('Foo bar');\\n\"},{\"old_path\":\"spec/javascripts/vue_shared/components/sidebar/collapsed_calendar_icon_spec.js\",\"new_path\":\"spec/javascripts/vue_shared/components/sidebar/collapsed_calendar_icon_spec.js\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"--- /dev/null\\n+++ b/spec/javascripts/vue_shared/components/sidebar/collapsed_calendar_icon_spec.js\\n@@ -0,0 +1,35 @@\\n+import Vue from 'vue';\\n+import collapsedCalendarIcon from '~/vue_shared/components/sidebar/collapsed_calendar_icon.vue';\\n+import mountComponent from '../../../helpers/vue_mount_component_helper';\\n+\\n+describe('collapsedCalendarIcon', () =\\u003e {\\n+  let vm;\\n+  beforeEach(() =\\u003e {\\n+    const CollapsedCalendarIcon = Vue.extend(collapsedCalendarIcon);\\n+    vm = mountComponent(CollapsedCalendarIcon, {\\n+      containerClass: 'test-class',\\n+      text: 'text',\\n+      showIcon: false,\\n+    });\\n+  });\\n+\\n+  it('should add class to container', () =\\u003e {\\n+    expect(vm.$el.classList.contains('test-class')).toEqual(true);\\n+  });\\n+\\n+  it('should hide calendar icon if showIcon', () =\\u003e {\\n+    expect(vm.$el.querySelector('.fa-calendar')).toBeNull();\\n+  });\\n+\\n+  it('should render text', () =\\u003e {\\n+    expect(vm.$el.querySelector('span').innerText.trim()).toEqual('text');\\n+  });\\n+\\n+  it('should emit click event when 
container is clicked', () =\\u003e {\\n+    const click = jasmine.createSpy();\\n+    vm.$on('click', click);\\n+\\n+    vm.$el.click();\\n+    expect(click).toHaveBeenCalled();\\n+  });\\n+});\\n\"},{\"old_path\":\"spec/javascripts/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js\",\"new_path\":\"spec/javascripts/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"--- /dev/null\\n+++ b/spec/javascripts/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js\\n@@ -0,0 +1,91 @@\\n+import Vue from 'vue';\\n+import collapsedGroupedDatePicker from '~/vue_shared/components/sidebar/collapsed_grouped_date_picker.vue';\\n+import mountComponent from '../../../helpers/vue_mount_component_helper';\\n+\\n+describe('collapsedGroupedDatePicker', () =\\u003e {\\n+  let vm;\\n+  beforeEach(() =\\u003e {\\n+    const CollapsedGroupedDatePicker = Vue.extend(collapsedGroupedDatePicker);\\n+    vm = mountComponent(CollapsedGroupedDatePicker, {\\n+      showToggleSidebar: true,\\n+    });\\n+  });\\n+\\n+  it('should render toggle sidebar if showToggleSidebar', (done) =\\u003e {\\n+    expect(vm.$el.querySelector('.issuable-sidebar-header')).toBeDefined();\\n+\\n+    vm.showToggleSidebar = false;\\n+    Vue.nextTick(() =\\u003e {\\n+      expect(vm.$el.querySelector('.issuable-sidebar-header')).toBeNull();\\n+      done();\\n+    });\\n+  });\\n+\\n+  it('toggleCollapse events', () =\\u003e {\\n+    const toggleCollapse = jasmine.createSpy();\\n+\\n+    beforeEach((done) =\\u003e {\\n+      vm.minDate = new Date('07/17/2016');\\n+      Vue.nextTick(done);\\n+    });\\n+\\n+    it('should emit when sidebar is toggled', () =\\u003e {\\n+      vm.$el.querySelector('.gutter-toggle').click();\\n+      expect(toggleCollapse).toHaveBeenCalled();\\n+    });\\n+\\n+    it('should emit when collapsed-calendar-icon is clicked', () 
=\\u003e {\\n+      vm.$el.querySelector('.sidebar-collapsed-icon').click();\\n+      expect(toggleCollapse).toHaveBeenCalled();\\n+    });\\n+  });\\n+\\n+  describe('minDate and maxDate', () =\\u003e {\\n+    beforeEach((done) =\\u003e {\\n+      vm.minDate = new Date('07/17/2016');\\n+      vm.maxDate = new Date('07/17/2017');\\n+      Vue.nextTick(done);\\n+    });\\n+\\n+    it('should render both collapsed-calendar-icon', () =\\u003e {\\n+      const icons = vm.$el.querySelectorAll('.sidebar-collapsed-icon');\\n+      expect(icons.length).toEqual(2);\\n+      expect(icons[0].innerText.trim()).toEqual('Jul 17 2016');\\n+      expect(icons[1].innerText.trim()).toEqual('Jul 17 2017');\\n+    });\\n+  });\\n+\\n+  describe('minDate', () =\\u003e {\\n+    beforeEach((done) =\\u003e {\\n+      vm.minDate = new Date('07/17/2016');\\n+      Vue.nextTick(done);\\n+    });\\n+\\n+    it('should render minDate in collapsed-calendar-icon', () =\\u003e {\\n+      const icons = vm.$el.querySelectorAll('.sidebar-collapsed-icon');\\n+      expect(icons.length).toEqual(1);\\n+      expect(icons[0].innerText.trim()).toEqual('From Jul 17 2016');\\n+    });\\n+  });\\n+\\n+  describe('maxDate', () =\\u003e {\\n+    beforeEach((done) =\\u003e {\\n+      vm.maxDate = new Date('07/17/2017');\\n+      Vue.nextTick(done);\\n+    });\\n+\\n+    it('should render maxDate in collapsed-calendar-icon', () =\\u003e {\\n+      const icons = vm.$el.querySelectorAll('.sidebar-collapsed-icon');\\n+      expect(icons.length).toEqual(1);\\n+      expect(icons[0].innerText.trim()).toEqual('Until Jul 17 2017');\\n+    });\\n+  });\\n+\\n+  describe('no dates', () =\\u003e {\\n+    it('should render None', () =\\u003e {\\n+      const icons = vm.$el.querySelectorAll('.sidebar-collapsed-icon');\\n+      expect(icons.length).toEqual(1);\\n+      expect(icons[0].innerText.trim()).toEqual('None');\\n+    });\\n+  
});\\n+});\\n\"},{\"old_path\":\"spec/javascripts/vue_shared/components/sidebar/date_picker_spec.js\",\"new_path\":\"spec/javascripts/vue_shared/components/sidebar/date_picker_spec.js\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"--- /dev/null\\n+++ b/spec/javascripts/vue_shared/components/sidebar/date_picker_spec.js\\n@@ -0,0 +1,117 @@\\n+import Vue from 'vue';\\n+import sidebarDatePicker from '~/vue_shared/components/sidebar/date_picker.vue';\\n+import mountComponent from '../../../helpers/vue_mount_component_helper';\\n+\\n+describe('sidebarDatePicker', () =\\u003e {\\n+  let vm;\\n+  beforeEach(() =\\u003e {\\n+    const SidebarDatePicker = Vue.extend(sidebarDatePicker);\\n+    vm = mountComponent(SidebarDatePicker, {\\n+      label: 'label',\\n+      isLoading: true,\\n+    });\\n+  });\\n+\\n+  it('should emit toggleCollapse when collapsed toggle sidebar is clicked', () =\\u003e {\\n+    const toggleCollapse = jasmine.createSpy();\\n+    vm.$on('toggleCollapse', toggleCollapse);\\n+\\n+    vm.$el.querySelector('.issuable-sidebar-header .gutter-toggle').click();\\n+    expect(toggleCollapse).toHaveBeenCalled();\\n+  });\\n+\\n+  it('should render collapsed-calendar-icon', () =\\u003e {\\n+    expect(vm.$el.querySelector('.sidebar-collapsed-icon')).toBeDefined();\\n+  });\\n+\\n+  it('should render label', () =\\u003e {\\n+    expect(vm.$el.querySelector('.title').innerText.trim()).toEqual('label');\\n+  });\\n+\\n+  it('should render loading-icon when isLoading', () =\\u003e {\\n+    expect(vm.$el.querySelector('.fa-spin')).toBeDefined();\\n+  });\\n+\\n+  it('should render value when not editing', () =\\u003e {\\n+    expect(vm.$el.querySelector('.value-content')).toBeDefined();\\n+  });\\n+\\n+  it('should render None if there is no selectedDate', () =\\u003e {\\n+    expect(vm.$el.querySelector('.value-content span').innerText.trim()).toEqual('None');\\n+  });\\n+\\n+  it('should render 
date-picker when editing', (done) =\\u003e {\\n+    vm.editing = true;\\n+    Vue.nextTick(() =\\u003e {\\n+      expect(vm.$el.querySelector('.pika-label')).toBeDefined();\\n+      done();\\n+    });\\n+  });\\n+\\n+  describe('editable', () =\\u003e {\\n+    beforeEach((done) =\\u003e {\\n+      vm.editable = true;\\n+      Vue.nextTick(done);\\n+    });\\n+\\n+    it('should render edit button', () =\\u003e {\\n+      expect(vm.$el.querySelector('.title .btn-blank').innerText.trim()).toEqual('Edit');\\n+    });\\n+\\n+    it('should enable editing when edit button is clicked', (done) =\\u003e {\\n+      vm.isLoading = false;\\n+      Vue.nextTick(() =\\u003e {\\n+        vm.$el.querySelector('.title .btn-blank').click();\\n+        expect(vm.editing).toEqual(true);\\n+        done();\\n+      });\\n+    });\\n+  });\\n+\\n+  it('should render date if selectedDate', (done) =\\u003e {\\n+    vm.selectedDate = new Date('07/07/2017');\\n+    Vue.nextTick(() =\\u003e {\\n+      expect(vm.$el.querySelector('.value-content strong').innerText.trim()).toEqual('Jul 7, 2017');\\n+      done();\\n+    });\\n+  });\\n+\\n+  describe('selectedDate and editable', () =\\u003e {\\n+    beforeEach((done) =\\u003e {\\n+      vm.selectedDate = new Date('07/07/2017');\\n+      vm.editable = true;\\n+      Vue.nextTick(done);\\n+    });\\n+\\n+    it('should render remove button if selectedDate and editable', () =\\u003e {\\n+      expect(vm.$el.querySelector('.value-content .btn-blank').innerText.trim()).toEqual('remove');\\n+    });\\n+\\n+    it('should emit saveDate when remove button is clicked', () =\\u003e {\\n+      const saveDate = jasmine.createSpy();\\n+      vm.$on('saveDate', saveDate);\\n+\\n+      vm.$el.querySelector('.value-content .btn-blank').click();\\n+      expect(saveDate).toHaveBeenCalled();\\n+    });\\n+  });\\n+\\n+  describe('showToggleSidebar', () =\\u003e {\\n+    beforeEach((done) =\\u003e {\\n+      vm.showToggleSidebar = true;\\n+      
Vue.nextTick(done);\\n+    });\\n+\\n+    it('should render toggle-sidebar when showToggleSidebar', () =\\u003e {\\n+      expect(vm.$el.querySelector('.title .gutter-toggle')).toBeDefined();\\n+    });\\n+\\n+    it('should emit toggleCollapse when toggle sidebar is clicked', () =\\u003e {\\n+      const toggleCollapse = jasmine.createSpy();\\n+      vm.$on('toggleCollapse', toggleCollapse);\\n+\\n+      vm.$el.querySelector('.title .gutter-toggle').click();\\n+      expect(toggleCollapse).toHaveBeenCalled();\\n+    });\\n+  });\\n+});\\n\"},{\"old_path\":\"spec/javascripts/vue_shared/components/sidebar/toggle_sidebar_spec.js\",\"new_path\":\"spec/javascripts/vue_shared/components/sidebar/toggle_sidebar_spec.js\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"--- /dev/null\\n+++ b/spec/javascripts/vue_shared/components/sidebar/toggle_sidebar_spec.js\\n@@ -0,0 +1,32 @@\\n+import Vue from 'vue';\\n+import toggleSidebar from '~/vue_shared/components/sidebar/toggle_sidebar.vue';\\n+import mountComponent from '../../../helpers/vue_mount_component_helper';\\n+\\n+describe('toggleSidebar', () =\\u003e {\\n+  let vm;\\n+  beforeEach(() =\\u003e {\\n+    const ToggleSidebar = Vue.extend(toggleSidebar);\\n+    vm = mountComponent(ToggleSidebar, {\\n+      collapsed: true,\\n+    });\\n+  });\\n+\\n+  it('should render \\u003c\\u003c when collapsed', () =\\u003e {\\n+    expect(vm.$el.querySelector('.fa').classList.contains('fa-angle-double-left')).toEqual(true);\\n+  });\\n+\\n+  it('should render \\u003e\\u003e when collapsed', () =\\u003e {\\n+    vm.collapsed = false;\\n+    Vue.nextTick(() =\\u003e {\\n+      expect(vm.$el.querySelector('.fa').classList.contains('fa-angle-double-right')).toEqual(true);\\n+    });\\n+  });\\n+\\n+  it('should emit toggle event when button clicked', () =\\u003e {\\n+    const toggle = jasmine.createSpy();\\n+    vm.$on('toggle', toggle);\\n+    vm.$el.click();\\n+\\n+    
expect(toggle).toHaveBeenCalled();\\n+  });\\n+});\\n\"},{\"old_path\":\"spec/javascripts/vue_shared/components/pikaday_spec.js\",\"new_path\":\"spec/javascripts/vue_shared/components/pikaday_spec.js\",\"a_mode\":\"0\",\"b_mode\":\"100644\",\"new_file\":true,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"--- /dev/null\\n+++ b/spec/javascripts/vue_shared/components/pikaday_spec.js\\n@@ -0,0 +1,29 @@\\n+import Vue from 'vue';\\n+import datePicker from '~/vue_shared/components/pikaday.vue';\\n+import mountComponent from '../../helpers/vue_mount_component_helper';\\n+\\n+describe('datePicker', () =\\u003e {\\n+  let vm;\\n+  beforeEach(() =\\u003e {\\n+    const DatePicker = Vue.extend(datePicker);\\n+    vm = mountComponent(DatePicker, {\\n+      label: 'label',\\n+    });\\n+  });\\n+\\n+  it('should render label text', () =\\u003e {\\n+    expect(vm.$el.querySelector('.dropdown-toggle-text').innerText.trim()).toEqual('label');\\n+  });\\n+\\n+  it('should show calendar', () =\\u003e {\\n+    expect(vm.$el.querySelector('.pika-single')).toBeDefined();\\n+  });\\n+\\n+  it('should toggle when dropdown is clicked', () =\\u003e {\\n+    const hidePicker = jasmine.createSpy();\\n+    vm.$on('hidePicker', hidePicker);\\n+\\n+    vm.$el.querySelector('.dropdown-menu-toggle').click();\\n+    expect(hidePicker).toHaveBeenCalled();\\n+  });\\n+});\\n\"},{\"old_path\":\"spec/javascripts/datetime_utility_spec.js\",\"new_path\":\"spec/javascripts/datetime_utility_spec.js\",\"a_mode\":\"100644\",\"b_mode\":\"100644\",\"new_file\":false,\"renamed_file\":false,\"deleted_file\":false,\"diff\":\"--- a/spec/javascripts/datetime_utility_spec.js\\n+++ b/spec/javascripts/datetime_utility_spec.js\\n@@ -1,4 +1,4 @@\\n-import { timeIntervalInWords } from '~/lib/utils/datetime_utility';\\n+import * as datetimeUtility from '~/lib/utils/datetime_utility';\\n \\n (() =\\u003e {\\n   describe('Date time utils', () =\\u003e {\\n@@ -89,10 +89,22 @@ import { timeIntervalInWords } from 
'~/lib/utils/datetime_utility';\\n \\n   describe('timeIntervalInWords', () =\\u003e {\\n     it('should return string with number of minutes and seconds', () =\\u003e {\\n-      expect(timeIntervalInWords(9.54)).toEqual('9 seconds');\\n-      expect(timeIntervalInWords(1)).toEqual('1 second');\\n-      expect(timeIntervalInWords(200)).toEqual('3 minutes 20 seconds');\\n-      expect(timeIntervalInWords(6008)).toEqual('100 minutes 8 seconds');\\n+      expect(datetimeUtility.timeIntervalInWords(9.54)).toEqual('9 seconds');\\n+      expect(datetimeUtility.timeIntervalInWords(1)).toEqual('1 second');\\n+      expect(datetimeUtility.timeIntervalInWords(200)).toEqual('3 minutes 20 seconds');\\n+      expect(datetimeUtility.timeIntervalInWords(6008)).toEqual('100 minutes 8 seconds');\\n+    });\\n+  });\\n+\\n+  describe('dateInWords', () =\\u003e {\\n+    const date = new Date('07/01/2016');\\n+\\n+    it('should return date in words', () =\\u003e {\\n+      expect(datetimeUtility.dateInWords(date)).toEqual('July 1, 2016');\\n+    });\\n+\\n+    it('should return abbreviated month name', () =\\u003e {\\n+      expect(datetimeUtility.dateInWords(date, true)).toEqual('Jul 1, 2016');\\n     });\\n   });\\n })();\\n\"}],\"overflow\":false}"
  },
  {
    "path": "testutils/testdata/payload.json",
    "content": "{\n  \"repo_link\": \"https://gittest.com/user/nexe\",\n  \"repo_slug\": \"/user/nexe\",\n  \"build_target_commit\": \"iued83e783dhiewd9\",\n  \"build_base_commit\": \"udhihei3hd83y8dye\",\n  \"task_id\": \"9sj239edfd48y\",\n  \"branch_name\": \"ut\",\n  \"build_id\": \"fudf3ufjicjir34\",\n  \"repo_id\": \"2edejr48f\",\n  \"org_id\": \"ed39udjdj\",\n  \"git_provider\": \"gittest\",\n  \"private_repo\" : false,\n  \"event_type\": \"pull-request\",\n  \"diff_url\": \"https://api.gittest.com/user/nexe/diff/abcshd\",\n  \"pull_request_number\": 2,\n  \"tas_file_name\": \"user.tas\",\n  \"locators\": \"sdfr\",\n  \"locator_address\": \"sjc/dwd/\",\n  \"parent_commit_coverage_exists\": false\n}"
  },
  {
    "path": "testutils/testdata/pulls/2",
    "content": "diff --git a/src/steps/resource.ts b/src/steps/resource.ts\nindex b50377a8..37b84a2f 100644\n--- a/src/steps/resource.ts\n+++ b/src/steps/resource.ts\n@@ -10,7 +10,7 @@ export default async function resource(compiler: NexeCompiler, next: () => Promi\n   }\n   const step = compiler.log.step('Bundling Resources...')\n   let count = 0\n-\n+  const testCommitChangeM = \"Added 1 line in steps.ts\"\n   // workaround for https://github.com/sindresorhus/globby/issues/127\n   // and https://github.com/mrmlnc/fast-glob#pattern-syntax\n   const resourcesWithForwardSlashes = resources.map((r) => r.replace(/\\\\/g, '/'))"
  },
  {
    "path": "testutils/testdata/sample_config.json",
    "content": "{\n    \"Config\":\"\",\n    \"DBConf\":{\n      \"host\":\"\",\n      \"port\":\"\",\n      \"user\":\"\",\n      \"password\":\"\"\n    },\n    \"Port\":\"9876\",\n    \"payloadAddress\":\"\",\n    \"LogFile\":\"\",\n    \"LogConfig\":{\n      \"EnableConsole\":true,\n      \"ConsoleJSONFormat\":false,\n      \"ConsoleLevel\":\"debug\",\n      \"EnableFile\":true,\n      \"FileJSONFormat\":true,\n      \"FileLevel\":\"debug\",\n      \"FileLocation\":\"\"\n    },\n    \"coverage\":false,\n    \"parser\":false,\n    \"Env\":\"dev\",\n    \"Verbose\":false,\n    \"Azure\":{\n      \"ContainerName\":\"\",\n      \"StorageAccountName\":\"\",\n      \"StorageAccessKey\":\"\"\n    }\n  }"
  },
  {
    "path": "testutils/testdata/secretTestData/invalidsecretfile.json",
    "content": "{\n    \"data\": [\"qwert\", \"zxcvvb\"]\n}"
  },
  {
    "path": "testutils/testdata/secretTestData/secretOauthFile.json",
    "content": "{\n  \"access_token\": \"token\",\n  \"expiry\": \"2022-02-22T16:22:01+05:30\",\n  \"refresh_token\": \"refresh\"\n}\n"
  },
  {
    "path": "testutils/testdata/secretTestData/secretfile.json",
    "content": "{\n  \"abc\": \"val\",\n  \"xyz\": \"val2\"\n}\n"
  },
  {
    "path": "testutils/testdata/tas.yaml",
    "content": "# supported frameworks: mocha|jest|jasmine\nframework: mocha\n# supported tiers: xmall|small|medium|large|xlarge\ntier: xsmall\nblocklist:\n  # format: \"<filename>##<suit-name>##<suit-name>##<test-name>\"\n  - \"src/test/api.js\"\n  - \"src/test/api1.js##this is a test-suite\"\n  - \"src/test/api2.js##this is a test-suite##this is a test-case\"\npostMerge:\n  # env vars provided at the time of discovering and executing the post-merge tests\n  env:\n    REPONAME: nexe\n    AWS_KEY: ${{ secrets.AWS_KEY }}\n  # glob-pattern for identifying the test files\n  pattern:\n    - \"./test/**/*.spec.ts\"\n  # strategy for trigerring builds for post-merge\n  strategy:\n    threshold: 1\n    name: after_n_commits\npreMerge:\n  pattern:\n    - \"./test/**/*.spec.ts\"\npreRun:\n  # set of commands to run before running the tests like `yarn install`, `yarn build`\n  command:\n    - npm ci\n    - docker build --build-arg NPM_TOKEN=${{ secrets.NPM_TOKEN }} --tag=nucleus\npostRun:\n  # set of commands to run after running the tests\n  command:\n    - node --version\n# path to your custom configuration file required by framework\nconfigFile: mocharc.yml\n# provide the version of nodejs required for your project\nnodeVersion: 14.17.2\nversion: 2.0\n"
  },
  {
    "path": "testutils/testdata/taskPayload.json",
    "content": "{\n    \"task_id\": \"axgubjynae\",\n    \"status\": \"open\",\n    \"repo_slug\": \"\",\n    \"repo_link\": \"https://xyz123.com/qwerty/nexe\",\n    \"repo_id\": \"35338247\",\n    \"org_id\": \"\",\n    \"git_provider\": \"github\",\n    \"commit_id,omitempty\": \"a8cdf48146d0360251cc113394c26fa91e1b0e24\",\n    \"build_id\": \"29d327a4f29842cdbc6cd7e8b0a1ba5d\",\n    \"start_time\": \"2022-02-06T16:20:30+05:30\",\n    \"end_time,omitempty\": \"2022-02-06T16:22:01+05:30\",\n    \"remark,omitempty\": \"dummy_remark\"\n}"
  },
  {
    "path": "testutils/testdata/tasyml/duplicate_submodule_postmerge.yaml",
    "content": "postMerge:\n  subModules:\n    - name: some-module-1\n      path: \"./somepath\"\n      pattern:\n        - \"./x/y/z\"\n      framework: mocha\n      configFile: \"x/y/z\"\n\n    - name: some-module-1\n      path: \"./somepath\"\n      pattern:\n        - \"./x/y/z\"\n      framework: jasmine\n      configFile: \"x/y/z\"\n\npreMerge:\n  subModules:\n    - name: some-module-1\n      path: \"./somepath\"\n      framework: jasmine\n      pattern:\n        - \"./x/y/z\"\n      configFile: \"/x/y/z\"\n\n\nparallelism : 1\nversion: 2.0.1\n"
  },
  {
    "path": "testutils/testdata/tasyml/duplicate_submodule_premerge.yaml",
    "content": "postMerge:\n  subModules:\n    - name: some-module-1\n      path: \"./somepath\"\n      pattern:\n        - \"./x/y/z\"\n      framework: mocha\n      configFile: \"x/y/z\"\n    \n\npreMerge:\n  subModules:\n    - name: some-module-1\n      path: \"./somepath\"\n      framework: jasmine\n      pattern:\n        - \"./x/y/z\"\n      configFile: \"/x/y/z\"\n\n    - name: some-module-1\n      path: \"./somepath-2\"\n      framework: mocha\n      pattern:\n        - \"./x/y/z\"\n      configFile: \"/x/y/z\"\n\nparallelism : 1\nversion: 2.0.1\n"
  },
  {
    "path": "testutils/testdata/tasyml/framework_only_required.yml",
    "content": "framework: mocha\nversion: 1.2\n"
  },
  {
    "path": "testutils/testdata/tasyml/invalidVersion.yml",
    "content": "version: \"a.b.c\""
  },
  {
    "path": "testutils/testdata/tasyml/invalid_fields.yml",
    "content": "framework: hello\nnodeVersion: test\nversion: 1.0\n"
  },
  {
    "path": "testutils/testdata/tasyml/invalid_types.yml",
    "content": "framework: mocha\npreMerge: hello\npostMerge: world\n"
  },
  {
    "path": "testutils/testdata/tasyml/invalid_typesv2.yml",
    "content": "postMerge:\n  - name: some-module-1\n    path: \"./somepath\"\n    patterns:\n      - \"./x/y/z\"\n    framework: some-module\n    runPrerunEveryTime: 1\n    nodeVersion: \"some\"\n    configFile: 1\n\npreMerge:\n  - name: some-module-1\n    path: \"./somepath\"\n    framework: some-module\n    patterns:\n      - \"./x/y/z\"\n    runPrerunEveryTime: 1\n    nodeVersion: \"some\"\n    configFile: 1\n\nparallelism : mocha"
  },
  {
    "path": "testutils/testdata/tasyml/junk.yml",
    "content": "hadksjhdkjshd\nsdafjkdjf%aksjdf\n    jhsjkfjdslf\n  fsfdsfkljkljslfou2y73918yehqwqk384@#%$#^$%q312\n\najsdlsf\n"
  },
  {
    "path": "testutils/testdata/tasyml/postmerge_emptyv1.yml",
    "content": "---\nframework: jest\npreMerge:\n  env:\n    NODE_ENV: development\n  pattern:\n    - \"{packages,scripts}/**/__tests__/*{.js,.coffee,[!d].ts}\"\npreRun:\n  env:\n    NODE_ENV: development\n  command:\n    - yarn\npostRun:\n  command:\n    - node --version\nconfigFile: scripts/jest/config.source-www.js\nnodeVersion: 14.17.6\nversion: 1.0\n"
  },
  {
    "path": "testutils/testdata/tasyml/postmerge_emptyv2.yaml",
    "content": "\npreMerge:\n  subModules:\n    - name: some-module-1\n      path: \"./somepath\"\n      framework: jasmine\n      pattern:\n        - \"./x/y/z\"\n      nodeVersion: 17.0.1\n      configFile: \"/x/y/z\"\n\nparallelism : 1\nversion: 2.0.1\n"
  },
  {
    "path": "testutils/testdata/tasyml/pre_merge_emptyv1.yml",
    "content": "---\nframework: jest\npostMerge:\n  env:\n      NODE_ENV: development\n  pattern:\n    - \"{packages,scripts}/**/__tests__/*{.js,.coffee,[!d].ts}\"\npreRun:\n  env:\n    NODE_ENV: development\n  command:\n    - yarn\npostRun:\n  command:\n    - node --version\nconfigFile: scripts/jest/config.source-www.js\nnodeVersion: 14.17.6\nversion: 1.0\n"
  },
  {
    "path": "testutils/testdata/tasyml/premerge_emptyv2.yaml",
    "content": "postMerge:\n  subModules:\n    - name: some-module-1\n      path: \"./somepath\"\n      pattern:\n        - \"./x/y/z\"\n      framework: mocha\n      configFile: \"x/y/z\"\n\n\nparallelism : 1\nversion: 2.0.1\n"
  },
  {
    "path": "testutils/testdata/tasyml/valid.yml",
    "content": "---\nframework: jest\npostMerge:\n  env:\n      NODE_ENV: development\n  pattern:\n    - \"{packages,scripts}/**/__tests__/*{.js,.coffee,[!d].ts}\"\npreMerge:\n  env:\n    NODE_ENV: development\n  pattern:\n    - \"{packages,scripts}/**/__tests__/*{.js,.coffee,[!d].ts}\"\npreRun:\n  env:\n    NODE_ENV: development\n  command:\n    - yarn\npostRun:\n  command:\n    - node --version\nconfigFile: scripts/jest/config.source-www.js\nnodeVersion: 14.17.6\nversion: 1.0\n"
  },
  {
    "path": "testutils/testdata/tasyml/validV2.yml",
    "content": "postMerge:\n  subModules:\n    - name: some-module-1\n      path: \"./somepath\"\n      pattern:\n        - \"./x/y/z\"\n      framework: mocha\n      configFile: \"x/y/z\"\n\npreMerge:\n  subModules:\n    - name: some-module-1\n      path: \"./somepath\"\n      framework: jasmine\n      pattern:\n        - \"./x/y/z\"\n      configFile: \"/x/y/z\"\n\nparallelism : 1\nversion: 2.0.1\n"
  },
  {
    "path": "testutils/testdata/tasyml/valid_with_cachekeyV2.yml",
    "content": "postMerge:\n  subModules:\n    - name: some-module-1\n      path: \"./somepath\"\n      pattern:\n        - \"./x/y/z\"\n      framework: mocha\n      configFile: \"x/y/z\"\n\npreMerge:\n  subModules:\n    - name: some-module-1\n      path: \"./somepath\"\n      framework: jasmine\n      pattern:\n        - \"./x/y/z\"\n      configFile: \"/x/y/z\"\n\nparallelism : 1\nversion: 2.0.1\ncache:\n  key: \"xyz\"\n  paths: \n    - \"abcd\""
  },
  {
    "path": "testutils/testdata/tasyml/validwithCacheKey.yml",
    "content": "---\nframework: jest\npostMerge:\n  env:\n      NODE_ENV: development\n  pattern:\n    - \"{packages,scripts}/**/__tests__/*{.js,.coffee,[!d].ts}\"\npreMerge:\n  env:\n    NODE_ENV: development\n  pattern:\n    - \"{packages,scripts}/**/__tests__/*{.js,.coffee,[!d].ts}\"\npreRun:\n  env:\n    NODE_ENV: development\n  command:\n    - yarn\npostRun:\n  command:\n    - node --version\nconfigFile: scripts/jest/config.source-www.js\nnodeVersion: 14.17.6\nversion: 1.0\ncache:\n  key: \"xyz\"\n  paths: \n    - \"abcd\""
  },
  {
    "path": "testutils/testdata/testblocklistdata/testBlocklist.json",
    "content": "[\n    {\n        \"name\": \"t1\",\n        \"repo\": \"fake\",\n        \"test_locator\" : \"src/test/f1.spec.js\"\n    }\n]\n"
  },
  {
    "path": "testutils/testdirectory/testdir/file",
    "content": ""
  },
  {
    "path": "testutils/testfile",
    "content": ""
  },
  {
    "path": "testutils/utils.go",
    "content": "package testutils\n\nimport (\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"os\"\n\t\"path\"\n\t\"runtime\"\n\n\t\"github.com/LambdaTest/test-at-scale/config\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/core\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/errs\"\n\t\"github.com/LambdaTest/test-at-scale/pkg/lumber\"\n)\n\n// getCurrentWorkingDir give the file path of this file\nfunc getCurrentWorkingDir() (string, error) {\n\t_, filename, _, ok := runtime.Caller(1)\n\tif !ok {\n\t\treturn \"\", errs.New(\"runtime.Calller(1) was unable to recover information\")\n\t}\n\tfilepath := path.Join(path.Dir(filename), \"../\")\n\treturn filepath, nil\n}\n\n// GetConfig returns a dummy NucleusConfig using the json file pointed by ApplicationConfigPath\nfunc GetConfig() (*config.NucleusConfig, error) {\n\tcwd, err := getCurrentWorkingDir()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tconfigJSON, err := os.ReadFile(cwd + ApplicationConfigPath) // AplicationConfigPath points to dummy config file for NucleusConfig\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvar tasConfig *config.NucleusConfig\n\terr = json.Unmarshal(configJSON, &tasConfig)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn tasConfig, nil\n}\n\n// GetTaskPayload returns a dummy core.TaskPayload using the json file pointed by TaskPayloadPath\nfunc GetTaskPayload() (*core.TaskPayload, error) {\n\tcwd, err := getCurrentWorkingDir()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpayloadJSON, err := os.ReadFile(cwd + TaskPayloadPath) // TaskPayloadPath points to json file containing dummy TaskPayload\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvar p *core.TaskPayload\n\terr = json.Unmarshal(payloadJSON, &p)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn p, nil\n}\n\n// GetLogger returns a dummy lumber.Logger.\nfunc GetLogger() (lumber.Logger, error) {\n\tlogger, err := lumber.NewLogger(lumber.LoggingConfig{ConsoleLevel: lumber.Debug}, true, 1)\n\tif err != nil {\n\t\treturn nil, 
err\n\t}\n\n\treturn logger, nil\n}\n\n// GetPayload returns a dummy core.Payload using the json file pointed by PayloadPath.\nfunc GetPayload() (*core.Payload, error) {\n\tcwd, err := getCurrentWorkingDir()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpayloadJSON, err := os.ReadFile(cwd + PayloadPath) // PayloadPath points to json file containing dummy PayloadPath\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvar p *core.Payload\n\terr = json.Unmarshal(payloadJSON, &p)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn p, nil\n}\n\n// GetGitlabCommitDiff returns a dummy GitlabCommitDiff as slice of byte data.\nfunc GetGitlabCommitDiff() ([]byte, error) {\n\tcwd, err := getCurrentWorkingDir()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdata, err := os.ReadFile(cwd + GitlabCommitDiff) // GitLabCommitDiff points to json file containing dummy GitLabCommitDiff\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn data, err\n}\n\nfunc LoadFile(relativePath string) ([]byte, error) {\n\tcwd, err := getCurrentWorkingDir()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tabsPath := fmt.Sprintf(\"%s/%s\", cwd, relativePath)\n\tdata, err := os.ReadFile(absPath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn data, err\n}\n"
  }
]